135 files changed, 2636 insertions(+), 657 deletions(-)
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 52e21152dd2..60a2b5d5b5b 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -579,7 +579,7 @@ codequality: script: - cp .rubocop.yml .rubocop.yml.bak - grep -v "rubocop-gitlab-security" .rubocop.yml.bak > .rubocop.yml - - docker run --env CODECLIMATE_CODE="$PWD" --volume "$PWD":/code --volume /var/run/docker.sock:/var/run/docker.sock --volume /tmp/cc:/tmp/cc codeclimate/codeclimate:0.69.0 analyze -f json > raw_codeclimate.json + - docker run --env CODECLIMATE_CODE="$PWD" --volume "$PWD":/code --volume /var/run/docker.sock:/var/run/docker.sock --volume /tmp/cc:/tmp/cc codeclimate/codeclimate analyze -f json > raw_codeclimate.json - cat raw_codeclimate.json | docker run -i stedolan/jq -c 'map({check_name,fingerprint,location})' > codeclimate.json - mv .rubocop.yml.bak .rubocop.yml artifacts: @@ -111,7 +111,7 @@ gem 'google-api-client', '~> 0.13.6' gem 'unf', '~> 0.1.4' # Seed data -gem 'seed-fu', '~> 2.3.5' +gem 'seed-fu', '~> 2.3.7' # Markdown and HTML processing gem 'html-pipeline', '~> 1.11.0' diff --git a/Gemfile.lock b/Gemfile.lock index cfad693f933..7375fce8b1e 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -815,7 +815,7 @@ GEM rake (>= 0.9, < 13) sass (~> 3.4.20) securecompare (1.0.0) - seed-fu (2.3.6) + seed-fu (2.3.7) activerecord (>= 3.1) activesupport (>= 3.1) select2-rails (3.5.9.3) @@ -1153,7 +1153,7 @@ DEPENDENCIES sanitize (~> 2.0) sass-rails (~> 5.0.6) scss_lint (~> 0.54.0) - seed-fu (~> 2.3.5) + seed-fu (~> 2.3.7) select2-rails (~> 3.5.9) selenium-webdriver (~> 3.5) sentry-raven (~> 2.5.3) diff --git a/app/assets/javascripts/dispatcher.js b/app/assets/javascripts/dispatcher.js index 34708977d20..a21c92f24d6 100644 --- a/app/assets/javascripts/dispatcher.js +++ b/app/assets/javascripts/dispatcher.js @@ -383,6 +383,7 @@ import ProjectVariables from './project_variables'; projectImport(); break; case 'projects:pipelines:new': + case 'projects:pipelines:create': new NewBranchForm($('.js-new-pipeline-form')); break; case 'projects:pipelines:builds': @@ -521,6 +522,13 @@ import ProjectVariables from './project_variables'; case 'projects:settings:ci_cd:show': // Initialize expandable settings panels initSettingsPanels(); + + import(/* webpackChunkName: "ci-cd-settings" */ './projects/ci_cd_settings_bundle') + .then(ciCdSettings => ciCdSettings.default()) + .catch((err) => { + Flash(s__('ProjectSettings|Problem setting up the CI/CD settings JavaScript')); + throw err; + }); case 'groups:settings:ci_cd:show': new ProjectVariables(); break; diff --git a/app/assets/javascripts/flash.js b/app/assets/javascripts/flash.js index 67261c1c9b4..44deab9288e 100644 --- a/app/assets/javascripts/flash.js +++ b/app/assets/javascripts/flash.js @@ -41,7 +41,7 @@ const createFlashEl = (message, type, isInContentWrapper = false) => ` `; const removeFlashClickListener = (flashEl, fadeTransition) => { - flashEl.parentNode.addEventListener('click', () => hideFlash(flashEl, fadeTransition)); + flashEl.addEventListener('click', () => hideFlash(flashEl, fadeTransition)); }; /* diff --git a/app/assets/javascripts/issue_show/components/app.vue b/app/assets/javascripts/issue_show/components/app.vue index 1f9571ff413..5bdc7c99503 100644 --- a/app/assets/javascripts/issue_show/components/app.vue +++ b/app/assets/javascripts/issue_show/components/app.vue @@ -16,6 +16,10 @@ export default { required: true, type: String, }, + updateEndpoint: { + required: true, + type: String, + }, canUpdate: { required: true, type: Boolean, @@ -262,6 +266,8 @@ export default { 
:description-text="state.descriptionText" :updated-at="state.updatedAt" :task-status="state.taskStatus" + :issuable-type="issuableType" + :update-url="updateEndpoint" /> <edited-component v-if="hasUpdated" diff --git a/app/assets/javascripts/issue_show/components/description.vue b/app/assets/javascripts/issue_show/components/description.vue index 48bad8f1e68..b7559ced946 100644 --- a/app/assets/javascripts/issue_show/components/description.vue +++ b/app/assets/javascripts/issue_show/components/description.vue @@ -22,6 +22,16 @@ required: false, default: '', }, + issuableType: { + type: String, + required: false, + default: 'issue', + }, + updateUrl: { + type: String, + required: false, + default: null, + }, }, data() { return { @@ -48,7 +58,7 @@ if (this.canUpdate) { // eslint-disable-next-line no-new new TaskList({ - dataType: 'issue', + dataType: this.issuableType, fieldName: 'description', selector: '.detail-page-description', }); @@ -95,7 +105,9 @@ <textarea class="hidden js-task-list-field" v-if="descriptionText" - v-model="descriptionText"> + v-model="descriptionText" + :data-update-url="updateUrl" + > </textarea> </div> </template> diff --git a/app/assets/javascripts/issue_show/components/title.vue b/app/assets/javascripts/issue_show/components/title.vue index 00002709ac6..a363d06d950 100644 --- a/app/assets/javascripts/issue_show/components/title.vue +++ b/app/assets/javascripts/issue_show/components/title.vue @@ -79,7 +79,7 @@ v-tooltip v-if="showInlineEditButton && canUpdate" type="button" - class="btn-blank btn-edit note-action-button" + class="btn btn-default btn-edit btn-svg" v-html="pencilIcon" title="Edit title and description" data-placement="bottom" diff --git a/app/assets/javascripts/lib/utils/common_utils.js b/app/assets/javascripts/lib/utils/common_utils.js index 6fa1e84c170..33cc807912c 100644 --- a/app/assets/javascripts/lib/utils/common_utils.js +++ b/app/assets/javascripts/lib/utils/common_utils.js @@ -190,7 +190,7 @@ export const insertText = (target, text) => { target.selectionStart = target.selectionEnd = selectionStart + insertedText.length; // Trigger autosave - $(target).trigger('input'); + target.dispatchEvent(new Event('input')); // Trigger autosize const event = document.createEvent('Event'); diff --git a/app/assets/javascripts/main.js b/app/assets/javascripts/main.js index 08e326cba9c..5e0edd823be 100644 --- a/app/assets/javascripts/main.js +++ b/app/assets/javascripts/main.js @@ -301,6 +301,8 @@ $(function () { const flashContainer = document.querySelector('.flash-container'); if (flashContainer && flashContainer.children.length) { - removeFlashClickListener(flashContainer.children[0]); + flashContainer.querySelectorAll('.flash-alert, .flash-notice, .flash-success').forEach((flashEl) => { + removeFlashClickListener(flashEl); + }); } }); diff --git a/app/assets/javascripts/projects/ci_cd_settings_bundle.js b/app/assets/javascripts/projects/ci_cd_settings_bundle.js new file mode 100644 index 00000000000..90e418f6771 --- /dev/null +++ b/app/assets/javascripts/projects/ci_cd_settings_bundle.js @@ -0,0 +1,19 @@ +function updateAutoDevopsRadios(radioWrappers) { + radioWrappers.forEach((radioWrapper) => { + const radio = radioWrapper.querySelector('.js-auto-devops-enable-radio'); + const runPipelineCheckboxWrapper = radioWrapper.querySelector('.js-run-auto-devops-pipeline-checkbox-wrapper'); + const runPipelineCheckbox = radioWrapper.querySelector('.js-run-auto-devops-pipeline-checkbox'); + + if (runPipelineCheckbox) { + runPipelineCheckbox.checked = 
radio.checked; + runPipelineCheckboxWrapper.classList.toggle('hide', !radio.checked); + } + }); +} + +export default function initCiCdSettings() { + const radioWrappers = document.querySelectorAll('.js-auto-devops-enable-radio-wrapper'); + radioWrappers.forEach(radioWrapper => + radioWrapper.addEventListener('change', () => updateAutoDevopsRadios(radioWrappers)), + ); +} diff --git a/app/assets/javascripts/vue_shared/components/icon.vue b/app/assets/javascripts/vue_shared/components/icon.vue index 8f116233e72..4216660da8c 100644 --- a/app/assets/javascripts/vue_shared/components/icon.vue +++ b/app/assets/javascripts/vue_shared/components/icon.vue @@ -12,6 +12,9 @@ /> */ + // only allow classes in images.scss e.g. s12 + const validSizes = [8, 12, 16, 18, 24, 32, 48, 72]; + export default { props: { name: { @@ -22,7 +25,10 @@ size: { type: Number, required: false, - default: 0, + default: 16, + validator(value) { + return validSizes.includes(value); + }, }, cssClasses: { @@ -42,10 +48,11 @@ }, }; </script> + <template> <svg :class="[iconSizeClass, cssClasses]"> - <use + <use v-bind="{'xlink:href':spriteHref}"/> </svg> </template> diff --git a/app/assets/stylesheets/framework/animations.scss b/app/assets/stylesheets/framework/animations.scss index 374988bb590..728f9a27aca 100644 --- a/app/assets/stylesheets/framework/animations.scss +++ b/app/assets/stylesheets/framework/animations.scss @@ -125,7 +125,7 @@ @include transition(border-color); } -.note-action-button .link-highlight, +.note-action-button, .toolbar-btn, .dropdown-toggle-caret { @include transition(color); diff --git a/app/assets/stylesheets/framework/buttons.scss b/app/assets/stylesheets/framework/buttons.scss index dfa3d4c6fb9..cdc2aa196dd 100644 --- a/app/assets/stylesheets/framework/buttons.scss +++ b/app/assets/stylesheets/framework/buttons.scss @@ -88,17 +88,6 @@ border-color: $border-dark; color: $color; } - - svg { - - path { - fill: $color; - } - - use { - stroke: $color; - } - } } @mixin btn-green { @@ -142,6 +131,13 @@ } } +@mixin btn-svg { + height: $gl-padding; + width: $gl-padding; + top: 0; + vertical-align: text-top; +} + .btn { @include btn-default; @include btn-white; @@ -440,3 +436,7 @@ text-decoration: none; } } + +.btn-svg svg { + @include btn-svg; +} diff --git a/app/assets/stylesheets/framework/common.scss b/app/assets/stylesheets/framework/common.scss index 5e4ddf366ef..cb1aad90a9c 100644 --- a/app/assets/stylesheets/framework/common.scss +++ b/app/assets/stylesheets/framework/common.scss @@ -2,14 +2,43 @@ .cgray { color: $common-gray; } .clgray { color: $common-gray-light; } .cred { color: $common-red; } -svg.cred { fill: $common-red; } .cgreen { color: $common-green; } -svg.cgreen { fill: $common-green; } .cdark { color: $common-gray-dark; } + +.text-plain, +.text-plain:hover { + color: $gl-text-color; +} + .text-secondary { color: $gl-text-color-secondary; } +.text-primary, +.text-primary:hover { + color: $brand-primary; +} + +.text-success, +.text-success:hover { + color: $brand-success; +} + +.text-danger, +.text-danger:hover { + color: $brand-danger; +} + +.text-warning, +.text-warning:hover { + color: $brand-warning; +} + +.text-info, +.text-info:hover { + color: $brand-info; +} + .underlined-link { text-decoration: underline; } .hint { font-style: italic; color: $hint-color; } .light { color: $common-gray; } diff --git a/app/assets/stylesheets/framework/flash.scss b/app/assets/stylesheets/framework/flash.scss index e1b086ebb2b..88ce119ee3a 100644 --- a/app/assets/stylesheets/framework/flash.scss +++ 
b/app/assets/stylesheets/framework/flash.scss @@ -34,8 +34,15 @@ } } + .flash-success { + @extend .alert; + @extend .alert-success; + margin: 0; + } + .flash-notice, - .flash-alert { + .flash-alert, + .flash-success { border-radius: $border-radius-default; .container-fluid, @@ -48,7 +55,8 @@ margin-bottom: 0; .flash-notice, - .flash-alert { + .flash-alert, + .flash-success { border-radius: 0; } } diff --git a/app/assets/stylesheets/framework/icons.scss b/app/assets/stylesheets/framework/icons.scss index 1ab5e6a93f9..9e45ed52163 100644 --- a/app/assets/stylesheets/framework/icons.scss +++ b/app/assets/stylesheets/framework/icons.scss @@ -1,35 +1,63 @@ .ci-status-icon-success, .ci-status-icon-passed { - color: $green-500; + &, + &:hover, + &:focus { + color: $green-500; + } } .ci-status-icon-failed { - color: $gl-danger; + &, + &:hover, + &:focus { + color: $gl-danger; + } } .ci-status-icon-pending, .ci-status-icon-failed_with_warnings, .ci-status-icon-success_with_warnings { - color: $orange-500; + &, + &:hover, + &:focus { + color: $orange-500; + } } .ci-status-icon-running { - color: $blue-400; + &, + &:hover, + &:focus { + color: $blue-400; + } } .ci-status-icon-canceled, .ci-status-icon-disabled, .ci-status-icon-not-found { - color: $gl-text-color; + &, + &:hover, + &:focus { + color: $gl-text-color; + } } .ci-status-icon-created, .ci-status-icon-skipped { - color: $gray-darkest; + &, + &:hover, + &:focus { + color: $gray-darkest; + } } .ci-status-icon-manual { - color: $gl-text-color; + &, + &:hover, + &:focus { + color: $gl-text-color; + } } .icon-link { diff --git a/app/assets/stylesheets/framework/new-nav.scss b/app/assets/stylesheets/framework/new-nav.scss deleted file mode 100644 index e69de29bb2d..00000000000 --- a/app/assets/stylesheets/framework/new-nav.scss +++ /dev/null diff --git a/app/assets/stylesheets/framework/tw_bootstrap.scss b/app/assets/stylesheets/framework/tw_bootstrap.scss index d5c6ddbb4a5..1c6e2bf3074 100644 --- a/app/assets/stylesheets/framework/tw_bootstrap.scss +++ b/app/assets/stylesheets/framework/tw_bootstrap.scss @@ -195,33 +195,6 @@ summary { } } -// Typography ================================================================= - -.text-primary, -.text-primary:hover { - color: $brand-primary; -} - -.text-success, -.text-success:hover { - color: $brand-success; -} - -.text-danger, -.text-danger:hover { - color: $brand-danger; -} - -.text-warning, -.text-warning:hover { - color: $brand-warning; -} - -.text-info, -.text-info:hover { - color: $brand-info; -} - // Prevent datetimes on tooltips to break into two lines .local-timeago { white-space: nowrap; diff --git a/app/assets/stylesheets/pages/issuable.scss b/app/assets/stylesheets/pages/issuable.scss index 7131c18ce3d..63c51747f92 100644 --- a/app/assets/stylesheets/pages/issuable.scss +++ b/app/assets/stylesheets/pages/issuable.scss @@ -70,14 +70,13 @@ .title { padding: 0; - margin-bottom: 16px; + margin-bottom: $gl-padding; border-bottom: 0; } .btn-edit { margin-left: auto; - // Set height to match title height - height: 2em; + height: $gl-padding * 2; } // Border around images in issue and MR descriptions. 
diff --git a/app/assets/stylesheets/pages/notes.scss b/app/assets/stylesheets/pages/notes.scss index 2461b818219..4fe182c9fce 100644 --- a/app/assets/stylesheets/pages/notes.scss +++ b/app/assets/stylesheets/pages/notes.scss @@ -543,10 +543,7 @@ ul.notes { } svg { - height: 16px; - width: 16px; - top: 0; - vertical-align: text-top; + @include btn-svg; } .award-control-icon-positive, @@ -780,12 +777,6 @@ ul.notes { } } - svg { - fill: currentColor; - height: 16px; - width: 16px; - } - .loading { margin: 0; height: auto; diff --git a/app/assets/stylesheets/pages/projects.scss b/app/assets/stylesheets/pages/projects.scss index aaad6dbba8e..2c83b30500d 100644 --- a/app/assets/stylesheets/pages/projects.scss +++ b/app/assets/stylesheets/pages/projects.scss @@ -291,14 +291,7 @@ } svg { - - path { - fill: $layout-link-gray; - } - - use { - stroke: $layout-link-gray; - } + fill: $layout-link-gray; } .fa-caret-down { @@ -886,10 +879,6 @@ pre.light-well { font-size: $gl-font-size; } - a { - color: $gl-text-color; - } - .avatar-container, .controls { flex: 0 0 auto; diff --git a/app/controllers/projects/merge_requests/creations_controller.rb b/app/controllers/projects/merge_requests/creations_controller.rb index 764a9c7111e..1511fc08c89 100644 --- a/app/controllers/projects/merge_requests/creations_controller.rb +++ b/app/controllers/projects/merge_requests/creations_controller.rb @@ -65,7 +65,7 @@ class Projects::MergeRequests::CreationsController < Projects::MergeRequests::Ap if params[:ref].present? @ref = params[:ref] - @commit = @repository.commit("refs/heads/#{@ref}") + @commit = @repository.commit(Gitlab::Git::BRANCH_REF_PREFIX + @ref) end render layout: false @@ -76,7 +76,7 @@ class Projects::MergeRequests::CreationsController < Projects::MergeRequests::Ap if params[:ref].present? @ref = params[:ref] - @commit = @target_project.commit("refs/heads/#{@ref}") + @commit = @target_project.commit(Gitlab::Git::BRANCH_REF_PREFIX + @ref) end render layout: false diff --git a/app/controllers/projects/pipelines_settings_controller.rb b/app/controllers/projects/pipelines_settings_controller.rb index abab2e2f0c9..b890818c475 100644 --- a/app/controllers/projects/pipelines_settings_controller.rb +++ b/app/controllers/projects/pipelines_settings_controller.rb @@ -6,11 +6,19 @@ class Projects::PipelinesSettingsController < Projects::ApplicationController end def update - if @project.update(update_params) - flash[:notice] = "Pipelines settings for '#{@project.name}' were successfully updated." - redirect_to project_settings_ci_cd_path(@project) - else - render 'show' + Projects::UpdateService.new(project, current_user, update_params).tap do |service| + if service.execute + flash[:notice] = "Pipelines settings for '#{@project.name}' were successfully updated." + + if service.run_auto_devops_pipeline? 
+ CreatePipelineWorker.perform_async(project.id, current_user.id, project.default_branch, :web, ignore_skip_ci: true, save_on_errors: false) + flash[:success] = "A new Auto DevOps pipeline has been created, go to <a href=\"#{project_pipelines_path(@project)}\">Pipelines page</a> for details".html_safe + end + + redirect_to project_settings_ci_cd_path(@project) + else + render 'show' + end end end @@ -21,6 +29,7 @@ class Projects::PipelinesSettingsController < Projects::ApplicationController :runners_token, :builds_enabled, :build_allow_git_fetch, :build_timeout_in_minutes, :build_coverage_regex, :public_builds, :auto_cancel_pending_pipelines, :ci_config_path, + :run_auto_devops_pipeline_implicit, :run_auto_devops_pipeline_explicit, auto_devops_attributes: [:id, :domain, :enabled] ) end diff --git a/app/finders/notes_finder.rb b/app/finders/notes_finder.rb index 02eb983bf55..12157818bcd 100644 --- a/app/finders/notes_finder.rb +++ b/app/finders/notes_finder.rb @@ -104,8 +104,7 @@ class NotesFinder query = @params[:search] return notes unless query - pattern = "%#{query}%" - notes.where(Note.arel_table[:note].matches(pattern)) + notes.search(query) end # Notes changed since last fetch diff --git a/app/finders/runner_jobs_finder.rb b/app/finders/runner_jobs_finder.rb new file mode 100644 index 00000000000..52340f94523 --- /dev/null +++ b/app/finders/runner_jobs_finder.rb @@ -0,0 +1,22 @@ +class RunnerJobsFinder + attr_reader :runner, :params + + def initialize(runner, params = {}) + @runner = runner + @params = params + end + + def execute + items = @runner.builds + items = by_status(items) + items + end + + private + + def by_status(items) + return items unless HasStatus::AVAILABLE_STATUSES.include?(params[:status]) + + items.where(status: params[:status]) + end +end diff --git a/app/helpers/application_settings_helper.rb b/app/helpers/application_settings_helper.rb index 6fc4248b245..5bb84984142 100644 --- a/app/helpers/application_settings_helper.rb +++ b/app/helpers/application_settings_helper.rb @@ -177,6 +177,9 @@ module ApplicationSettingsHelper :ed25519_key_restriction, :email_author_in_body, :enabled_git_access_protocol, + :gitaly_timeout_default, + :gitaly_timeout_medium, + :gitaly_timeout_fast, :gravatar_enabled, :hashed_storage_enabled, :help_page_hide_commercial_content, diff --git a/app/helpers/auto_devops_helper.rb b/app/helpers/auto_devops_helper.rb index 483b957decb..069c29feb80 100644 --- a/app/helpers/auto_devops_helper.rb +++ b/app/helpers/auto_devops_helper.rb @@ -8,6 +8,22 @@ module AutoDevopsHelper !project.ci_service end + def show_run_auto_devops_pipeline_checkbox_for_instance_setting?(project) + return false if project.repository.gitlab_ci_yml + + if project&.auto_devops&.enabled.present? + !project.auto_devops.enabled && current_application_settings.auto_devops_enabled? + else + current_application_settings.auto_devops_enabled? + end + end + + def show_run_auto_devops_pipeline_checkbox_for_explicit_setting?(project) + return false if project.repository.gitlab_ci_yml + + !project.auto_devops_enabled? + end + def auto_devops_warning_message(project) missing_domain = !project.auto_devops&.has_domain? missing_service = !project.kubernetes_service&.active? 
diff --git a/app/helpers/issuables_helper.rb b/app/helpers/issuables_helper.rb index a9840d19178..4c60f4b0cd0 100644 --- a/app/helpers/issuables_helper.rb +++ b/app/helpers/issuables_helper.rb @@ -212,6 +212,7 @@ module IssuablesHelper def issuable_initial_data(issuable) data = { endpoint: issuable_path(issuable), + updateEndpoint: "#{issuable_path(issuable)}.json", canUpdate: can?(current_user, :"update_#{issuable.to_ability_name}", issuable), canDestroy: can?(current_user, :"destroy_#{issuable.to_ability_name}", issuable), issuableRef: issuable.to_reference, diff --git a/app/models/application_setting.rb b/app/models/application_setting.rb index 01455a52d2a..3117c98c846 100644 --- a/app/models/application_setting.rb +++ b/app/models/application_setting.rb @@ -172,6 +172,27 @@ class ApplicationSetting < ActiveRecord::Base end end + validates :gitaly_timeout_default, + presence: true, + numericality: { only_integer: true, greater_than_or_equal_to: 0 } + + validates :gitaly_timeout_medium, + presence: true, + numericality: { only_integer: true, greater_than_or_equal_to: 0 } + validates :gitaly_timeout_medium, + numericality: { less_than_or_equal_to: :gitaly_timeout_default }, + if: :gitaly_timeout_default + validates :gitaly_timeout_medium, + numericality: { greater_than_or_equal_to: :gitaly_timeout_fast }, + if: :gitaly_timeout_fast + + validates :gitaly_timeout_fast, + presence: true, + numericality: { only_integer: true, greater_than_or_equal_to: 0 } + validates :gitaly_timeout_fast, + numericality: { less_than_or_equal_to: :gitaly_timeout_default }, + if: :gitaly_timeout_default + SUPPORTED_KEY_TYPES.each do |type| validates :"#{type}_key_restriction", presence: true, key_restriction: { type: type } end @@ -308,7 +329,10 @@ class ApplicationSetting < ActiveRecord::Base two_factor_grace_period: 48, user_default_external: false, polling_interval_multiplier: 1, - usage_ping_enabled: Settings.gitlab['usage_ping_enabled'] + usage_ping_enabled: Settings.gitlab['usage_ping_enabled'], + gitaly_timeout_fast: 10, + gitaly_timeout_medium: 30, + gitaly_timeout_default: 55 } end diff --git a/app/models/ci/runner.rb b/app/models/ci/runner.rb index c6509f89117..d39610a8995 100644 --- a/app/models/ci/runner.rb +++ b/app/models/ci/runner.rb @@ -1,6 +1,7 @@ module Ci class Runner < ActiveRecord::Base extend Gitlab::Ci::Model + include Gitlab::SQL::Pattern RUNNER_QUEUE_EXPIRY_TIME = 60.minutes ONLINE_CONTACT_TIMEOUT = 1.hour @@ -59,10 +60,7 @@ module Ci # # Returns an ActiveRecord::Relation. def self.search(query) - t = arel_table - pattern = "%#{query}%" - - where(t[:token].matches(pattern).or(t[:description].matches(pattern))) + fuzzy_search(query, [:token, :description]) end def self.contact_time_deadline diff --git a/app/models/concerns/has_variable.rb b/app/models/concerns/has_variable.rb index 9585b5583dc..8a241e4374a 100644 --- a/app/models/concerns/has_variable.rb +++ b/app/models/concerns/has_variable.rb @@ -16,6 +16,10 @@ module HasVariable key: Gitlab::Application.secrets.db_key_base, algorithm: 'aes-256-cbc' + def key=(new_key) + super(new_key.to_s.strip) + end + def to_runner_variable { key: key, value: value, public: false } end diff --git a/app/models/concerns/issuable.rb b/app/models/concerns/issuable.rb index e607707475f..5ca4a7086cb 100644 --- a/app/models/concerns/issuable.rb +++ b/app/models/concerns/issuable.rb @@ -122,9 +122,7 @@ module Issuable # # Returns an ActiveRecord::Relation. 
def search(query) - title = to_fuzzy_arel(:title, query) - - where(title) + fuzzy_search(query, [:title]) end # Searches for records with a matching title or description. @@ -135,10 +133,7 @@ module Issuable # # Returns an ActiveRecord::Relation. def full_search(query) - title = to_fuzzy_arel(:title, query) - description = to_fuzzy_arel(:description, query) - - where(title&.or(description)) + fuzzy_search(query, [:title, :description]) end def sort(method, excluded_labels: []) @@ -255,8 +250,10 @@ module Issuable participants(user).include?(user) end - def to_hook_data(user, old_labels: [], old_assignees: [], old_total_time_spent: nil) + def to_hook_data(user, old_associations: {}) changes = previous_changes + old_labels = old_associations.fetch(:labels, []) + old_assignees = old_associations.fetch(:assignees, []) if old_labels != labels changes[:labels] = [old_labels.map(&:hook_attrs), labels.map(&:hook_attrs)] @@ -270,8 +267,12 @@ module Issuable end end - if old_total_time_spent != total_time_spent - changes[:total_time_spent] = [old_total_time_spent, total_time_spent] + if self.respond_to?(:total_time_spent) + old_total_time_spent = old_associations.fetch(:total_time_spent, nil) + + if old_total_time_spent != total_time_spent + changes[:total_time_spent] = [old_total_time_spent, total_time_spent] + end end Gitlab::HookData::IssuableBuilder.new(self).build(user: user, changes: changes) diff --git a/app/models/email.rb b/app/models/email.rb index 2da8b050149..d6516761f0a 100644 --- a/app/models/email.rb +++ b/app/models/email.rb @@ -1,5 +1,6 @@ class Email < ActiveRecord::Base include Sortable + include Gitlab::SQL::Pattern belongs_to :user diff --git a/app/models/group.rb b/app/models/group.rb index dc4500360b9..76262acf50c 100644 --- a/app/models/group.rb +++ b/app/models/group.rb @@ -50,20 +50,6 @@ class Group < Namespace Gitlab::Database.postgresql? end - # Searches for groups matching the given query. - # - # This method uses ILIKE on PostgreSQL and LIKE on MySQL. - # - # query - The search query as a String - # - # Returns an ActiveRecord::Relation. - def search(query) - table = Namespace.arel_table - pattern = "%#{query}%" - - where(table[:name].matches(pattern).or(table[:path].matches(pattern))) - end - def sort(method) if method == 'storage_size_desc' # storage_size is a virtual column so we need to diff --git a/app/models/merge_request.rb b/app/models/merge_request.rb index e4d8f486c77..a6b10d5349c 100644 --- a/app/models/merge_request.rb +++ b/app/models/merge_request.rb @@ -364,16 +364,28 @@ class MergeRequest < ActiveRecord::Base # We use these attributes to force these to the intended values. 
attr_writer :target_branch_sha, :source_branch_sha + def source_branch_ref + return @source_branch_sha if @source_branch_sha + return unless source_branch + + Gitlab::Git::BRANCH_REF_PREFIX + source_branch + end + + def target_branch_ref + return @target_branch_sha if @target_branch_sha + return unless target_branch + + Gitlab::Git::BRANCH_REF_PREFIX + target_branch + end + def source_branch_head return unless source_project - source_branch_ref = @source_branch_sha || source_branch source_project.repository.commit(source_branch_ref) if source_branch_ref end def target_branch_head - target_branch_ref = @target_branch_sha || target_branch - target_project.repository.commit(target_branch_ref) if target_branch_ref + target_project.repository.commit(target_branch_ref) end def branch_merge_base_commit diff --git a/app/models/milestone.rb b/app/models/milestone.rb index 01458120cda..c06ee8083f0 100644 --- a/app/models/milestone.rb +++ b/app/models/milestone.rb @@ -13,6 +13,7 @@ class Milestone < ActiveRecord::Base include Referable include StripAttribute include Milestoneish + include Gitlab::SQL::Pattern cache_markdown_field :title, pipeline: :single_line cache_markdown_field :description @@ -73,10 +74,7 @@ class Milestone < ActiveRecord::Base # # Returns an ActiveRecord::Relation. def search(query) - t = arel_table - pattern = "%#{query}%" - - where(t[:title].matches(pattern).or(t[:description].matches(pattern))) + fuzzy_search(query, [:title, :description]) end def filter_by_state(milestones, state) diff --git a/app/models/namespace.rb b/app/models/namespace.rb index 4d401e7ba18..fa76729a702 100644 --- a/app/models/namespace.rb +++ b/app/models/namespace.rb @@ -9,6 +9,7 @@ class Namespace < ActiveRecord::Base include Routable include AfterCommitQueue include Storage::LegacyNamespace + include Gitlab::SQL::Pattern # Prevent users from creating unreasonably deep level of nesting. # The number 20 was taken based on maximum nesting level of @@ -86,10 +87,7 @@ class Namespace < ActiveRecord::Base # # Returns an ActiveRecord::Relation def search(query) - t = arel_table - pattern = "%#{query}%" - - where(t[:name].matches(pattern).or(t[:path].matches(pattern))) + fuzzy_search(query, [:name, :path]) end def clean_path(path) diff --git a/app/models/note.rb b/app/models/note.rb index 50c9caf8529..340fe087f82 100644 --- a/app/models/note.rb +++ b/app/models/note.rb @@ -14,6 +14,7 @@ class Note < ActiveRecord::Base include ResolvableNote include IgnorableColumn include Editable + include Gitlab::SQL::Pattern module SpecialRole FIRST_TIME_CONTRIBUTOR = :first_time_contributor @@ -167,6 +168,10 @@ class Note < ActiveRecord::Base def has_special_role?(role, note) note.special_role == role end + + def search(query) + fuzzy_search(query, [:note]) + end end def cross_reference? 
diff --git a/app/models/project.rb b/app/models/project.rb index e276bd2422d..5a3f591c2e7 100644 --- a/app/models/project.rb +++ b/app/models/project.rb @@ -273,8 +273,9 @@ class Project < ActiveRecord::Base scope :pending_delete, -> { where(pending_delete: true) } scope :without_deleted, -> { where(pending_delete: false) } - scope :with_hashed_storage, -> { where('storage_version >= 1') } - scope :with_legacy_storage, -> { where(storage_version: [nil, 0]) } + scope :with_storage_feature, ->(feature) { where('storage_version >= :version', version: HASHED_STORAGE_FEATURES[feature]) } + scope :without_storage_feature, ->(feature) { where('storage_version < :version OR storage_version IS NULL', version: HASHED_STORAGE_FEATURES[feature]) } + scope :with_unmigrated_storage, -> { where('storage_version < :version OR storage_version IS NULL', version: LATEST_STORAGE_VERSION) } scope :sorted_by_activity, -> { reorder(last_activity_at: :desc) } scope :sorted_by_stars, -> { reorder('projects.star_count DESC') } @@ -425,17 +426,11 @@ class Project < ActiveRecord::Base # # query - The search query as a String. def search(query) - pattern = to_pattern(query) - - where( - arel_table[:path].matches(pattern) - .or(arel_table[:name].matches(pattern)) - .or(arel_table[:description].matches(pattern)) - ) + fuzzy_search(query, [:path, :name, :description]) end def search_by_title(query) - non_archived.where(arel_table[:name].matches(to_pattern(query))) + non_archived.fuzzy_search(query, [:name]) end def visibility_levels diff --git a/app/models/snippet.rb b/app/models/snippet.rb index 2a5f07a15c4..05a16f11b59 100644 --- a/app/models/snippet.rb +++ b/app/models/snippet.rb @@ -9,6 +9,7 @@ class Snippet < ActiveRecord::Base include Mentionable include Spammable include Editable + include Gitlab::SQL::Pattern extend Gitlab::CurrentSettings @@ -135,10 +136,7 @@ class Snippet < ActiveRecord::Base # # Returns an ActiveRecord::Relation. def search(query) - t = arel_table - pattern = "%#{query}%" - - where(t[:title].matches(pattern).or(t[:file_name].matches(pattern))) + fuzzy_search(query, [:title, :file_name]) end # Searches for snippets with matching content. @@ -149,10 +147,7 @@ class Snippet < ActiveRecord::Base # # Returns an ActiveRecord::Relation. def search_code(query) - table = Snippet.arel_table - pattern = "%#{query}%" - - where(table[:content].matches(pattern)) + fuzzy_search(query, [:content]) end end end diff --git a/app/models/storage/hashed_project.rb b/app/models/storage/hashed_project.rb index f025f40994e..fae1b64961a 100644 --- a/app/models/storage/hashed_project.rb +++ b/app/models/storage/hashed_project.rb @@ -4,7 +4,6 @@ module Storage delegate :gitlab_shell, :repository_storage_path, to: :project ROOT_PATH_PREFIX = '@hashed'.freeze - STORAGE_VERSION = 1 def initialize(project) @project = project diff --git a/app/models/user.rb b/app/models/user.rb index cf6b36559a8..14941fd7f98 100644 --- a/app/models/user.rb +++ b/app/models/user.rb @@ -313,9 +313,6 @@ class User < ActiveRecord::Base # # Returns an ActiveRecord::Relation. 
def search(query) - table = arel_table - pattern = User.to_pattern(query) - order = <<~SQL CASE WHEN users.name = %{query} THEN 0 @@ -325,11 +322,8 @@ class User < ActiveRecord::Base END SQL - where( - table[:name].matches(pattern) - .or(table[:email].matches(pattern)) - .or(table[:username].matches(pattern)) - ).reorder(order % { query: ActiveRecord::Base.connection.quote(query) }, :name) + fuzzy_search(query, [:name, :email, :username]) + .reorder(order % { query: ActiveRecord::Base.connection.quote(query) }, :name) end # searches user by given pattern @@ -337,16 +331,16 @@ class User < ActiveRecord::Base # This method uses ILIKE on PostgreSQL and LIKE on MySQL. def search_with_secondary_emails(query) - table = arel_table email_table = Email.arel_table - pattern = "%#{query}%" - matched_by_emails_user_ids = email_table.project(email_table[:user_id]).where(email_table[:email].matches(pattern)) + matched_by_emails_user_ids = email_table + .project(email_table[:user_id]) + .where(Email.fuzzy_arel_match(:email, query)) where( - table[:name].matches(pattern) - .or(table[:email].matches(pattern)) - .or(table[:username].matches(pattern)) - .or(table[:id].in(matched_by_emails_user_ids)) + fuzzy_arel_match(:name, query) + .or(fuzzy_arel_match(:email, query)) + .or(fuzzy_arel_match(:username, query)) + .or(arel_table[:id].in(matched_by_emails_user_ids)) ) end diff --git a/app/policies/group_policy.rb b/app/policies/group_policy.rb index 8af9738d75c..a2518bc1080 100644 --- a/app/policies/group_policy.rb +++ b/app/policies/group_policy.rb @@ -34,6 +34,8 @@ class GroupPolicy < BasePolicy rule { admin } .enable :read_group rule { has_projects } .enable :read_group + rule { has_access }.enable :read_namespace + rule { developer }.enable :admin_milestones rule { reporter }.enable :admin_label diff --git a/app/policies/namespace_policy.rb b/app/policies/namespace_policy.rb index 92213f0155e..eb01218eb0a 100644 --- a/app/policies/namespace_policy.rb +++ b/app/policies/namespace_policy.rb @@ -8,6 +8,7 @@ class NamespacePolicy < BasePolicy rule { owner | admin }.policy do enable :create_projects enable :admin_namespace + enable :read_namespace end rule { personal_project & ~can_create_personal_project }.prevent :create_projects diff --git a/app/services/compare_service.rb b/app/services/compare_service.rb index 53f16a236d2..1db91c3c90c 100644 --- a/app/services/compare_service.rb +++ b/app/services/compare_service.rb @@ -1,17 +1,17 @@ require 'securerandom' -# Compare 2 branches for one repo or between repositories +# Compare 2 refs for one repo or between repositories # and return Gitlab::Git::Compare object that responds to commits and diffs class CompareService - attr_reader :start_project, :start_branch_name + attr_reader :start_project, :start_ref_name - def initialize(new_start_project, new_start_branch_name) + def initialize(new_start_project, new_start_ref_name) @start_project = new_start_project - @start_branch_name = new_start_branch_name + @start_ref_name = new_start_ref_name end - def execute(target_project, target_branch, straight: false) - raw_compare = target_project.repository.compare_source_branch(target_branch, start_project.repository, start_branch_name, straight: straight) + def execute(target_project, target_ref, straight: false) + raw_compare = target_project.repository.compare_source_branch(target_ref, start_project.repository, start_ref_name, straight: straight) Compare.new(raw_compare, target_project, straight: straight) if raw_compare end diff --git 
a/app/services/issuable_base_service.rb b/app/services/issuable_base_service.rb index 39a7299ff60..2c51ac13815 100644 --- a/app/services/issuable_base_service.rb +++ b/app/services/issuable_base_service.rb @@ -169,10 +169,7 @@ class IssuableBaseService < BaseService change_todo(issuable) toggle_award(issuable) filter_params(issuable) - old_labels = issuable.labels.to_a - old_mentioned_users = issuable.mentioned_users.to_a - old_assignees = issuable.assignees.to_a - old_total_time_spent = issuable.total_time_spent + old_associations = associations_before_update(issuable) label_ids = process_label_ids(params, existing_label_ids: issuable.label_ids) params[:label_ids] = label_ids if labels_changing?(issuable.label_ids, label_ids) @@ -193,18 +190,13 @@ class IssuableBaseService < BaseService if issuable.with_transaction_returning_status { issuable.save } # We do not touch as it will affect a update on updated_at field ActiveRecord::Base.no_touching do - Issuable::CommonSystemNotesService.new(project, current_user).execute(issuable, old_labels) + Issuable::CommonSystemNotesService.new(project, current_user).execute(issuable, old_associations[:labels]) end - handle_changes( - issuable, - old_labels: old_labels, - old_mentioned_users: old_mentioned_users, - old_assignees: old_assignees - ) + handle_changes(issuable, old_associations: old_associations) new_assignees = issuable.assignees.to_a - affected_assignees = (old_assignees + new_assignees) - (old_assignees & new_assignees) + affected_assignees = (old_associations[:assignees] + new_assignees) - (old_associations[:assignees] & new_assignees) invalidate_cache_counts(issuable, users: affected_assignees.compact) after_update(issuable) @@ -212,9 +204,8 @@ class IssuableBaseService < BaseService execute_hooks( issuable, 'update', - old_labels: old_labels, - old_assignees: old_assignees, - old_total_time_spent: old_total_time_spent) + old_associations: old_associations + ) issuable.update_project_counter_caches if update_project_counters end @@ -267,6 +258,18 @@ class IssuableBaseService < BaseService end end + def associations_before_update(issuable) + associations = + { + labels: issuable.labels.to_a, + mentioned_users: issuable.mentioned_users.to_a, + assignees: issuable.assignees.to_a + } + associations[:total_time_spent] = issuable.total_time_spent if issuable.respond_to?(:total_time_spent) + + associations + end + def has_changes?(issuable, old_labels: [], old_assignees: []) valid_attrs = [:title, :description, :assignee_id, :milestone_id, :target_branch] diff --git a/app/services/issues/base_service.rb b/app/services/issues/base_service.rb index 0f711bcc3cf..9f6cfc0f6d3 100644 --- a/app/services/issues/base_service.rb +++ b/app/services/issues/base_service.rb @@ -1,7 +1,7 @@ module Issues class BaseService < ::IssuableBaseService - def hook_data(issue, action, old_labels: [], old_assignees: [], old_total_time_spent: nil) - hook_data = issue.to_hook_data(current_user, old_labels: old_labels, old_assignees: old_assignees, old_total_time_spent: old_total_time_spent) + def hook_data(issue, action, old_associations: {}) + hook_data = issue.to_hook_data(current_user, old_associations: old_associations) hook_data[:object_attributes][:action] = action hook_data @@ -22,8 +22,8 @@ module Issues issue, issue.project, current_user, old_assignees) end - def execute_hooks(issue, action = 'open', old_labels: [], old_assignees: [], old_total_time_spent: nil) - issue_data = hook_data(issue, action, old_labels: old_labels, old_assignees: old_assignees, 
old_total_time_spent: old_total_time_spent) + def execute_hooks(issue, action = 'open', old_associations: {}) + issue_data = hook_data(issue, action, old_associations: old_associations) hooks_scope = issue.confidential? ? :confidential_issue_hooks : :issue_hooks issue.project.execute_hooks(issue_data, hooks_scope) issue.project.execute_services(issue_data, hooks_scope) diff --git a/app/services/issues/update_service.rb b/app/services/issues/update_service.rb index 1b7b5927c5a..d7aa7e2347e 100644 --- a/app/services/issues/update_service.rb +++ b/app/services/issues/update_service.rb @@ -14,9 +14,10 @@ module Issues end def handle_changes(issue, options) - old_labels = options[:old_labels] || [] - old_mentioned_users = options[:old_mentioned_users] || [] - old_assignees = options[:old_assignees] || [] + old_associations = options.fetch(:old_associations, {}) + old_labels = old_associations.fetch(:labels, []) + old_mentioned_users = old_associations.fetch(:mentioned_users, []) + old_assignees = old_associations.fetch(:assignees, []) if has_changes?(issue, old_labels: old_labels, old_assignees: old_assignees) todo_service.mark_pending_todos_as_done(issue, current_user) diff --git a/app/services/merge_requests/base_service.rb b/app/services/merge_requests/base_service.rb index f6ffd48deae..6b32d65a74b 100644 --- a/app/services/merge_requests/base_service.rb +++ b/app/services/merge_requests/base_service.rb @@ -4,8 +4,8 @@ module MergeRequests SystemNoteService.change_status(merge_request, merge_request.target_project, current_user, state, nil) end - def hook_data(merge_request, action, old_rev: nil, old_labels: [], old_assignees: [], old_total_time_spent: nil) - hook_data = merge_request.to_hook_data(current_user, old_labels: old_labels, old_assignees: old_assignees, old_total_time_spent: old_total_time_spent) + def hook_data(merge_request, action, old_rev: nil, old_associations: {}) + hook_data = merge_request.to_hook_data(current_user, old_associations: old_associations) hook_data[:object_attributes][:action] = action if old_rev && !Gitlab::Git.blank_ref?(old_rev) hook_data[:object_attributes][:oldrev] = old_rev @@ -14,9 +14,9 @@ module MergeRequests hook_data end - def execute_hooks(merge_request, action = 'open', old_rev: nil, old_labels: [], old_assignees: [], old_total_time_spent: nil) + def execute_hooks(merge_request, action = 'open', old_rev: nil, old_associations: {}) if merge_request.project - merge_data = hook_data(merge_request, action, old_rev: old_rev, old_labels: old_labels, old_assignees: old_assignees, old_total_time_spent: old_total_time_spent) + merge_data = hook_data(merge_request, action, old_rev: old_rev, old_associations: old_associations) merge_request.project.execute_hooks(merge_data, :merge_request_hooks) merge_request.project.execute_services(merge_data, :merge_request_hooks) end diff --git a/app/services/merge_requests/build_service.rb b/app/services/merge_requests/build_service.rb index f3b99e1ec8c..8223e5fed7f 100644 --- a/app/services/merge_requests/build_service.rb +++ b/app/services/merge_requests/build_service.rb @@ -18,7 +18,17 @@ module MergeRequests attr_accessor :merge_request - delegate :target_branch, :source_branch, :source_project, :target_project, :compare_commits, :wip_title, :description, :errors, to: :merge_request + delegate :target_branch, + :target_branch_ref, + :target_project, + :source_branch, + :source_branch_ref, + :source_project, + :compare_commits, + :wip_title, + :description, + :errors, + to: :merge_request def find_source_project 
return source_project if source_project.present? && can?(current_user, :read_project, source_project) @@ -54,10 +64,10 @@ module MergeRequests def compare_branches compare = CompareService.new( source_project, - source_branch + source_branch_ref ).execute( target_project, - target_branch + target_branch_ref ) if compare diff --git a/app/services/merge_requests/update_service.rb b/app/services/merge_requests/update_service.rb index 1f394cacc64..c153872c874 100644 --- a/app/services/merge_requests/update_service.rb +++ b/app/services/merge_requests/update_service.rb @@ -22,8 +22,9 @@ module MergeRequests end def handle_changes(merge_request, options) - old_labels = options[:old_labels] || [] - old_mentioned_users = options[:old_mentioned_users] || [] + old_associations = options.fetch(:old_associations, {}) + old_labels = old_associations.fetch(:labels, []) + old_mentioned_users = old_associations.fetch(:mentioned_users, []) if has_changes?(merge_request, old_labels: old_labels) todo_service.mark_pending_todos_as_done(merge_request, current_user) diff --git a/app/services/projects/hashed_storage/migrate_attachments_service.rb b/app/services/projects/hashed_storage/migrate_attachments_service.rb new file mode 100644 index 00000000000..f8aaec8a9c0 --- /dev/null +++ b/app/services/projects/hashed_storage/migrate_attachments_service.rb @@ -0,0 +1,54 @@ +module Projects + module HashedStorage + AttachmentMigrationError = Class.new(StandardError) + + class MigrateAttachmentsService < BaseService + attr_reader :logger, :old_path, :new_path + + def initialize(project, logger = nil) + @project = project + @logger = logger || Rails.logger + end + + def execute + @old_path = project.full_path + @new_path = project.disk_path + + origin = FileUploader.dynamic_path_segment(project) + project.storage_version = ::Project::HASHED_STORAGE_FEATURES[:attachments] + target = FileUploader.dynamic_path_segment(project) + + result = move_folder!(origin, target) + project.save! + + if result && block_given? + yield + end + + result + end + + private + + def move_folder!(old_path, new_path) + unless File.directory?(old_path) + logger.info("Skipped attachments migration from '#{old_path}' to '#{new_path}', source path doesn't exist or is not a directory (PROJECT_ID=#{project.id})") + return + end + + if File.exist?(new_path) + logger.error("Cannot migrate attachments from '#{old_path}' to '#{new_path}', target path already exist (PROJECT_ID=#{project.id})") + raise AttachmentMigrationError, "Target path '#{new_path}' already exist" + end + + # Create hashed storage base path folder + FileUtils.mkdir_p(File.dirname(new_path)) + + FileUtils.mv(old_path, new_path) + logger.info("Migrated project attachments from '#{old_path}' to '#{new_path}' (PROJECT_ID=#{project.id})") + + true + end + end + end +end diff --git a/app/services/projects/hashed_storage/migrate_repository_service.rb b/app/services/projects/hashed_storage/migrate_repository_service.rb new file mode 100644 index 00000000000..7212e7524ab --- /dev/null +++ b/app/services/projects/hashed_storage/migrate_repository_service.rb @@ -0,0 +1,70 @@ +module Projects + module HashedStorage + class MigrateRepositoryService < BaseService + include Gitlab::ShellAdapter + + attr_reader :old_disk_path, :new_disk_path, :old_wiki_disk_path, :old_storage_version, :logger + + def initialize(project, logger = nil) + @project = project + @logger = logger || Rails.logger + end + + def execute + @old_disk_path = project.disk_path + has_wiki = project.wiki.repository_exists? 
+ + @old_storage_version = project.storage_version + project.storage_version = ::Project::HASHED_STORAGE_FEATURES[:repository] + project.ensure_storage_path_exists + + @new_disk_path = project.disk_path + + result = move_repository(@old_disk_path, @new_disk_path) + + if has_wiki + @old_wiki_disk_path = "#{@old_disk_path}.wiki" + result &&= move_repository("#{@old_wiki_disk_path}", "#{@new_disk_path}.wiki") + end + + unless result + rollback_folder_move + project.storage_version = nil + end + + project.repository_read_only = false + project.save! + + if result && block_given? + yield + end + + result + end + + private + + def move_repository(from_name, to_name) + from_exists = gitlab_shell.exists?(project.repository_storage_path, "#{from_name}.git") + to_exists = gitlab_shell.exists?(project.repository_storage_path, "#{to_name}.git") + + # If we don't find the repository on either original or target we should log that as it could be an issue if the + # project was not originally empty. + if !from_exists && !to_exists + logger.warn "Can't find a repository on either source or target paths for #{project.full_path} (ID=#{project.id}) ..." + return false + elsif !from_exists + # Repository have been moved already. + return true + end + + gitlab_shell.mv_repository(project.repository_storage_path, from_name, to_name) + end + + def rollback_folder_move + move_repository(@new_disk_path, @old_disk_path) + move_repository("#{@new_disk_path}.wiki", "#{@old_disk_path}.wiki") + end + end + end +end diff --git a/app/services/projects/hashed_storage_migration_service.rb b/app/services/projects/hashed_storage_migration_service.rb index f5945f3b87f..662702c1db5 100644 --- a/app/services/projects/hashed_storage_migration_service.rb +++ b/app/services/projects/hashed_storage_migration_service.rb @@ -1,68 +1,22 @@ module Projects class HashedStorageMigrationService < BaseService - include Gitlab::ShellAdapter - - attr_reader :old_disk_path, :new_disk_path + attr_reader :logger def initialize(project, logger = nil) @project = project - @logger ||= Rails.logger + @logger = logger || Rails.logger end def execute - return if project.hashed_storage?(:repository) - - @old_disk_path = project.disk_path - has_wiki = project.wiki.repository_exists? - - project.storage_version = Storage::HashedProject::STORAGE_VERSION - project.ensure_storage_path_exists - - @new_disk_path = project.disk_path - - result = move_repository(@old_disk_path, @new_disk_path) - - if has_wiki - result &&= move_repository("#{@old_disk_path}.wiki", "#{@new_disk_path}.wiki") - end - - unless result - rollback_folder_move - return + # Migrate repository from Legacy to Hashed Storage + unless project.hashed_storage?(:repository) + return unless HashedStorage::MigrateRepositoryService.new(project, logger).execute end - project.repository_read_only = false - project.save! - - block_given? ? yield : result - end - - private - - def move_repository(from_name, to_name) - from_exists = gitlab_shell.exists?(project.repository_storage_path, "#{from_name}.git") - to_exists = gitlab_shell.exists?(project.repository_storage_path, "#{to_name}.git") - - # If we don't find the repository on either original or target we should log that as it could be an issue if the - # project was not originally empty. - if !from_exists && !to_exists - logger.warn "Can't find a repository on either source or target paths for #{project.full_path} (ID=#{project.id}) ..." - return false - elsif !from_exists - # Repository have been moved already. 
- return true + # Migrate attachments from Legacy to Hashed Storage + unless project.hashed_storage?(:attachments) + HashedStorage::MigrateAttachmentsService.new(project, logger).execute end - - gitlab_shell.mv_repository(project.repository_storage_path, from_name, to_name) - end - - def rollback_folder_move - move_repository(@new_disk_path, @old_disk_path) - move_repository("#{@new_disk_path}.wiki", "#{@old_disk_path}.wiki") - end - - def logger - @logger end end end diff --git a/app/services/projects/update_service.rb b/app/services/projects/update_service.rb index 13e292a18bf..72eecc61c96 100644 --- a/app/services/projects/update_service.rb +++ b/app/services/projects/update_service.rb @@ -15,7 +15,7 @@ module Projects return error("Could not set the default branch") unless project.change_head(params[:default_branch]) end - if project.update_attributes(params.except(:default_branch)) + if project.update_attributes(update_params) if project.previous_changes.include?('path') project.rename_repo else @@ -31,8 +31,16 @@ module Projects end end + def run_auto_devops_pipeline? + params.dig(:run_auto_devops_pipeline_explicit) == 'true' || params.dig(:run_auto_devops_pipeline_implicit) == 'true' + end + private + def update_params + params.except(:default_branch, :run_auto_devops_pipeline_explicit, :run_auto_devops_pipeline_implicit) + end + def renaming_project_with_container_registry_tags? new_path = params[:path] diff --git a/app/uploaders/file_uploader.rb b/app/uploaders/file_uploader.rb index f4a5cf75018..71658df5b41 100644 --- a/app/uploaders/file_uploader.rb +++ b/app/uploaders/file_uploader.rb @@ -31,12 +31,19 @@ class FileUploader < GitlabUploader # Returns a String without a trailing slash def self.dynamic_path_segment(project) if project.hashed_storage?(:attachments) - File.join(CarrierWave.root, base_dir, project.disk_path) + dynamic_path_builder(project.disk_path) else - File.join(CarrierWave.root, base_dir, project.full_path) + dynamic_path_builder(project.full_path) end end + # Auxiliary method to build dynamic path segment when not using a project model + # + # Prefer to use the `.dynamic_path_segment` as it includes Hashed Storage specific logic + def self.dynamic_path_builder(path) + File.join(CarrierWave.root, base_dir, path) + end + attr_accessor :model attr_reader :secret diff --git a/app/views/admin/application_settings/_form.html.haml b/app/views/admin/application_settings/_form.html.haml index 64249c91dd0..a9d0503bc73 100644 --- a/app/views/admin/application_settings/_form.html.haml +++ b/app/views/admin/application_settings/_form.html.haml @@ -732,6 +732,30 @@ Number of Git pushes after which 'git gc' is run. %fieldset + %legend Gitaly Timeouts + .form-group + = f.label :gitaly_timeout_default, 'Default Timeout Period', class: 'control-label col-sm-2' + .col-sm-10 + = f.number_field :gitaly_timeout_default, class: 'form-control' + .help-block + Timeout for Gitaly calls from the GitLab application (in seconds). This timeout is not enforced + for git fetch/push operations or Sidekiq jobs. + .form-group + = f.label :gitaly_timeout_fast, 'Fast Timeout Period', class: 'control-label col-sm-2' + .col-sm-10 + = f.number_field :gitaly_timeout_fast, class: 'form-control' + .help-block + Fast operation timeout (in seconds). Some Gitaly operations are expected to be fast. + If they exceed this threshold, there may be a problem with a storage shard and 'failing fast' + can help maintain the stability of the GitLab instance. 
+ .form-group + = f.label :gitaly_timeout_medium, 'Medium Timeout Period', class: 'control-label col-sm-2' + .col-sm-10 + = f.number_field :gitaly_timeout_medium, class: 'form-control' + .help-block + Medium operation timeout (in seconds). This should be a value between the Fast and the Default timeout. + + %fieldset %legend Web terminal .form-group = f.label :terminal_max_session_time, 'Max session time', class: 'control-label col-sm-2' diff --git a/app/views/layouts/_flash.html.haml b/app/views/layouts/_flash.html.haml index baa8036de10..1db32379df3 100644 --- a/app/views/layouts/_flash.html.haml +++ b/app/views/layouts/_flash.html.haml @@ -1,10 +1,6 @@ .flash-container.flash-container-page - - if alert - .flash-alert + -# We currently only support `alert`, `notice`, `success` + - flash.each do |key, value| + %div{ class: "flash-#{key}" } %div{ class: (container_class) } - %span= alert - - - elsif notice - .flash-notice - %div{ class: (container_class) } - %span= notice + %span= value diff --git a/app/views/projects/commit/_commit_box.html.haml b/app/views/projects/commit/_commit_box.html.haml index 8b9c1bbb602..5f607c2ab25 100644 --- a/app/views/projects/commit/_commit_box.html.haml +++ b/app/views/projects/commit/_commit_box.html.haml @@ -67,8 +67,8 @@ - if @commit.last_pipeline - last_pipeline = @commit.last_pipeline .well-segment.pipeline-info - .status-icon-container{ class: "ci-status-icon-#{last_pipeline.status}" } - = link_to project_pipeline_path(@project, last_pipeline.id) do + .status-icon-container + = link_to project_pipeline_path(@project, last_pipeline.id), class: "ci-status-icon-#{last_pipeline.status}" do = ci_icon_for_status(last_pipeline.status) #{ _('Pipeline') } = link_to "##{last_pipeline.id}", project_pipeline_path(@project, last_pipeline.id) diff --git a/app/views/projects/pipelines_settings/_show.html.haml b/app/views/projects/pipelines_settings/_show.html.haml index 77211099830..ee4fa663b9f 100644 --- a/app/views/projects/pipelines_settings/_show.html.haml +++ b/app/views/projects/pipelines_settings/_show.html.haml @@ -13,29 +13,39 @@ %p.settings-message.text-center = message.html_safe = f.fields_for :auto_devops_attributes, @auto_devops do |form| - .radio + .radio.js-auto-devops-enable-radio-wrapper = form.label :enabled_true do - = form.radio_button :enabled, 'true' + = form.radio_button :enabled, 'true', class: 'js-auto-devops-enable-radio' %strong Enable Auto DevOps %br %span.descr The Auto DevOps pipeline configuration will be used when there is no <code>.gitlab-ci.yml</code> in the project. - .radio + - if show_run_auto_devops_pipeline_checkbox_for_explicit_setting?(@project) + .checkbox.hide.js-run-auto-devops-pipeline-checkbox-wrapper + = label_tag 'project[run_auto_devops_pipeline_explicit]' do + = check_box_tag 'project[run_auto_devops_pipeline_explicit]', true, false, class: 'js-run-auto-devops-pipeline-checkbox' + = s_('ProjectSettings|Immediately run a pipeline on the default branch') + + .radio.js-auto-devops-enable-radio-wrapper = form.label :enabled_false do - = form.radio_button :enabled, 'false' + = form.radio_button :enabled, 'false', class: 'js-auto-devops-enable-radio' %strong Disable Auto DevOps %br %span.descr An explicit <code>.gitlab-ci.yml</code> needs to be specified before you can begin using Continuous Integration and Delivery. 
- .radio - = form.label :enabled_nil do - = form.radio_button :enabled, '' + .radio.js-auto-devops-enable-radio-wrapper + = form.label :enabled_ do + = form.radio_button :enabled, '', class: 'js-auto-devops-enable-radio' %strong Instance default (#{current_application_settings.auto_devops_enabled? ? 'enabled' : 'disabled'}) %br %span.descr Follow the instance default to either have Auto DevOps enabled or disabled when there is no project specific <code>.gitlab-ci.yml</code>. - %br + - if show_run_auto_devops_pipeline_checkbox_for_instance_setting?(@project) + .checkbox.hide.js-run-auto-devops-pipeline-checkbox-wrapper + = label_tag 'project[run_auto_devops_pipeline_implicit]' do + = check_box_tag 'project[run_auto_devops_pipeline_implicit]', true, false, class: 'js-run-auto-devops-pipeline-checkbox' + = s_('ProjectSettings|Immediately run a pipeline on the default branch') %p You need to specify a domain if you want to use Auto Review Apps and Auto Deploy stages. = form.text_field :domain, class: 'form-control', placeholder: 'domain.com' diff --git a/app/views/shared/projects/_project.html.haml b/app/views/shared/projects/_project.html.haml index 52a8fe8bb67..98bfc7c4d36 100644 --- a/app/views/shared/projects/_project.html.haml +++ b/app/views/shared/projects/_project.html.haml @@ -20,7 +20,7 @@ = project_icon(project, alt: '', class: 'avatar project-avatar s40') .project-details %h3.prepend-top-0.append-bottom-0 - = link_to project_path(project), class: dom_class(project) do + = link_to project_path(project), class: 'text-plain' do %span.project-full-name %span.namespace-name - if project.namespace && !skip_namespace diff --git a/app/workers/create_pipeline_worker.rb b/app/workers/create_pipeline_worker.rb new file mode 100644 index 00000000000..865ad1ba420 --- /dev/null +++ b/app/workers/create_pipeline_worker.rb @@ -0,0 +1,16 @@ +class CreatePipelineWorker + include Sidekiq::Worker + include PipelineQueue + + enqueue_in group: :creation + + def perform(project_id, user_id, ref, source, params = {}) + project = Project.find(project_id) + user = User.find(user_id) + params = params.deep_symbolize_keys + + Ci::CreatePipelineService + .new(project, user, ref: ref) + .execute(source, **params) + end +end diff --git a/app/workers/project_migrate_hashed_storage_worker.rb b/app/workers/project_migrate_hashed_storage_worker.rb index ca276d7801c..127aa6b9d7d 100644 --- a/app/workers/project_migrate_hashed_storage_worker.rb +++ b/app/workers/project_migrate_hashed_storage_worker.rb @@ -2,10 +2,34 @@ class ProjectMigrateHashedStorageWorker include Sidekiq::Worker include DedicatedSidekiqQueue + LEASE_TIMEOUT = 30.seconds.to_i + def perform(project_id) project = Project.find_by(id: project_id) return if project.nil? || project.pending_delete? 
- ::Projects::HashedStorageMigrationService.new(project, logger).execute + uuid = lease_for(project_id).try_obtain + if uuid + ::Projects::HashedStorageMigrationService.new(project, logger).execute + else + false + end + rescue => ex + cancel_lease_for(project_id, uuid) if uuid + raise ex + end + + def lease_for(project_id) + Gitlab::ExclusiveLease.new(lease_key(project_id), timeout: LEASE_TIMEOUT) + end + + private + + def lease_key(project_id) + "project_migrate_hashed_storage_worker:#{project_id}" + end + + def cancel_lease_for(project_id, uuid) + Gitlab::ExclusiveLease.cancel(lease_key(project_id), uuid) end end diff --git a/changelogs/unreleased/15588-fix-empty-dropdown-on-create-new-pipeline-in-case-of-validation-errors.yml b/changelogs/unreleased/15588-fix-empty-dropdown-on-create-new-pipeline-in-case-of-validation-errors.yml new file mode 100644 index 00000000000..a4934c8896d --- /dev/null +++ b/changelogs/unreleased/15588-fix-empty-dropdown-on-create-new-pipeline-in-case-of-validation-errors.yml @@ -0,0 +1,5 @@ +--- +title: Initializes the branches dropdown when the 'Start new pipeline' failed due to validation errors +merge_request: 15588 +author: Christiaan Van den Poel +type: fixed diff --git a/changelogs/unreleased/38962-automatically-run-a-pipeline-when-auto-devops-is-turned-on-in-project-settings.yml b/changelogs/unreleased/38962-automatically-run-a-pipeline-when-auto-devops-is-turned-on-in-project-settings.yml new file mode 100644 index 00000000000..a4d703bc69f --- /dev/null +++ b/changelogs/unreleased/38962-automatically-run-a-pipeline-when-auto-devops-is-turned-on-in-project-settings.yml @@ -0,0 +1,5 @@ +--- +title: Add the option to automatically run a pipeline after updating AutoDevOps settings +merge_request: 15380 +author: +type: changed diff --git a/changelogs/unreleased/40373-fix-issue-note-submit-disabled-on-paste.yml b/changelogs/unreleased/40373-fix-issue-note-submit-disabled-on-paste.yml new file mode 100644 index 00000000000..e683e60397e --- /dev/null +++ b/changelogs/unreleased/40373-fix-issue-note-submit-disabled-on-paste.yml @@ -0,0 +1,6 @@ +--- +title: Fix Issue comment submit button being disabled when pasting content from another + GFM note +merge_request: 15530 +author: +type: fixed diff --git a/changelogs/unreleased/40530-merge-request-generates-wrong-diff-when-branch-and-tag-have-the-same-name.yml b/changelogs/unreleased/40530-merge-request-generates-wrong-diff-when-branch-and-tag-have-the-same-name.yml new file mode 100644 index 00000000000..e9fae6fe0d7 --- /dev/null +++ b/changelogs/unreleased/40530-merge-request-generates-wrong-diff-when-branch-and-tag-have-the-same-name.yml @@ -0,0 +1,5 @@ +--- +title: Fix merge requests where the source or target branch name matches a tag name +merge_request: 15591 +author: +type: fixed diff --git a/changelogs/unreleased/40561-environment-scope-value-is-not-trimmed.yml b/changelogs/unreleased/40561-environment-scope-value-is-not-trimmed.yml new file mode 100644 index 00000000000..e0e3ddbdaa8 --- /dev/null +++ b/changelogs/unreleased/40561-environment-scope-value-is-not-trimmed.yml @@ -0,0 +1,5 @@ +--- +title: Strip leading & trailing whitespaces in CI/CD secret variable keys +merge_request: 15615 +author: +type: fixed diff --git a/changelogs/unreleased/40568-bump-seed-fu-to-2-3-7.yml b/changelogs/unreleased/40568-bump-seed-fu-to-2-3-7.yml new file mode 100644 index 00000000000..708269d5c83 --- /dev/null +++ b/changelogs/unreleased/40568-bump-seed-fu-to-2-3-7.yml @@ -0,0 +1,5 @@ +--- +title: Upgrade seed-fu to 
2.3.7 +merge_request: 15607 +author: Takuya Noguchi +type: other diff --git a/changelogs/unreleased/an-gitaly-timeouts.yml b/changelogs/unreleased/an-gitaly-timeouts.yml new file mode 100644 index 00000000000..e18d82b2704 --- /dev/null +++ b/changelogs/unreleased/an-gitaly-timeouts.yml @@ -0,0 +1,5 @@ +--- +title: Add timeouts for Gitaly calls +merge_request: 15047 +author: +type: performance diff --git a/changelogs/unreleased/default-values-for-mr-states.yml b/changelogs/unreleased/default-values-for-mr-states.yml new file mode 100644 index 00000000000..f873a5335d0 --- /dev/null +++ b/changelogs/unreleased/default-values-for-mr-states.yml @@ -0,0 +1,5 @@ +--- +title: Fix defaults for MR states and merge statuses +merge_request: +author: +type: fixed diff --git a/changelogs/unreleased/dm-search-pattern.yml b/changelogs/unreleased/dm-search-pattern.yml new file mode 100644 index 00000000000..1670d8c4b9a --- /dev/null +++ b/changelogs/unreleased/dm-search-pattern.yml @@ -0,0 +1,5 @@ +--- +title: Use fuzzy search with minimum length of 3 characters where appropriate +merge_request: +author: +type: performance diff --git a/changelogs/unreleased/hashed-storage-attachments-migration-path.yml b/changelogs/unreleased/hashed-storage-attachments-migration-path.yml new file mode 100644 index 00000000000..32535437046 --- /dev/null +++ b/changelogs/unreleased/hashed-storage-attachments-migration-path.yml @@ -0,0 +1,5 @@ +--- +title: Hashed Storage migration script now supports migrating project attachments +merge_request: 15352 +author: +type: added diff --git a/changelogs/unreleased/tm-feature-list-runners-jobs-api.yml b/changelogs/unreleased/tm-feature-list-runners-jobs-api.yml new file mode 100644 index 00000000000..d75a2b68c30 --- /dev/null +++ b/changelogs/unreleased/tm-feature-list-runners-jobs-api.yml @@ -0,0 +1,5 @@ +--- +title: New API endpoint - list jobs for a specified runner +merge_request: 15432 +author: +type: added diff --git a/changelogs/unreleased/tm-feature-namespace-by-id-api.yml b/changelogs/unreleased/tm-feature-namespace-by-id-api.yml new file mode 100644 index 00000000000..bc4a8949d28 --- /dev/null +++ b/changelogs/unreleased/tm-feature-namespace-by-id-api.yml @@ -0,0 +1,5 @@ +--- +title: Add new API endpoint - get a namespace by ID +merge_request: 15442 +author: +type: added diff --git a/config/sidekiq_queues.yml b/config/sidekiq_queues.yml index a8b918177de..bc7c431731a 100644 --- a/config/sidekiq_queues.yml +++ b/config/sidekiq_queues.yml @@ -28,6 +28,7 @@ - [build, 2] - [pipeline, 2] - [pipeline_processing, 5] + - [pipeline_creation, 4] - [pipeline_default, 3] - [pipeline_cache, 3] - [pipeline_hooks, 2] diff --git a/db/migrate/20171101130535_add_gitaly_timeout_properties_to_application_settings.rb b/db/migrate/20171101130535_add_gitaly_timeout_properties_to_application_settings.rb new file mode 100644 index 00000000000..de621e7111c --- /dev/null +++ b/db/migrate/20171101130535_add_gitaly_timeout_properties_to_application_settings.rb @@ -0,0 +1,31 @@ +# See http://doc.gitlab.com/ce/development/migration_style_guide.html +# for more information on how to write migrations for GitLab. + +class AddGitalyTimeoutPropertiesToApplicationSettings < ActiveRecord::Migration + include Gitlab::Database::MigrationHelpers + + DOWNTIME = false + + disable_ddl_transaction! 
+ + def up + add_column_with_default :application_settings, + :gitaly_timeout_default, + :integer, + default: 55 + add_column_with_default :application_settings, + :gitaly_timeout_medium, + :integer, + default: 30 + add_column_with_default :application_settings, + :gitaly_timeout_fast, + :integer, + default: 10 + end + + def down + remove_column :application_settings, :gitaly_timeout_default + remove_column :application_settings, :gitaly_timeout_medium + remove_column :application_settings, :gitaly_timeout_fast + end +end diff --git a/db/migrate/20171124125042_add_default_values_to_merge_request_states.rb b/db/migrate/20171124125042_add_default_values_to_merge_request_states.rb new file mode 100644 index 00000000000..d08863c3b78 --- /dev/null +++ b/db/migrate/20171124125042_add_default_values_to_merge_request_states.rb @@ -0,0 +1,19 @@ +# See http://doc.gitlab.com/ce/development/migration_style_guide.html +# for more information on how to write migrations for GitLab. + +class AddDefaultValuesToMergeRequestStates < ActiveRecord::Migration + include Gitlab::Database::MigrationHelpers + + # Set this constant to true if this migration requires downtime. + DOWNTIME = false + + def up + change_column_default :merge_requests, :state, :opened + change_column_default :merge_requests, :merge_status, :unchecked + end + + def down + change_column_default :merge_requests, :state, nil + change_column_default :merge_requests, :merge_status, nil + end +end diff --git a/db/migrate/20171124125748_populate_missing_merge_request_statuses.rb b/db/migrate/20171124125748_populate_missing_merge_request_statuses.rb new file mode 100644 index 00000000000..72fbab59f4c --- /dev/null +++ b/db/migrate/20171124125748_populate_missing_merge_request_statuses.rb @@ -0,0 +1,50 @@ +# See http://doc.gitlab.com/ce/development/migration_style_guide.html +# for more information on how to write migrations for GitLab. + +class PopulateMissingMergeRequestStatuses < ActiveRecord::Migration + include Gitlab::Database::MigrationHelpers + + # Set this constant to true if this migration requires downtime. + DOWNTIME = false + + disable_ddl_transaction! + + class MergeRequest < ActiveRecord::Base + include EachBatch + + self.table_name = 'merge_requests' + end + + def up + say 'Populating missing merge_requests.state values' + + # GitLab.com has no rows where "state" is NULL, and technically this should + # never happen. However it doesn't hurt to be 100% certain. + MergeRequest.where(state: nil).each_batch do |batch| + batch.update_all(state: 'opened') + end + + say 'Populating missing merge_requests.merge_status values. ' \ + 'This will take a few minutes...' + + # GitLab.com has 66 880 rows where "merge_status" is NULL, dating back all + # the way to 2011. + MergeRequest.where(merge_status: nil).each_batch(of: 10_000) do |batch| + batch.update_all(merge_status: 'unchecked') + + # We want to give PostgreSQL some time to vacuum any dead tuples. In + # production we see it takes roughly 1 minute for a vacuuming run to clear + # out 10-20k dead tuples, so we'll wait for 90 seconds between every + # batch. + sleep(90) if sleep? + end + end + + def down + # Reverting this makes no sense. + end + + def sleep? + Rails.env.staging? || Rails.env.production? 
+ end +end diff --git a/db/migrate/20171124132536_make_merge_request_statuses_not_null.rb b/db/migrate/20171124132536_make_merge_request_statuses_not_null.rb new file mode 100644 index 00000000000..4bb09126036 --- /dev/null +++ b/db/migrate/20171124132536_make_merge_request_statuses_not_null.rb @@ -0,0 +1,14 @@ +# See http://doc.gitlab.com/ce/development/migration_style_guide.html +# for more information on how to write migrations for GitLab. + +class MakeMergeRequestStatusesNotNull < ActiveRecord::Migration + include Gitlab::Database::MigrationHelpers + + # Set this constant to true if this migration requires downtime. + DOWNTIME = false + + def change + change_column_null :merge_requests, :state, false + change_column_null :merge_requests, :merge_status, false + end +end diff --git a/db/schema.rb b/db/schema.rb index d10561099b7..1e524621b4f 100644 --- a/db/schema.rb +++ b/db/schema.rb @@ -11,7 +11,7 @@ # # It's strongly recommended that you check this file into your version control system. -ActiveRecord::Schema.define(version: 20171121144800) do +ActiveRecord::Schema.define(version: 20171124132536) do # These are extensions that must be enabled in order to support this database enable_extension "plpgsql" @@ -150,6 +150,9 @@ ActiveRecord::Schema.define(version: 20171121144800) do t.integer "throttle_authenticated_web_period_in_seconds", default: 3600, null: false t.boolean "password_authentication_enabled_for_web" t.boolean "password_authentication_enabled_for_git", default: true + t.integer "gitaly_timeout_default", default: 55, null: false + t.integer "gitaly_timeout_medium", default: 30, null: false + t.integer "gitaly_timeout_fast", default: 10, null: false end create_table "audit_events", force: :cascade do |t| @@ -1049,8 +1052,8 @@ ActiveRecord::Schema.define(version: 20171121144800) do t.datetime "created_at" t.datetime "updated_at" t.integer "milestone_id" - t.string "state" - t.string "merge_status" + t.string "state", default: "opened", null: false + t.string "merge_status", default: "unchecked", null: false t.integer "target_project_id", null: false t.integer "iid" t.text "description" diff --git a/doc/administration/raketasks/storage.md b/doc/administration/raketasks/storage.md index bac8fa4bd9d..6ec5baeb6e3 100644 --- a/doc/administration/raketasks/storage.md +++ b/doc/administration/raketasks/storage.md @@ -1,10 +1,43 @@ # Repository Storage Rake Tasks This is a collection of rake tasks you can use to help you list and migrate -existing projects from Legacy storage to the new Hashed storage type. +existing projects and their associated attachments from Legacy storage to +the new Hashed storage type. You can read more about the storage types [here][storage-types]. +## Migrate existing projects to Hashed storage + +Before migrating your existing projects, you should +[enable hashed storage][storage-migration] for the new projects as well. + +This task will schedule all your existing projects and their associated attachments to be migrated to the +**Hashed** storage type: + +**Omnibus Installation** + +```bash +gitlab-rake gitlab:storage:migrate_to_hashed +``` + +**Source Installation** + +```bash +rake gitlab:storage:migrate_to_hashed + +``` + +You can monitor the progress in the _Admin > Monitoring > Background jobs_ screen. +There is a specific queue you can watch to see how long it will take to finish: **project_migrate_hashed_storage** + +After it reaches zero, you can confirm every project has been migrated by running the commands below.
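For a quick look at how many of these jobs are still queued, the same number can be read from a Rails console via Sidekiq's standard API. This is a minimal sketch, assuming console access on the GitLab host; the queue name is the one mentioned above:

```ruby
# Jobs still waiting in the hashed-storage migration queue; 0 means every
# scheduled project has been picked up by Sidekiq.
require 'sidekiq/api'

Sidekiq::Queue.new('project_migrate_hashed_storage').size
```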
+If you find it necessary, you can run this migration script again to schedule missing projects. + +Any error or warning will be logged in the Sidekiq log file. + +You only need the `gitlab:storage:migrate_to_hashed` rake task to migrate your repositories, but we have additional +commands below that help you inspect projects and attachments in both legacy and hashed storage. + ## List projects on Legacy storage To have a simple summary of projects using **Legacy** storage: @@ -73,35 +106,73 @@ rake gitlab:storage:list_hashed_projects ``` -## Migrate existing projects to Hashed storage +## List attachments on Legacy storage -Before migrating your existing projects, you should -[enable hashed storage][storage-migration] for the new projects as well. +To have a simple summary of project attachments using **Legacy** storage: -This task will schedule all your existing projects to be migrated to the -**Hashed** storage type: +**Omnibus Installation** + +```bash +gitlab-rake gitlab:storage:legacy_attachments +``` + +**Source Installation** + +```bash +rake gitlab:storage:legacy_attachments + +``` + +------ + +To list project attachments using **Legacy** storage: **Omnibus Installation** ```bash -gitlab-rake gitlab:storage:migrate_to_hashed +gitlab-rake gitlab:storage:list_legacy_attachments ``` **Source Installation** ```bash -rake gitlab:storage:migrate_to_hashed +rake gitlab:storage:list_legacy_attachments ``` -You can monitor the progress in the _Admin > Monitoring > Background jobs_ screen. -There is a specific Queue you can watch to see how long it will take to finish: **project_migrate_hashed_storage** +## List attachments on Hashed storage -After it reaches zero, you can confirm every project has been migrated by running the commands above. -If you find it necessary, you can run this migration script again to schedule missing projects. +To have a simple summary of project attachments using **Hashed** storage: + +**Omnibus Installation** + +```bash +gitlab-rake gitlab:storage:hashed_attachments +``` -Any error or warning will be logged in the sidekiq log file. +**Source Installation** + +```bash +rake gitlab:storage:hashed_attachments + +``` + +------ + +To list project attachments using **Hashed** storage: + +**Omnibus Installation** +```bash +gitlab-rake gitlab:storage:list_hashed_attachments +``` + +**Source Installation** + +```bash +rake gitlab:storage:list_hashed_attachments + +``` [storage-types]: ../repository_storage_types.md [storage-migration]: ../repository_storage_types.md#how-to-migrate-to-hashed-storage diff --git a/doc/administration/troubleshooting/sidekiq.md b/doc/administration/troubleshooting/sidekiq.md index b71f8fabbc8..9d157720ad2 100644 --- a/doc/administration/troubleshooting/sidekiq.md +++ b/doc/administration/troubleshooting/sidekiq.md @@ -11,7 +11,7 @@ troubleshooting steps that will help you diagnose the bottleneck. debug steps with GitLab Support so the backtraces can be analyzed by our team. It may reveal a bug or necessary improvement in GitLab. -> **Note:** In any of the backtraces, be weary of suspecting cases where every +> **Note:** In any of the backtraces, be wary of suspecting cases where every thread appears to be waiting in the database, Redis, or waiting to acquire a mutex. This **may** mean there's contention in the database, for example, but look for one thread that is different than the rest.
This other thread diff --git a/doc/api/namespaces.md b/doc/api/namespaces.md index 5c0bebbaeb0..25cae5ce1f9 100644 --- a/doc/api/namespaces.md +++ b/doc/api/namespaces.md @@ -89,3 +89,55 @@ Example response: } ] ``` + +## Get namespace by ID + +Get a namespace by ID. + +``` +GET /namespaces/:id +``` + +| Attribute | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `id` | integer/string | yes | ID or path of the namespace | + +Example request: + +```bash +curl --header "PRIVATE-TOKEN: 9koXpg98eAheJpvBs5tK" https://gitlab.example.com/api/v4/namespaces/2 +``` + +Example response: + +```json +{ + "id": 2, + "name": "group1", + "path": "group1", + "kind": "group", + "full_path": "group1", + "parent_id": null, + "members_count_with_descendants": 2 +} +``` + +Example request: + +```bash +curl --header "PRIVATE-TOKEN: 9koXpg98eAheJpvBs5tK" https://gitlab.example.com/api/v4/namespaces/group1 +``` + +Example response: + +```json +{ + "id": 2, + "name": "group1", + "path": "group1", + "kind": "group", + "full_path": "group1", + "parent_id": null, + "members_count_with_descendants": 2 +} +``` diff --git a/doc/api/runners.md b/doc/api/runners.md index 6304a496f94..015b09a745e 100644 --- a/doc/api/runners.md +++ b/doc/api/runners.md @@ -215,6 +215,91 @@ DELETE /runners/:id curl --request DELETE --header "PRIVATE-TOKEN: 9koXpg98eAheJpvBs5tK" "https://gitlab.example.com/api/v4/runners/6" ``` + +## List runner's jobs + +List jobs that are being processed or were processed by the specified Runner. + +``` +GET /runners/:id/jobs +``` + +| Attribute | Type | Required | Description | +|-----------|---------|----------|---------------------| +| `id` | integer | yes | The ID of a runner | +| `status` | string | no | Status of the job; one of: `running`, `success`, `failed`, `canceled` | + +``` +curl --header "PRIVATE-TOKEN: 9koXpg98eAheJpvBs5tK" "https://gitlab.example.com/api/v4/runners/1/jobs?status=running" +``` + +Example response: + +```json +[ + { + "id": 2, + "status": "running", + "stage": "test", + "name": "test", + "ref": "master", + "tag": false, + "coverage": null, + "created_at": "2017-11-16T08:50:29.000Z", + "started_at": "2017-11-16T08:51:29.000Z", + "finished_at": "2017-11-16T08:53:29.000Z", + "duration": 120, + "user": { + "id": 1, + "name": "John Doe2", + "username": "user2", + "state": "active", + "avatar_url": "http://www.gravatar.com/avatar/c922747a93b40d1ea88262bf1aebee62?s=80&d=identicon", + "web_url": "http://localhost/user2", + "created_at": "2017-11-16T18:38:46.000Z", + "bio": null, + "location": null, + "skype": "", + "linkedin": "", + "twitter": "", + "website_url": "", + "organization": null + }, + "commit": { + "id": "97de212e80737a608d939f648d959671fb0a0142", + "short_id": "97de212e", + "title": "Update configuration\r", + "created_at": "2017-11-16T08:50:28.000Z", + "parent_ids": [ + "1b12f15a11fc6e62177bef08f47bc7b5ce50b141", + "498214de67004b1da3d820901307bed2a68a8ef6" + ], + "message": "See merge request !123", + "author_name": "John Doe2", + "author_email": "user2@example.org", + "authored_date": "2017-11-16T08:50:27.000Z", + "committer_name": "John Doe2", + "committer_email": "user2@example.org", + "committed_date": "2017-11-16T08:50:27.000Z" + }, + "pipeline": { + "id": 2, + "sha": "97de212e80737a608d939f648d959671fb0a0142", + "ref": "master", + "status": "running" + }, + "project": { + "id": 1, + "description": null, + "name": "project1", + "name_with_namespace": "John Doe2 / project1", + "path": "project1", + "path_with_namespace":
"namespace1/project1", + "created_at": "2017-11-16T18:38:46.620Z" + } + } +] +``` + ## List project's runners List all runners (specific and shared) available in the project. Shared runners diff --git a/doc/topics/autodevops/img/auto_devops_settings.png b/doc/topics/autodevops/img/auto_devops_settings.png Binary files differnew file mode 100644 index 00000000000..b572cc5b855 --- /dev/null +++ b/doc/topics/autodevops/img/auto_devops_settings.png diff --git a/doc/topics/autodevops/index.md b/doc/topics/autodevops/index.md index 28308fc905c..914217772b8 100644 --- a/doc/topics/autodevops/index.md +++ b/doc/topics/autodevops/index.md @@ -121,7 +121,7 @@ Google Cloud. ## Enabling Auto DevOps -NOTE: **Note:** +**Note:** If you haven't done already, read the [prerequisites](#prerequisites) to make full use of Auto DevOps. If this is your fist time, we recommend you follow the [quick start guide](#quick-start). @@ -129,10 +129,14 @@ full use of Auto DevOps. If this is your fist time, we recommend you follow the 1. Go to your project's **Settings > CI/CD > General pipelines settings** and find the Auto DevOps section 1. Select "Enable Auto DevOps" +1. After selecting an option to enable Auto DevOps, a checkbox will appear below + so you can immediately run a pipeline on the default branch 1. Optionally, but recommended, add in the [base domain](#auto-devops-base-domain) that will be used by Kubernetes to deploy your application 1. Hit **Save changes** for the changes to take effect + + Now that it's enabled, there are a few more steps depending on whether your project has a `.gitlab-ci.yml` or not: diff --git a/lib/api/entities.rb b/lib/api/entities.rb index 7d5d68c8f14..ce332fe85d2 100644 --- a/lib/api/entities.rb +++ b/lib/api/entities.rb @@ -80,16 +80,21 @@ module API expose :group_access, as: :group_access_level end - class BasicProjectDetails < Grape::Entity - expose :id, :description, :default_branch, :tag_list - expose :ssh_url_to_repo, :http_url_to_repo, :web_url + class ProjectIdentity < Grape::Entity + expose :id, :description expose :name, :name_with_namespace expose :path, :path_with_namespace + expose :created_at + end + + class BasicProjectDetails < ProjectIdentity + expose :default_branch, :tag_list + expose :ssh_url_to_repo, :http_url_to_repo, :web_url expose :avatar_url do |project, options| project.avatar_url(only_path: false) end expose :star_count, :forks_count - expose :created_at, :last_activity_at + expose :last_activity_at end class Project < BasicProjectDetails @@ -827,17 +832,24 @@ module API expose :id, :sha, :ref, :status end - class Job < Grape::Entity + class JobBasic < Grape::Entity expose :id, :status, :stage, :name, :ref, :tag, :coverage expose :created_at, :started_at, :finished_at expose :duration expose :user, with: User - expose :artifacts_file, using: JobArtifactFile, if: -> (job, opts) { job.artifacts? } expose :commit, with: Commit - expose :runner, with: Runner expose :pipeline, with: PipelineBasic end + class Job < JobBasic + expose :artifacts_file, using: JobArtifactFile, if: -> (job, opts) { job.artifacts? 
} + expose :runner, with: Runner + end + + class JobBasicWithProject < JobBasic + expose :project, with: ProjectIdentity + end + class Trigger < Grape::Entity expose :id expose :token, :description diff --git a/lib/api/helpers.rb b/lib/api/helpers.rb index b26c61ab8da..686bf7a3c2b 100644 --- a/lib/api/helpers.rb +++ b/lib/api/helpers.rb @@ -50,6 +50,10 @@ module API initial_current_user != current_user end + def user_namespace + @user_namespace ||= find_namespace!(params[:id]) + end + def user_group @group ||= find_group!(params[:id]) end @@ -112,6 +116,24 @@ module API end end + def find_namespace(id) + if id.to_s =~ /^\d+$/ + Namespace.find_by(id: id) + else + Namespace.find_by_full_path(id) + end + end + + def find_namespace!(id) + namespace = find_namespace(id) + + if can?(current_user, :read_namespace, namespace) + namespace + else + not_found!('Namespace') + end + end + def find_project_label(id) label = available_labels.find_by_id(id) || available_labels.find_by_title(id) label || not_found!('Label') diff --git a/lib/api/helpers/internal_helpers.rb b/lib/api/helpers/internal_helpers.rb index 4b3c473b0bb..d6dea4c30e3 100644 --- a/lib/api/helpers/internal_helpers.rb +++ b/lib/api/helpers/internal_helpers.rb @@ -2,8 +2,8 @@ module API module Helpers module InternalHelpers SSH_GITALY_FEATURES = { - 'git-receive-pack' => :ssh_receive_pack, - 'git-upload-pack' => :ssh_upload_pack + 'git-receive-pack' => [:ssh_receive_pack, Gitlab::GitalyClient::MigrationStatus::OPT_IN], + 'git-upload-pack' => [:ssh_upload_pack, Gitlab::GitalyClient::MigrationStatus::OPT_OUT] }.freeze def wiki? @@ -102,8 +102,8 @@ module API # Return the Gitaly Address if it is enabled def gitaly_payload(action) - feature = SSH_GITALY_FEATURES[action] - return unless feature && Gitlab::GitalyClient.feature_enabled?(feature) + feature, status = SSH_GITALY_FEATURES[action] + return unless feature && Gitlab::GitalyClient.feature_enabled?(feature, status: status) { repository: repository.gitaly_repository, diff --git a/lib/api/namespaces.rb b/lib/api/namespaces.rb index f1eaff6b0eb..32b77aedba8 100644 --- a/lib/api/namespaces.rb +++ b/lib/api/namespaces.rb @@ -19,6 +19,16 @@ module API present paginate(namespaces), with: Entities::Namespace, current_user: current_user end + + desc 'Get a namespace by ID' do + success Entities::Namespace + end + params do + requires :id, type: String, desc: "Namespace's ID or path" + end + get ':id' do + present user_namespace, with: Entities::Namespace, current_user: current_user + end end end end diff --git a/lib/api/runners.rb b/lib/api/runners.rb index e816fcdd928..996457c5dfe 100644 --- a/lib/api/runners.rb +++ b/lib/api/runners.rb @@ -84,6 +84,23 @@ module API destroy_conditionally!(runner) end + + desc 'List jobs running on a runner' do + success Entities::JobBasicWithProject + end + params do + requires :id, type: Integer, desc: 'The ID of the runner' + optional :status, type: String, desc: 'Status of the job', values: Ci::Build::AVAILABLE_STATUSES + use :pagination + end + get ':id/jobs' do + runner = get_runner(params[:id]) + authenticate_list_runners_jobs!(runner) + + jobs = RunnerJobsFinder.new(runner, params).execute + + present paginate(jobs), with: Entities::JobBasicWithProject + end end params do @@ -192,6 +209,12 @@ module API forbidden!("No access granted") unless user_can_access_runner?(runner) end + def authenticate_list_runners_jobs!(runner) + return if current_user.admin? 
+ + forbidden!("No access granted") unless user_can_access_runner?(runner) + end + def user_can_access_runner?(runner) current_user.ci_authorized_runners.exists?(runner.id) end diff --git a/lib/api/settings.rb b/lib/api/settings.rb index 06373fe5069..cee4d309816 100644 --- a/lib/api/settings.rb +++ b/lib/api/settings.rb @@ -123,6 +123,9 @@ module API end optional :terminal_max_session_time, type: Integer, desc: 'Maximum time for web terminal websocket connection (in seconds). Set to 0 for unlimited time.' optional :polling_interval_multiplier, type: BigDecimal, desc: 'Interval multiplier used by endpoints that perform polling. Set to 0 to disable polling.' + optional :gitaly_timeout_default, type: Integer, desc: 'Default Gitaly timeout, in seconds. Set to 0 to disable timeouts.' + optional :gitaly_timeout_medium, type: Integer, desc: 'Medium Gitaly timeout, in seconds. Set to 0 to disable timeouts.' + optional :gitaly_timeout_fast, type: Integer, desc: 'Gitaly fast operation timeout, in seconds. Set to 0 to disable timeouts.' ApplicationSetting::SUPPORTED_KEY_TYPES.each do |type| optional :"#{type}_key_restriction", diff --git a/lib/gitlab/git/repository.rb b/lib/gitlab/git/repository.rb index a6e7c410bdd..d399636bb28 100644 --- a/lib/gitlab/git/repository.rb +++ b/lib/gitlab/git/repository.rb @@ -1046,9 +1046,15 @@ module Gitlab end def with_repo_tmp_commit(start_repository, start_branch_name, sha) + source_ref = start_branch_name + + unless Gitlab::Git.branch_ref?(source_ref) + source_ref = "#{Gitlab::Git::BRANCH_REF_PREFIX}#{source_ref}" + end + tmp_ref = fetch_ref( start_repository, - source_ref: "#{Gitlab::Git::BRANCH_REF_PREFIX}#{start_branch_name}", + source_ref: source_ref, target_ref: "refs/tmp/#{SecureRandom.hex}" ) diff --git a/lib/gitlab/gitaly_client.rb b/lib/gitlab/gitaly_client.rb index 572f4c892f6..f27cd800bdd 100644 --- a/lib/gitlab/gitaly_client.rb +++ b/lib/gitlab/gitaly_client.rb @@ -31,14 +31,38 @@ module Gitlab CLIENT_NAME = (Sidekiq.server? ? 
'gitlab-sidekiq' : 'gitlab-web').freeze MUTEX = Mutex.new - private_constant :MUTEX + METRICS_MUTEX = Mutex.new + private_constant :MUTEX, :METRICS_MUTEX class << self - attr_accessor :query_time, :migrate_histogram + attr_accessor :query_time end self.query_time = 0 - self.migrate_histogram = Gitlab::Metrics.histogram(:gitaly_migrate_call_duration, "Gitaly migration call execution timings") + + def self.migrate_histogram + @migrate_histogram ||= + METRICS_MUTEX.synchronize do + # If a thread was blocked on the mutex, the value was set already + return @migrate_histogram if @migrate_histogram + + Gitlab::Metrics.histogram(:gitaly_migrate_call_duration_seconds, + "Gitaly migration call execution timings", + gitaly_enabled: nil, feature: nil) + end + end + + def self.gitaly_call_histogram + @gitaly_call_histogram ||= + METRICS_MUTEX.synchronize do + # If a thread was blocked on the mutex, the value was set already + return @gitaly_call_histogram if @gitaly_call_histogram + + Gitlab::Metrics.histogram(:gitaly_controller_action_duration_seconds, + "Gitaly endpoint histogram by controller and action combination", + Gitlab::Metrics::Transaction::BASE_LABELS.merge(gitaly_service: nil, rpc: nil)) + end + end def self.stub(name, storage) MUTEX.synchronize do @@ -93,19 +117,30 @@ module Gitlab # kwargs.merge(deadline: Time.now + 10) # end # - def self.call(storage, service, rpc, request, remote_storage: nil) - start = Process.clock_gettime(Process::CLOCK_MONOTONIC) + def self.call(storage, service, rpc, request, remote_storage: nil, timeout: nil) + start = Gitlab::Metrics::System.monotonic_time enforce_gitaly_request_limits(:call) - kwargs = request_kwargs(storage, remote_storage: remote_storage) + kwargs = request_kwargs(storage, timeout, remote_storage: remote_storage) kwargs = yield(kwargs) if block_given? stub(service, storage).__send__(rpc, request, kwargs) # rubocop:disable GitlabSecurity/PublicSend ensure - self.query_time += Process.clock_gettime(Process::CLOCK_MONOTONIC) - start + duration = Gitlab::Metrics::System.monotonic_time - start + + # Keep track, separately, for the performance bar + self.query_time += duration + gitaly_call_histogram.observe( + current_transaction_labels.merge(gitaly_service: service.to_s, rpc: rpc.to_s), + duration) end - def self.request_kwargs(storage, remote_storage: nil) + def self.current_transaction_labels + Gitlab::Metrics::Transaction.current&.labels || {} + end + private_class_method :current_transaction_labels + + def self.request_kwargs(storage, timeout, remote_storage: nil) encoded_token = Base64.strict_encode64(token(storage).to_s) metadata = { 'authorization' => "Bearer #{encoded_token}", @@ -117,7 +152,22 @@ module Gitlab metadata['call_site'] = feature.to_s if feature metadata['gitaly-servers'] = address_metadata(remote_storage) if remote_storage - { metadata: metadata } + result = { metadata: metadata } + + # nil timeout indicates that we should use the default + timeout = default_timeout if timeout.nil?
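# A minimal sketch of where the default applied above comes from, assuming a Rails
# console: `default_timeout` reads the new `gitaly_timeout_*` application settings,
# so the three timeouts can be inspected or tuned through the settings record, e.g.
#
#   settings = Gitlab::CurrentSettings.current_application_settings
#   settings.gitaly_timeout_default                 # => 55 with the migration default
#   settings.update!(gitaly_timeout_medium: 20)     # hypothetical new value
#
# A value of 0 disables the timeout, matching the API parameter descriptions.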
+ + return result unless timeout > 0 + + # Do not use `Time.now` for deadline calculation, since it + # will be affected by Timecop in some tests, but grpc's c-core + # uses system time instead of timecop's time, so tests will fail + # `Time.at(Process.clock_gettime(Process::CLOCK_REALTIME))` will + # circumvent timecop + deadline = Time.at(Process.clock_gettime(Process::CLOCK_REALTIME)) + timeout + result[:deadline] = deadline + + result end def self.token(storage) @@ -178,10 +228,10 @@ module Gitlab feature_stack = Thread.current[:gitaly_feature_stack] ||= [] feature_stack.unshift(feature) begin - start = Process.clock_gettime(Process::CLOCK_MONOTONIC) + start = Gitlab::Metrics::System.monotonic_time yield is_enabled ensure - total_time = Process.clock_gettime(Process::CLOCK_MONOTONIC) - start + total_time = Gitlab::Metrics::System.monotonic_time - start migrate_histogram.observe({ gitaly_enabled: is_enabled, feature: feature }, total_time) feature_stack.shift Thread.current[:gitaly_feature_stack] = nil if feature_stack.empty? @@ -290,6 +340,26 @@ module Gitlab Google::Protobuf::RepeatedField.new(:bytes, a.map { |s| self.encode(s) } ) end + # The default timeout on all Gitaly calls + def self.default_timeout + return 0 if Sidekiq.server? + + timeout(:gitaly_timeout_default) + end + + def self.fast_timeout + timeout(:gitaly_timeout_fast) + end + + def self.medium_timeout + timeout(:gitaly_timeout_medium) + end + + def self.timeout(timeout_name) + Gitlab::CurrentSettings.current_application_settings[timeout_name] + end + private_class_method :timeout + # Count a stack. Used for n+1 detection def self.count_stack return unless RequestStore.active? diff --git a/lib/gitlab/gitaly_client/commit_service.rb b/lib/gitlab/gitaly_client/commit_service.rb index da5505cb2fe..34807d280e5 100644 --- a/lib/gitlab/gitaly_client/commit_service.rb +++ b/lib/gitlab/gitaly_client/commit_service.rb @@ -16,7 +16,7 @@ module Gitlab revision: GitalyClient.encode(revision) ) - response = GitalyClient.call(@repository.storage, :commit_service, :list_files, request) + response = GitalyClient.call(@repository.storage, :commit_service, :list_files, request, timeout: GitalyClient.medium_timeout) response.flat_map do |msg| msg.paths.map { |d| EncodingHelper.encode!(d.dup) } end @@ -29,7 +29,7 @@ module Gitlab child_id: child_id ) - GitalyClient.call(@repository.storage, :commit_service, :commit_is_ancestor, request).value + GitalyClient.call(@repository.storage, :commit_service, :commit_is_ancestor, request, timeout: GitalyClient.fast_timeout).value end def diff(from, to, options = {}) @@ -77,7 +77,7 @@ module Gitlab limit: limit.to_i ) - response = GitalyClient.call(@repository.storage, :commit_service, :tree_entry, request) + response = GitalyClient.call(@repository.storage, :commit_service, :tree_entry, request, timeout: GitalyClient.medium_timeout) entry = nil data = '' @@ -102,7 +102,7 @@ module Gitlab path: path.present? ? GitalyClient.encode(path) : '.' ) - response = GitalyClient.call(@repository.storage, :commit_service, :get_tree_entries, request) + response = GitalyClient.call(@repository.storage, :commit_service, :get_tree_entries, request, timeout: GitalyClient.medium_timeout) response.flat_map do |message| message.entries.map do |gitaly_tree_entry| @@ -129,7 +129,7 @@ module Gitlab request.before = Google::Protobuf::Timestamp.new(seconds: options[:before].to_i) if options[:before].present? request.path = options[:path] if options[:path].present? 
- GitalyClient.call(@repository.storage, :commit_service, :count_commits, request).count + GitalyClient.call(@repository.storage, :commit_service, :count_commits, request, timeout: GitalyClient.medium_timeout).count end def last_commit_for_path(revision, path) @@ -139,7 +139,7 @@ module Gitlab path: GitalyClient.encode(path.to_s) ) - gitaly_commit = GitalyClient.call(@repository.storage, :commit_service, :last_commit_for_path, request).commit + gitaly_commit = GitalyClient.call(@repository.storage, :commit_service, :last_commit_for_path, request, timeout: GitalyClient.fast_timeout).commit return unless gitaly_commit Gitlab::Git::Commit.new(@repository, gitaly_commit) @@ -152,7 +152,7 @@ module Gitlab to: to ) - response = GitalyClient.call(@repository.storage, :commit_service, :commits_between, request) + response = GitalyClient.call(@repository.storage, :commit_service, :commits_between, request, timeout: GitalyClient.medium_timeout) consume_commits_response(response) end @@ -165,7 +165,7 @@ module Gitlab ) request.order = opts[:order].upcase if opts[:order].present? - response = GitalyClient.call(@repository.storage, :commit_service, :find_all_commits, request) + response = GitalyClient.call(@repository.storage, :commit_service, :find_all_commits, request, timeout: GitalyClient.medium_timeout) consume_commits_response(response) end @@ -179,7 +179,7 @@ module Gitlab offset: offset.to_i ) - response = GitalyClient.call(@repository.storage, :commit_service, :commits_by_message, request) + response = GitalyClient.call(@repository.storage, :commit_service, :commits_by_message, request, timeout: GitalyClient.medium_timeout) consume_commits_response(response) end @@ -197,7 +197,7 @@ module Gitlab path: GitalyClient.encode(path) ) - response = GitalyClient.call(@repository.storage, :commit_service, :raw_blame, request) + response = GitalyClient.call(@repository.storage, :commit_service, :raw_blame, request, timeout: GitalyClient.medium_timeout) response.reduce("") { |memo, msg| memo << msg.data } end @@ -207,7 +207,7 @@ module Gitlab revision: GitalyClient.encode(revision) ) - response = GitalyClient.call(@repository.storage, :commit_service, :find_commit, request) + response = GitalyClient.call(@repository.storage, :commit_service, :find_commit, request, timeout: GitalyClient.medium_timeout) response.commit end @@ -217,7 +217,7 @@ module Gitlab repository: @gitaly_repo, revision: GitalyClient.encode(revision) ) - response = GitalyClient.call(@repository.storage, :diff_service, :commit_patch, request) + response = GitalyClient.call(@repository.storage, :diff_service, :commit_patch, request, timeout: GitalyClient.medium_timeout) response.sum(&:data) end @@ -227,7 +227,7 @@ module Gitlab repository: @gitaly_repo, revision: GitalyClient.encode(revision) ) - GitalyClient.call(@repository.storage, :commit_service, :commit_stats, request) + GitalyClient.call(@repository.storage, :commit_service, :commit_stats, request, timeout: GitalyClient.medium_timeout) end def find_commits(options) @@ -245,7 +245,7 @@ module Gitlab request.paths = GitalyClient.encode_repeated(Array(options[:path])) if options[:path].present? 
- response = GitalyClient.call(@repository.storage, :commit_service, :find_commits, request) + response = GitalyClient.call(@repository.storage, :commit_service, :find_commits, request, timeout: GitalyClient.medium_timeout) consume_commits_response(response) end @@ -259,7 +259,7 @@ module Gitlab request_params.merge!(Gitlab::Git::DiffCollection.collection_limits(options).to_h) request = Gitaly::CommitDiffRequest.new(request_params) - response = GitalyClient.call(@repository.storage, :diff_service, :commit_diff, request) + response = GitalyClient.call(@repository.storage, :diff_service, :commit_diff, request, timeout: GitalyClient.medium_timeout) GitalyClient::DiffStitcher.new(response) end diff --git a/lib/gitlab/gitaly_client/ref_service.rb b/lib/gitlab/gitaly_client/ref_service.rb index 31b04bc2650..066e4e183c0 100644 --- a/lib/gitlab/gitaly_client/ref_service.rb +++ b/lib/gitlab/gitaly_client/ref_service.rb @@ -46,7 +46,8 @@ module Gitlab commit_id: commit_id, prefix: ref_prefix ) - encode!(GitalyClient.call(@storage, :ref_service, :find_ref_name, request).name.dup) + response = GitalyClient.call(@storage, :ref_service, :find_ref_name, request, timeout: GitalyClient.medium_timeout) + encode!(response.name.dup) end def count_tag_names diff --git a/lib/gitlab/gitaly_client/repository_service.rb b/lib/gitlab/gitaly_client/repository_service.rb index 70cb16bd810..b9e606592d7 100644 --- a/lib/gitlab/gitaly_client/repository_service.rb +++ b/lib/gitlab/gitaly_client/repository_service.rb @@ -10,7 +10,9 @@ module Gitlab def exists? request = Gitaly::RepositoryExistsRequest.new(repository: @gitaly_repo) - GitalyClient.call(@storage, :repository_service, :repository_exists, request).exists + response = GitalyClient.call(@storage, :repository_service, :repository_exists, request, timeout: GitalyClient.fast_timeout) + + response.exists end def garbage_collect(create_bitmap) @@ -30,7 +32,8 @@ module Gitlab def repository_size request = Gitaly::RepositorySizeRequest.new(repository: @gitaly_repo) - GitalyClient.call(@storage, :repository_service, :repository_size, request).size + response = GitalyClient.call(@storage, :repository_service, :repository_size, request) + response.size end def apply_gitattributes(revision) @@ -61,7 +64,7 @@ module Gitlab def has_local_branches? request = Gitaly::HasLocalBranchesRequest.new(repository: @gitaly_repo) - response = GitalyClient.call(@storage, :repository_service, :has_local_branches, request) + response = GitalyClient.call(@storage, :repository_service, :has_local_branches, request, timeout: GitalyClient.fast_timeout) response.value end diff --git a/lib/gitlab/sql/pattern.rb b/lib/gitlab/sql/pattern.rb index 7c2d1d8f887..5f0c98cb5a4 100644 --- a/lib/gitlab/sql/pattern.rb +++ b/lib/gitlab/sql/pattern.rb @@ -4,9 +4,15 @@ module Gitlab extend ActiveSupport::Concern MIN_CHARS_FOR_PARTIAL_MATCHING = 3 - REGEX_QUOTED_WORD = /(?<=^| )"[^"]+"(?= |$)/ + REGEX_QUOTED_WORD = /(?<=\A| )"[^"]+"(?= |\z)/ class_methods do + def fuzzy_search(query, columns) + matches = columns.map { |col| fuzzy_arel_match(col, query) }.compact.reduce(:or) + + where(matches) + end + def to_pattern(query) if partial_matching?(query) "%#{sanitize_sql_like(query)}%" @@ -19,12 +25,19 @@ module Gitlab query.length >= MIN_CHARS_FOR_PARTIAL_MATCHING end - def to_fuzzy_arel(column, query) - words = select_fuzzy_words(query) + def fuzzy_arel_match(column, query) + query = query.squish + return nil unless query.present? 
- matches = words.map { |word| arel_table[column].matches(to_pattern(word)) } + words = select_fuzzy_words(query) - matches.reduce { |result, match| result.and(match) } + if words.any? + words.map { |word| arel_table[column].matches(to_pattern(word)) }.reduce(:and) + else + # No words of at least 3 chars, but we can search for an exact + # case insensitive match with the query as a whole + arel_table[column].matches(sanitize_sql_like(query)) + end end def select_fuzzy_words(query) @@ -32,7 +45,7 @@ module Gitlab query = quoted_words.reduce(query) { |q, quoted_word| q.sub(quoted_word, '') } - words = query.split(/\s+/) + words = query.split quoted_words.map! { |quoted_word| quoted_word[1..-2] } diff --git a/lib/tasks/gitlab/storage.rake b/lib/tasks/gitlab/storage.rake index e05be4a3405..8ac73bc8ff2 100644 --- a/lib/tasks/gitlab/storage.rake +++ b/lib/tasks/gitlab/storage.rake @@ -2,10 +2,10 @@ namespace :gitlab do namespace :storage do desc 'GitLab | Storage | Migrate existing projects to Hashed Storage' task migrate_to_hashed: :environment do - legacy_projects_count = Project.with_legacy_storage.count + legacy_projects_count = Project.with_unmigrated_storage.count if legacy_projects_count == 0 - puts 'There are no projects using legacy storage. Nothing to do!' + puts 'There are no projects requiring storage migration. Nothing to do!' next end @@ -23,22 +23,42 @@ namespace :gitlab do desc 'Gitlab | Storage | Summary of existing projects using Legacy Storage' task legacy_projects: :environment do - projects_summary(Project.with_legacy_storage) + relation_summary('projects', Project.without_storage_feature(:repository)) end desc 'Gitlab | Storage | List existing projects using Legacy Storage' task list_legacy_projects: :environment do - projects_list(Project.with_legacy_storage) + projects_list('projects using Legacy Storage', Project.without_storage_feature(:repository)) end desc 'Gitlab | Storage | Summary of existing projects using Hashed Storage' task hashed_projects: :environment do - projects_summary(Project.with_hashed_storage) + relation_summary('projects using Hashed Storage', Project.with_storage_feature(:repository)) end desc 'Gitlab | Storage | List existing projects using Hashed Storage' task list_hashed_projects: :environment do - projects_list(Project.with_hashed_storage) + projects_list('projects using Hashed Storage', Project.with_storage_feature(:repository)) + end + + desc 'Gitlab | Storage | Summary of project attachments using Legacy Storage' + task legacy_attachments: :environment do + relation_summary('attachments using Legacy Storage', legacy_attachments_relation) + end + + desc 'Gitlab | Storage | List existing project attachments using Legacy Storage' + task list_legacy_attachments: :environment do + attachments_list('attachments using Legacy Storage', legacy_attachments_relation) + end + + desc 'Gitlab | Storage | Summary of project attachments using Hashed Storage' + task hashed_attachments: :environment do + relation_summary('attachments using Hashed Storage', hashed_attachments_relation) + end + + desc 'Gitlab | Storage | List existing project attachments using Hashed Storage' + task list_hashed_attachments: :environment do + attachments_list('attachments using Hashed Storage', hashed_attachments_relation) end def batch_size @@ -46,29 +66,43 @@ namespace :gitlab do end def project_id_batches(&block) - Project.with_legacy_storage.in_batches(of: batch_size, start: ENV['ID_FROM'], finish: ENV['ID_TO']) do |relation| # rubocop: disable Cop/InBatches + 
Project.with_unmigrated_storage.in_batches(of: batch_size, start: ENV['ID_FROM'], finish: ENV['ID_TO']) do |relation| # rubocop: disable Cop/InBatches ids = relation.pluck(:id) yield ids.min, ids.max end end - def projects_summary(relation) - projects_count = relation.count - puts "* Found #{projects_count} projects".color(:green) + def legacy_attachments_relation + Upload.joins(<<~SQL).where('projects.storage_version < :version OR projects.storage_version IS NULL', version: Project::HASHED_STORAGE_FEATURES[:attachments]) + JOIN projects + ON (uploads.model_type='Project' AND uploads.model_id=projects.id) + SQL + end + + def hashed_attachments_relation + Upload.joins(<<~SQL).where('projects.storage_version >= :version', version: Project::HASHED_STORAGE_FEATURES[:attachments]) + JOIN projects + ON (uploads.model_type='Project' AND uploads.model_id=projects.id) + SQL + end + + def relation_summary(relation_name, relation) + relation_count = relation.count + puts "* Found #{relation_count} #{relation_name}".color(:green) - projects_count + relation_count end - def projects_list(relation) - projects_count = projects_summary(relation) + def projects_list(relation_name, relation) + relation_count = relation_summary(relation_name, relation) projects = relation.with_route limit = ENV.fetch('LIMIT', 500).to_i - return unless projects_count > 0 + return unless relation_count > 0 - puts " ! Displaying first #{limit} projects..." if projects_count > limit + puts " ! Displaying first #{limit} #{relation_name}..." if relation_count > limit counter = 0 projects.find_in_batches(batch_size: batch_size) do |batch| @@ -81,5 +115,26 @@ namespace :gitlab do end end end + + def attachments_list(relation_name, relation) + relation_count = relation_summary(relation_name, relation) + + limit = ENV.fetch('LIMIT', 500).to_i + + return unless relation_count > 0 + + puts " ! Displaying first #{limit} #{relation_name}..." 
if relation_count > limit + + counter = 0 + relation.find_in_batches(batch_size: batch_size) do |batch| + batch.each do |upload| + counter += 1 + + puts " - #{upload.path} (id: #{upload.id})".color(:red) + + return if counter >= limit # rubocop:disable Lint/NonLocalExitFromIterator + end + end + end end end diff --git a/spec/controllers/projects/pipelines_settings_controller_spec.rb b/spec/controllers/projects/pipelines_settings_controller_spec.rb index 21b6a6d45f5..b2d83a02290 100644 --- a/spec/controllers/projects/pipelines_settings_controller_spec.rb +++ b/spec/controllers/projects/pipelines_settings_controller_spec.rb @@ -12,19 +12,22 @@ describe Projects::PipelinesSettingsController do end describe 'PATCH update' do - before do + subject do patch :update, namespace_id: project.namespace.to_param, project_id: project, - project: { - auto_devops_attributes: params - } + project: { auto_devops_attributes: params, + run_auto_devops_pipeline_implicit: 'false', + run_auto_devops_pipeline_explicit: auto_devops_pipeline } end context 'when updating the auto_devops settings' do let(:params) { { enabled: '', domain: 'mepmep.md' } } + let(:auto_devops_pipeline) { 'false' } it 'redirects to the settings page' do + subject + expect(response).to have_gitlab_http_status(302) expect(flash[:notice]).to eq("Pipelines settings for '#{project.name}' were successfully updated.") end @@ -33,11 +36,32 @@ describe Projects::PipelinesSettingsController do let(:params) { { enabled: '' } } it 'allows enabled to be set to nil' do + subject project_auto_devops.reload expect(project_auto_devops.enabled).to be_nil end end + + context 'when run_auto_devops_pipeline is true' do + let(:auto_devops_pipeline) { 'true' } + + it 'queues a CreatePipelineWorker' do + expect(CreatePipelineWorker).to receive(:perform_async).with(project.id, user.id, project.default_branch, :web, any_args) + + subject + end + end + + context 'when run_auto_devops_pipeline is not true' do + let(:auto_devops_pipeline) { 'false' } + + it 'does not queue a CreatePipelineWorker' do + expect(CreatePipelineWorker).not_to receive(:perform_async).with(project.id, user.id, :web, any_args) + + subject + end + end end end end diff --git a/spec/features/projects/pipelines/pipelines_spec.rb b/spec/features/projects/pipelines/pipelines_spec.rb index 50f8f13d261..a1b1d94ae06 100644 --- a/spec/features/projects/pipelines/pipelines_spec.rb +++ b/spec/features/projects/pipelines/pipelines_spec.rb @@ -500,6 +500,18 @@ describe 'Pipelines', :js do end it { expect(page).to have_content('Missing .gitlab-ci.yml file') } + it 'creates a pipeline after first request failed and a valid gitlab-ci.yml file is available when trying again' do + click_button project.default_branch + + stub_ci_pipeline_to_return_yaml_file + + page.within '.dropdown-menu' do + click_link 'master' + end + + expect { click_on 'Create pipeline' } + .to change { Ci::Pipeline.count }.by(1) + end end end end diff --git a/spec/features/projects/settings/pipelines_settings_spec.rb b/spec/features/projects/settings/pipelines_settings_spec.rb index ea8f997409d..eb8e7265dd3 100644 --- a/spec/features/projects/settings/pipelines_settings_spec.rb +++ b/spec/features/projects/settings/pipelines_settings_spec.rb @@ -8,13 +8,14 @@ feature "Pipelines settings" do background do sign_in(user) project.team << [user, role] - visit project_pipelines_settings_path(project) end context 'for developer' do given(:role) { :developer } scenario 'to be disallowed to view' do + visit project_settings_ci_cd_path(project) + 
expect(page.status_code).to eq(404) end end @@ -23,6 +24,8 @@ feature "Pipelines settings" do given(:role) { :master } scenario 'be allowed to change' do + visit project_settings_ci_cd_path(project) + fill_in('Test coverage parsing', with: 'coverage_regex') click_on 'Save changes' @@ -32,6 +35,8 @@ feature "Pipelines settings" do end scenario 'updates auto_cancel_pending_pipelines' do + visit project_settings_ci_cd_path(project) + page.check('Auto-cancel redundant, pending pipelines') click_on 'Save changes' @@ -42,14 +47,119 @@ feature "Pipelines settings" do expect(checkbox).to be_checked end - scenario 'update auto devops settings' do - fill_in('project_auto_devops_attributes_domain', with: 'test.com') - page.choose('project_auto_devops_attributes_enabled_false') - click_on 'Save changes' + describe 'Auto DevOps' do + it 'update auto devops settings' do + visit project_settings_ci_cd_path(project) - expect(page.status_code).to eq(200) - expect(project.auto_devops).to be_present - expect(project.auto_devops).not_to be_enabled + fill_in('project_auto_devops_attributes_domain', with: 'test.com') + page.choose('project_auto_devops_attributes_enabled_false') + click_on 'Save changes' + + expect(page.status_code).to eq(200) + expect(project.auto_devops).to be_present + expect(project.auto_devops).not_to be_enabled + end + + describe 'Immediately run pipeline checkbox option', :js do + context 'when auto devops is set to instance default (enabled)' do + before do + stub_application_setting(auto_devops_enabled: true) + project.create_auto_devops!(enabled: nil) + visit project_settings_ci_cd_path(project) + end + + it 'does not show checkboxes on page-load' do + expect(page).to have_selector('.js-run-auto-devops-pipeline-checkbox-wrapper.hide', count: 1, visible: false) + end + + it 'selecting explicit disabled hides all checkboxes' do + page.choose('project_auto_devops_attributes_enabled_false') + + expect(page).to have_selector('.js-run-auto-devops-pipeline-checkbox-wrapper.hide', count: 1, visible: false) + end + + it 'selecting explicit enabled hides all checkboxes because we are already enabled' do + page.choose('project_auto_devops_attributes_enabled_true') + + expect(page).to have_selector('.js-run-auto-devops-pipeline-checkbox-wrapper.hide', count: 1, visible: false) + end + end + + context 'when auto devops is set to instance default (disabled)' do + before do + stub_application_setting(auto_devops_enabled: false) + project.create_auto_devops!(enabled: nil) + visit project_settings_ci_cd_path(project) + end + + it 'does not show checkboxes on page-load' do + expect(page).to have_selector('.js-run-auto-devops-pipeline-checkbox-wrapper.hide', count: 1, visible: false) + end + + it 'selecting explicit disabled hides all checkboxes' do + page.choose('project_auto_devops_attributes_enabled_false') + + expect(page).to have_selector('.js-run-auto-devops-pipeline-checkbox-wrapper.hide', count: 1, visible: false) + end + + it 'selecting explicit enabled shows a checkbox' do + page.choose('project_auto_devops_attributes_enabled_true') + + expect(page).to have_selector('.js-run-auto-devops-pipeline-checkbox-wrapper:not(.hide)', count: 1) + end + end + + context 'when auto devops is set to explicit disabled' do + before do + stub_application_setting(auto_devops_enabled: true) + project.create_auto_devops!(enabled: false) + visit project_settings_ci_cd_path(project) + end + + it 'does not show checkboxes on page-load' do + expect(page).to 
have_selector('.js-run-auto-devops-pipeline-checkbox-wrapper.hide', count: 2, visible: false) + end + + it 'selecting explicit enabled shows a checkbox' do + page.choose('project_auto_devops_attributes_enabled_true') + + expect(page).to have_selector('.js-run-auto-devops-pipeline-checkbox-wrapper:not(.hide)', count: 1) + end + + it 'selecting instance default (enabled) shows a checkbox' do + page.choose('project_auto_devops_attributes_enabled_') + + expect(page).to have_selector('.js-run-auto-devops-pipeline-checkbox-wrapper:not(.hide)', count: 1) + end + end + + context 'when auto devops is set to explicit enabled' do + before do + stub_application_setting(auto_devops_enabled: false) + project.create_auto_devops!(enabled: true) + visit project_settings_ci_cd_path(project) + end + + it 'does not have any checkboxes' do + expect(page).not_to have_selector('.js-run-auto-devops-pipeline-checkbox-wrapper', visible: false) + end + end + + context 'when master contains a .gitlab-ci.yml file' do + let(:project) { create(:project, :repository) } + + before do + project.repository.create_file(user, '.gitlab-ci.yml', "script: ['test']", message: 'test', branch_name: project.default_branch) + stub_application_setting(auto_devops_enabled: true) + project.create_auto_devops!(enabled: false) + visit project_settings_ci_cd_path(project) + end + + it 'does not have any checkboxes' do + expect(page).not_to have_selector('.js-run-auto-devops-pipeline-checkbox-wrapper', visible: false) + end + end + end end end end diff --git a/spec/finders/runner_jobs_finder_spec.rb b/spec/finders/runner_jobs_finder_spec.rb new file mode 100644 index 00000000000..4275b1a7ff1 --- /dev/null +++ b/spec/finders/runner_jobs_finder_spec.rb @@ -0,0 +1,39 @@ +require 'spec_helper' + +describe RunnerJobsFinder do + let(:project) { create(:project) } + let(:runner) { create(:ci_runner, :shared) } + + subject { described_class.new(runner, params).execute } + + describe '#execute' do + context 'when params is empty' do + let(:params) { {} } + let!(:job) { create(:ci_build, runner: runner, project: project) } + let!(:job1) { create(:ci_build, project: project) } + + it 'returns all jobs assigned to Runner' do + is_expected.to match_array(job) + is_expected.not_to match_array(job1) + end + end + + context 'when params contains status' do + HasStatus::AVAILABLE_STATUSES.each do |target_status| + context "when status is #{target_status}" do + let(:params) { { status: target_status } } + let!(:job) { create(:ci_build, runner: runner, project: project, status: target_status) } + + before do + exception_status = HasStatus::AVAILABLE_STATUSES - [target_status] + create(:ci_build, runner: runner, project: project, status: exception_status.first) + end + + it 'returns matched job' do + is_expected.to eq([job]) + end + end + end + end + end +end diff --git a/spec/helpers/auto_devops_helper_spec.rb b/spec/helpers/auto_devops_helper_spec.rb index 5e272af6073..7266e1b84d1 100644 --- a/spec/helpers/auto_devops_helper_spec.rb +++ b/spec/helpers/auto_devops_helper_spec.rb @@ -82,4 +82,104 @@ describe AutoDevopsHelper do it { is_expected.to eq(false) } end end + + describe '.show_run_auto_devops_pipeline_checkbox_for_instance_setting?' 
do + subject { helper.show_run_auto_devops_pipeline_checkbox_for_instance_setting?(project) } + + context 'when master contains a .gitlab-ci.yml file' do + before do + allow(project.repository).to receive(:gitlab_ci_yml).and_return("script: ['test']") + end + + it { is_expected.to eq(false) } + end + + context 'when auto devops is explicitly enabled' do + before do + project.create_auto_devops!(enabled: true) + end + + it { is_expected.to eq(false) } + end + + context 'when auto devops is explicitly disabled' do + before do + project.create_auto_devops!(enabled: false) + end + + context 'when auto devops is enabled system-wide' do + before do + stub_application_setting(auto_devops_enabled: true) + end + + it { is_expected.to eq(true) } + end + + context 'when auto devops is disabled system-wide' do + before do + stub_application_setting(auto_devops_enabled: false) + end + + it { is_expected.to eq(false) } + end + end + + context 'when auto devops is set to instance setting' do + before do + project.create_auto_devops!(enabled: nil) + end + + it { is_expected.to eq(false) } + end + end + + describe '.show_run_auto_devops_pipeline_checkbox_for_explicit_setting?' do + subject { helper.show_run_auto_devops_pipeline_checkbox_for_explicit_setting?(project) } + + context 'when master contains a .gitlab-ci.yml file' do + before do + allow(project.repository).to receive(:gitlab_ci_yml).and_return("script: ['test']") + end + + it { is_expected.to eq(false) } + end + + context 'when auto devops is explicitly enabled' do + before do + project.create_auto_devops!(enabled: true) + end + + it { is_expected.to eq(false) } + end + + context 'when auto devops is explicitly disabled' do + before do + project.create_auto_devops!(enabled: false) + end + + it { is_expected.to eq(true) } + end + + context 'when auto devops is set to instance setting' do + before do + project.create_auto_devops!(enabled: nil) + end + + context 'when auto devops is enabled system-wide' do + before do + stub_application_setting(auto_devops_enabled: true) + end + + it { is_expected.to eq(false) } + end + + context 'when auto devops is disabled system-wide' do + before do + stub_application_setting(auto_devops_enabled: false) + end + + it { is_expected.to eq(true) } + end + end + end end diff --git a/spec/helpers/issuables_helper_spec.rb b/spec/helpers/issuables_helper_spec.rb index cb851d828f2..d601cbdb39b 100644 --- a/spec/helpers/issuables_helper_spec.rb +++ b/spec/helpers/issuables_helper_spec.rb @@ -174,6 +174,7 @@ describe IssuablesHelper do expected_data = { 'endpoint' => "/#{@project.full_path}/issues/#{issue.iid}", + 'updateEndpoint' => "/#{@project.full_path}/issues/#{issue.iid}.json", 'canUpdate' => true, 'canDestroy' => true, 'issuableRef' => "##{issue.iid}", diff --git a/spec/javascripts/flash_spec.js b/spec/javascripts/flash_spec.js index b669aabcee4..97e3ab682c5 100644 --- a/spec/javascripts/flash_spec.js +++ b/spec/javascripts/flash_spec.js @@ -278,7 +278,7 @@ describe('Flash', () => { removeFlashClickListener(flashEl, false); - flashEl.parentNode.click(); + flashEl.click(); setTimeout(() => { expect(document.querySelector('.flash')).toBeNull(); diff --git a/spec/javascripts/issue_show/components/app_spec.js b/spec/javascripts/issue_show/components/app_spec.js index 5662c7387fb..b47a8bf705f 100644 --- a/spec/javascripts/issue_show/components/app_spec.js +++ b/spec/javascripts/issue_show/components/app_spec.js @@ -35,11 +35,12 @@ describe('Issuable output', () => { canUpdate: true, canDestroy: true, endpoint: 
'/gitlab-org/gitlab-shell/issues/9/realtime_changes', + updateEndpoint: gl.TEST_HOST, issuableRef: '#1', initialTitleHtml: '', initialTitleText: '', - initialDescriptionHtml: '', - initialDescriptionText: '', + initialDescriptionHtml: 'test', + initialDescriptionText: 'test', markdownPreviewPath: '/', markdownDocsPath: '/', projectNamespace: '/', diff --git a/spec/javascripts/issue_show/components/description_spec.js b/spec/javascripts/issue_show/components/description_spec.js index 360691a3546..163e5cdd062 100644 --- a/spec/javascripts/issue_show/components/description_spec.js +++ b/spec/javascripts/issue_show/components/description_spec.js @@ -1,11 +1,22 @@ import Vue from 'vue'; import descriptionComponent from '~/issue_show/components/description.vue'; +import * as taskList from '~/task_list'; +import mountComponent from '../../helpers/vue_mount_component_helper'; describe('Description component', () => { let vm; + let DescriptionComponent; + const props = { + canUpdate: true, + descriptionHtml: 'test', + descriptionText: 'test', + updatedAt: new Date().toString(), + taskStatus: '', + updateUrl: gl.TEST_HOST, + }; beforeEach(() => { - const Component = Vue.extend(descriptionComponent); + DescriptionComponent = Vue.extend(descriptionComponent); if (!document.querySelector('.issuable-meta')) { const metaData = document.createElement('div'); @@ -15,15 +26,11 @@ describe('Description component', () => { document.body.appendChild(metaData); } - vm = new Component({ - propsData: { - canUpdate: true, - descriptionHtml: 'test', - descriptionText: 'test', - updatedAt: new Date().toString(), - taskStatus: '', - }, - }).$mount(); + vm = mountComponent(DescriptionComponent, props); + }); + + afterEach(() => { + vm.$destroy(); }); it('animates description changes', (done) => { @@ -44,34 +51,46 @@ describe('Description component', () => { }); }); - // TODO: gl.TaskList no longer exists. 
rewrite these tests once we have a way to rewire ES modules - - // it('re-inits the TaskList when description changed', (done) => { - // spyOn(gl, 'TaskList'); - // vm.descriptionHtml = 'changed'; - // - // setTimeout(() => { - // expect( - // gl.TaskList, - // ).toHaveBeenCalled(); - // - // done(); - // }); - // }); - - // it('does not re-init the TaskList when canUpdate is false', (done) => { - // spyOn(gl, 'TaskList'); - // vm.canUpdate = false; - // vm.descriptionHtml = 'changed'; - // - // setTimeout(() => { - // expect( - // gl.TaskList, - // ).not.toHaveBeenCalled(); - // - // done(); - // }); - // }); + describe('TaskList', () => { + beforeEach(() => { + vm = mountComponent(DescriptionComponent, Object.assign({}, props, { + issuableType: 'issuableType', + })); + spyOn(taskList, 'default'); + }); + + it('re-inits the TaskList when description changed', (done) => { + vm.descriptionHtml = 'changed'; + + setTimeout(() => { + expect(taskList.default).toHaveBeenCalled(); + done(); + }); + }); + + it('does not re-init the TaskList when canUpdate is false', (done) => { + vm.canUpdate = false; + vm.descriptionHtml = 'changed'; + + setTimeout(() => { + expect(taskList.default).not.toHaveBeenCalled(); + done(); + }); + }); + + it('calls with issuableType dataType', (done) => { + vm.descriptionHtml = 'changed'; + + setTimeout(() => { + expect(taskList.default).toHaveBeenCalledWith({ + dataType: 'issuableType', + fieldName: 'description', + selector: '.detail-page-description', + }); + done(); + }); + }); + }); describe('taskStatus', () => { it('adds full taskStatus', (done) => { @@ -126,4 +145,8 @@ describe('Description component', () => { }); }); }); + + it('sets data-update-url', () => { + expect(vm.$el.querySelector('textarea').dataset.updateUrl).toEqual(gl.TEST_HOST); + }); }); diff --git a/spec/javascripts/issue_show/components/title_spec.js b/spec/javascripts/issue_show/components/title_spec.js index c1edc785d0f..5370f4e1fea 100644 --- a/spec/javascripts/issue_show/components/title_spec.js +++ b/spec/javascripts/issue_show/components/title_spec.js @@ -80,19 +80,19 @@ describe('Title component', () => { }); it('should not show by default', () => { - expect(vm.$el.querySelector('.note-action-button')).toBeNull(); + expect(vm.$el.querySelector('.btn-edit')).toBeNull(); }); it('should not show if canUpdate is false', () => { vm.showInlineEditButton = true; vm.canUpdate = false; - expect(vm.$el.querySelector('.note-action-button')).toBeNull(); + expect(vm.$el.querySelector('.btn-edit')).toBeNull(); }); it('should show if showInlineEditButton and canUpdate', () => { vm.showInlineEditButton = true; vm.canUpdate = true; - expect(vm.$el.querySelector('.note-action-button')).toBeDefined(); + expect(vm.$el.querySelector('.btn-edit')).toBeDefined(); }); it('should trigger open.form event when clicked', () => { @@ -100,7 +100,7 @@ describe('Title component', () => { vm.canUpdate = true; Vue.nextTick(() => { - vm.$el.querySelector('.note-action-button').click(); + vm.$el.querySelector('.btn-edit').click(); expect(eventHub.$emit).toHaveBeenCalledWith('open.form'); }); }); diff --git a/spec/javascripts/vue_shared/components/icon_spec.js b/spec/javascripts/vue_shared/components/icon_spec.js index 104da4473ce..a22b6bd3a67 100644 --- a/spec/javascripts/vue_shared/components/icon_spec.js +++ b/spec/javascripts/vue_shared/components/icon_spec.js @@ -11,7 +11,7 @@ describe('Sprite Icon Component', function () { icon = mountComponent(IconComponent, { name: 'test', - size: 99, + size: 32, cssClasses: 
'extraclasses', }); }); @@ -34,12 +34,18 @@ describe('Sprite Icon Component', function () { }); it('should properly compute iconSizeClass', function () { - expect(icon.iconSizeClass).toBe('s99'); + expect(icon.iconSizeClass).toBe('s32'); + }); + + it('forbids invalid size prop', () => { + expect(icon.$options.props.size.validator(NaN)).toBeFalsy(); + expect(icon.$options.props.size.validator(0)).toBeFalsy(); + expect(icon.$options.props.size.validator(9001)).toBeFalsy(); }); it('should properly render img css', function () { const classList = icon.$el.classList; - const containsSizeClass = classList.contains('s99'); + const containsSizeClass = classList.contains('s32'); const containsCustomClass = classList.contains('extraclasses'); expect(containsSizeClass).toBe(true); expect(containsCustomClass).toBe(true); diff --git a/spec/lib/api/helpers_spec.rb b/spec/lib/api/helpers_spec.rb new file mode 100644 index 00000000000..3c4deba4712 --- /dev/null +++ b/spec/lib/api/helpers_spec.rb @@ -0,0 +1,109 @@ +require 'spec_helper' + +describe API::Helpers do + subject { Class.new.include(described_class).new } + + describe '#find_namespace' do + let(:namespace) { create(:namespace) } + + shared_examples 'namespace finder' do + context 'when namespace exists' do + it 'returns requested namespace' do + expect(subject.find_namespace(existing_id)).to eq(namespace) + end + end + + context "when namespace doesn't exists" do + it 'returns nil' do + expect(subject.find_namespace(non_existing_id)).to be_nil + end + end + end + + context 'when ID is used as an argument' do + let(:existing_id) { namespace.id } + let(:non_existing_id) { 9999 } + + it_behaves_like 'namespace finder' + end + + context 'when PATH is used as an argument' do + let(:existing_id) { namespace.path } + let(:non_existing_id) { 'non-existing-path' } + + it_behaves_like 'namespace finder' + end + end + + shared_examples 'user namespace finder' do + let(:user1) { create(:user) } + + before do + allow(subject).to receive(:current_user).and_return(user1) + allow(subject).to receive(:header).and_return(nil) + allow(subject).to receive(:not_found!).and_raise('404 Namespace not found') + end + + context 'when namespace is group' do + let(:namespace) { create(:group) } + + context 'when user has access to group' do + before do + namespace.add_guest(user1) + namespace.save! + end + + it 'returns requested namespace' do + expect(namespace_finder).to eq(namespace) + end + end + + context "when user doesn't have access to group" do + it 'raises not found error' do + expect { namespace_finder }.to raise_error(RuntimeError, '404 Namespace not found') + end + end + end + + context "when namespace is user's personal namespace" do + let(:namespace) { create(:namespace) } + + context 'when user owns the namespace' do + before do + namespace.owner = user1 + namespace.save! + end + + it 'returns requested namespace' do + expect(namespace_finder).to eq(namespace) + end + end + + context "when user doesn't own the namespace" do + it 'raises not found error' do + expect { namespace_finder }.to raise_error(RuntimeError, '404 Namespace not found') + end + end + end + end + + describe '#find_namespace!' 
do + let(:namespace_finder) do + subject.find_namespace!(namespace.id) + end + + it_behaves_like 'user namespace finder' + end + + describe '#user_namespace' do + let(:namespace_finder) do + subject.user_namespace + end + + before do + allow(subject).to receive(:params).and_return({ id: namespace.id }) + end + + it_behaves_like 'user namespace finder' + end +end diff --git a/spec/lib/gitlab/diff/position_tracer_spec.rb b/spec/lib/gitlab/diff/position_tracer_spec.rb index e5138705443..ddc4f6c5b5c 100644 --- a/spec/lib/gitlab/diff/position_tracer_spec.rb +++ b/spec/lib/gitlab/diff/position_tracer_spec.rb @@ -1771,9 +1771,9 @@ describe Gitlab::Diff::PositionTracer do describe "merge of target branch" do let(:merge_commit) do - update_file_again_commit + second_create_file_commit - merge_request = create(:merge_request, source_branch: second_create_file_commit.sha, target_branch: branch_name, source_project: project) + merge_request = create(:merge_request, source_branch: second_branch_name, target_branch: branch_name, source_project: project) repository.merge(current_user, merge_request.diff_head_sha, merge_request, "Merge branches") diff --git a/spec/lib/gitlab/gitaly_client_spec.rb b/spec/lib/gitlab/gitaly_client_spec.rb index a1f4e65b8d4..a871ed0df0e 100644 --- a/spec/lib/gitlab/gitaly_client_spec.rb +++ b/spec/lib/gitlab/gitaly_client_spec.rb @@ -278,4 +278,20 @@ describe Gitlab::GitalyClient, skip_gitaly_mock: true do end end end + + describe 'timeouts' do + context 'with default values' do + before do + stub_application_setting(gitaly_timeout_default: 55) + stub_application_setting(gitaly_timeout_medium: 30) + stub_application_setting(gitaly_timeout_fast: 10) + end + + it 'returns expected values' do + expect(described_class.default_timeout).to be(55) + expect(described_class.medium_timeout).to be(30) + expect(described_class.fast_timeout).to be(10) + end + end + end end diff --git a/spec/lib/gitlab/sql/pattern_spec.rb b/spec/lib/gitlab/sql/pattern_spec.rb index 48d56628ed5..ef51e3cc8df 100644 --- a/spec/lib/gitlab/sql/pattern_spec.rb +++ b/spec/lib/gitlab/sql/pattern_spec.rb @@ -137,22 +137,22 @@ describe Gitlab::SQL::Pattern do end end - describe '.to_fuzzy_arel' do - subject(:to_fuzzy_arel) { Issue.to_fuzzy_arel(:title, query) } + describe '.fuzzy_arel_match' do + subject(:fuzzy_arel_match) { Issue.fuzzy_arel_match(:title, query) } context 'with a word equal to 3 chars' do let(:query) { 'foo' } it 'returns a single ILIKE condition' do - expect(to_fuzzy_arel.to_sql).to match(/title.*I?LIKE '\%foo\%'/) + expect(fuzzy_arel_match.to_sql).to match(/title.*I?LIKE '\%foo\%'/) end end context 'with a word shorter than 3 chars' do let(:query) { 'fo' } - it 'returns nil' do - expect(to_fuzzy_arel).to be_nil + it 'returns a single equality condition' do + expect(fuzzy_arel_match.to_sql).to match(/title.*I?LIKE 'fo'/) end end @@ -160,7 +160,23 @@ describe Gitlab::SQL::Pattern do let(:query) { 'foo baz' } it 'returns a joining LIKE condition using a AND' do - expect(to_fuzzy_arel.to_sql).to match(/title.+I?LIKE '\%foo\%' AND .*title.*I?LIKE '\%baz\%'/) + expect(fuzzy_arel_match.to_sql).to match(/title.+I?LIKE '\%foo\%' AND .*title.*I?LIKE '\%baz\%'/) + end + end + + context 'with two words both shorter than 3 chars' do + let(:query) { 'fo ba' } + + it 'returns a single ILIKE condition' do + expect(fuzzy_arel_match.to_sql).to match(/title.*I?LIKE 'fo ba'/) + end + end + + context 'with two words, one shorter 3 chars' do + let(:query) { 'foo ba' } + + it 'returns a single ILIKE condition using the 
longer word' do + expect(fuzzy_arel_match.to_sql).to match(/title.+I?LIKE '\%foo\%'/) end end @@ -168,7 +184,7 @@ describe Gitlab::SQL::Pattern do let(:query) { 'foo "really bar" baz' } it 'returns a joining LIKE condition using a AND' do - expect(to_fuzzy_arel.to_sql).to match(/title.+I?LIKE '\%foo\%' AND .*title.*I?LIKE '\%baz\%' AND .*title.*I?LIKE '\%really bar\%'/) + expect(fuzzy_arel_match.to_sql).to match(/title.+I?LIKE '\%foo\%' AND .*title.*I?LIKE '\%baz\%' AND .*title.*I?LIKE '\%really bar\%'/) end end end diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb index 51bf4e65e5d..0b7e16cc33c 100644 --- a/spec/models/application_setting_spec.rb +++ b/spec/models/application_setting_spec.rb @@ -219,6 +219,65 @@ describe ApplicationSetting do expect(subject).to be_valid end end + + context 'gitaly timeouts' do + [:gitaly_timeout_default, :gitaly_timeout_medium, :gitaly_timeout_fast].each do |timeout_name| + it do + is_expected.to validate_presence_of(timeout_name) + is_expected.to validate_numericality_of(timeout_name).only_integer + .is_greater_than_or_equal_to(0) + end + end + + [:gitaly_timeout_medium, :gitaly_timeout_fast].each do |timeout_name| + it "validates that #{timeout_name} is lower than timeout_default" do + subject[:gitaly_timeout_default] = 50 + subject[timeout_name] = 100 + + expect(subject).to be_invalid + end + end + + it 'accepts all timeouts equal' do + subject.gitaly_timeout_default = 0 + subject.gitaly_timeout_medium = 0 + subject.gitaly_timeout_fast = 0 + + expect(subject).to be_valid + end + + it 'accepts timeouts in descending order' do + subject.gitaly_timeout_default = 50 + subject.gitaly_timeout_medium = 30 + subject.gitaly_timeout_fast = 20 + + expect(subject).to be_valid + end + + it 'rejects timeouts in ascending order' do + subject.gitaly_timeout_default = 20 + subject.gitaly_timeout_medium = 30 + subject.gitaly_timeout_fast = 50 + + expect(subject).to be_invalid + end + + it 'rejects medium timeout larger than default' do + subject.gitaly_timeout_default = 30 + subject.gitaly_timeout_medium = 50 + subject.gitaly_timeout_fast = 20 + + expect(subject).to be_invalid + end + + it 'rejects medium timeout smaller than fast' do + subject.gitaly_timeout_default = 30 + subject.gitaly_timeout_medium = 15 + subject.gitaly_timeout_fast = 20 + + expect(subject).to be_invalid + end + end end describe '.current' do diff --git a/spec/models/ci/runner_spec.rb b/spec/models/ci/runner_spec.rb index 584dfe9a5c1..a93e7e233a8 100644 --- a/spec/models/ci/runner_spec.rb +++ b/spec/models/ci/runner_spec.rb @@ -473,7 +473,7 @@ describe Ci::Runner do end describe '.search' do - let(:runner) { create(:ci_runner, token: '123abc') } + let(:runner) { create(:ci_runner, token: '123abc', description: 'test runner') } it 'returns runners with a matching token' do expect(described_class.search(runner.token)).to eq([runner]) diff --git a/spec/models/concerns/has_variable_spec.rb b/spec/models/concerns/has_variable_spec.rb index f4b24e6d1d9..f87869a2fdc 100644 --- a/spec/models/concerns/has_variable_spec.rb +++ b/spec/models/concerns/has_variable_spec.rb @@ -9,6 +9,24 @@ describe HasVariable do it { is_expected.not_to allow_value('foo bar').for(:key) } it { is_expected.not_to allow_value('foo/bar').for(:key) } + describe '#key=' do + context 'when the new key is nil' do + it 'strips leading and trailing whitespaces' do + subject.key = nil + + expect(subject.key).to eq('') + end + end + + context 'when the new key has leadind and trailing 
whitespaces' do + it 'strips leading and trailing whitespaces' do + subject.key = ' my key ' + + expect(subject.key).to eq('my key') + end + end + end + describe '#value' do before do subject.value = 'secret' diff --git a/spec/models/concerns/issuable_spec.rb b/spec/models/concerns/issuable_spec.rb index 4dfbb14952e..a53b59c4e08 100644 --- a/spec/models/concerns/issuable_spec.rb +++ b/spec/models/concerns/issuable_spec.rb @@ -67,6 +67,7 @@ describe Issuable do describe ".search" do let!(:searchable_issue) { create(:issue, title: "Searchable awesome issue") } + let!(:searchable_issue2) { create(:issue, title: 'Aw') } it 'returns issues with a matching title' do expect(issuable_class.search(searchable_issue.title)) @@ -86,8 +87,8 @@ describe Issuable do expect(issuable_class.search('searchable issue')).to eq([searchable_issue]) end - it 'returns all issues with a query shorter than 3 chars' do - expect(issuable_class.search('zz')).to eq(issuable_class.all) + it 'returns issues with a matching title for a query shorter than 3 chars' do + expect(issuable_class.search(searchable_issue2.title.downcase)).to eq([searchable_issue2]) end end @@ -95,6 +96,7 @@ describe Issuable do let!(:searchable_issue) do create(:issue, title: "Searchable awesome issue", description: 'Many cute kittens') end + let!(:searchable_issue2) { create(:issue, title: "Aw", description: "Cu") } it 'returns issues with a matching title' do expect(issuable_class.full_search(searchable_issue.title)) @@ -133,8 +135,8 @@ describe Issuable do expect(issuable_class.full_search('many kittens')).to eq([searchable_issue]) end - it 'returns all issues with a query shorter than 3 chars' do - expect(issuable_class.search('zz')).to eq(issuable_class.all) + it 'returns issues with a matching description for a query shorter than 3 chars' do + expect(issuable_class.full_search(searchable_issue2.description.downcase)).to eq([searchable_issue2]) end end @@ -283,7 +285,7 @@ describe Issuable do 'labels' => [[labels[0].hook_attrs], [labels[1].hook_attrs]] )) - issue.to_hook_data(user, old_labels: [labels[0]]) + issue.to_hook_data(user, old_associations: { labels: [labels[0]] }) end end @@ -302,7 +304,7 @@ describe Issuable do 'total_time_spent' => [1, 2] )) - issue.to_hook_data(user, old_total_time_spent: 1) + issue.to_hook_data(user, old_associations: { total_time_spent: 1 }) end end @@ -322,7 +324,7 @@ describe Issuable do 'assignees' => [[user.hook_attrs], [user.hook_attrs, user2.hook_attrs]] )) - issue.to_hook_data(user, old_assignees: [user]) + issue.to_hook_data(user, old_associations: { assignees: [user] }) end end @@ -345,7 +347,7 @@ describe Issuable do 'assignee' => [user.hook_attrs, user2.hook_attrs] )) - merge_request.to_hook_data(user, old_assignees: [user]) + merge_request.to_hook_data(user, old_associations: { assignees: [user] }) end end end diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb index 3cf8fc816ff..728028746d8 100644 --- a/spec/models/merge_request_spec.rb +++ b/spec/models/merge_request_spec.rb @@ -259,7 +259,7 @@ describe MergeRequest do end describe '#source_branch_sha' do - let(:last_branch_commit) { subject.source_project.repository.commit(subject.source_branch) } + let(:last_branch_commit) { subject.source_project.repository.commit(Gitlab::Git::BRANCH_REF_PREFIX + subject.source_branch) } context 'with diffs' do subject { create(:merge_request, :with_diffs) } @@ -273,6 +273,21 @@ describe MergeRequest do it 'returns the sha of the source branch last commit' do 
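A note on the `#source_branch_sha` change just above: prefixing the branch name with `Gitlab::Git::BRANCH_REF_PREFIX` (`refs/heads/`) pins the commit lookup to the branch even when a tag shares the same short name, which is exactly what the "tag name matching the branch name" example added just below exercises. A toy resolver illustrating the ambiguity (plain Ruby; the ref table and `resolve` helper are illustrative, not GitLab's API):

```ruby
BRANCH_REF_PREFIX = 'refs/heads/'.freeze

# Two refs sharing the short name "feature": a branch and a tag.
refs = {
  'refs/heads/feature' => 'aaa111', # branch tip -- the SHA the MR should use
  'refs/tags/feature'  => 'bbb222'  # tag that happens to collide
}

# A naive resolver that tries tags first, mimicking how a bare name can
# resolve to the "wrong" object.
def resolve(refs, name)
  refs["refs/tags/#{name}"] || refs["refs/heads/#{name}"] || refs[name]
end

resolve(refs, 'feature')            # => "bbb222" (ambiguous lookup)
refs[BRANCH_REF_PREFIX + 'feature'] # => "aaa111" (fully qualified, unambiguous)
```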
expect(subject.source_branch_sha).to eq(last_branch_commit.sha) end + + context 'when there is a tag name matching the branch name' do + let(:tag_name) { subject.source_branch } + + it 'returns the sha of the source branch last commit' do + subject.source_project.repository.add_tag(subject.author, + tag_name, + subject.target_branch_sha, + 'Add a tag') + + expect(subject.source_branch_sha).to eq(last_branch_commit.sha) + + subject.source_project.repository.rm_tag(subject.author, tag_name) + end + end end context 'when the merge request is being created' do @@ -933,7 +948,7 @@ describe MergeRequest do context 'with a completely different branch' do before do - subject.update(target_branch: 'v1.0.0') + subject.update(target_branch: 'csv') end it_behaves_like 'returning all SHA' @@ -941,7 +956,7 @@ describe MergeRequest do context 'with a branch having no difference' do before do - subject.update(target_branch: 'v1.1.0') + subject.update(target_branch: 'branch-merged') subject.reload # make sure commits were not cached end diff --git a/spec/models/snippet_spec.rb b/spec/models/snippet_spec.rb index de3ca300ae3..e09d89d235d 100644 --- a/spec/models/snippet_spec.rb +++ b/spec/models/snippet_spec.rb @@ -88,7 +88,7 @@ describe Snippet do end describe '.search' do - let(:snippet) { create(:snippet) } + let(:snippet) { create(:snippet, title: 'test snippet') } it 'returns snippets with a matching title' do expect(described_class.search(snippet.title)).to eq([snippet]) diff --git a/spec/policies/group_policy_spec.rb b/spec/policies/group_policy_spec.rb index 17dc3bb4f48..4f4e634829d 100644 --- a/spec/policies/group_policy_spec.rb +++ b/spec/policies/group_policy_spec.rb @@ -56,6 +56,7 @@ describe GroupPolicy do expect_disallowed(*developer_permissions) expect_disallowed(*master_permissions) expect_disallowed(*owner_permissions) + expect_disallowed(:read_namespace) end end @@ -63,7 +64,7 @@ describe GroupPolicy do let(:current_user) { guest } it do - expect_allowed(:read_group) + expect_allowed(:read_group, :read_namespace) expect_disallowed(*reporter_permissions) expect_disallowed(*developer_permissions) expect_disallowed(*master_permissions) @@ -75,7 +76,7 @@ describe GroupPolicy do let(:current_user) { reporter } it do - expect_allowed(:read_group) + expect_allowed(:read_group, :read_namespace) expect_allowed(*reporter_permissions) expect_disallowed(*developer_permissions) expect_disallowed(*master_permissions) @@ -87,7 +88,7 @@ describe GroupPolicy do let(:current_user) { developer } it do - expect_allowed(:read_group) + expect_allowed(:read_group, :read_namespace) expect_allowed(*reporter_permissions) expect_allowed(*developer_permissions) expect_disallowed(*master_permissions) @@ -99,7 +100,7 @@ describe GroupPolicy do let(:current_user) { master } it do - expect_allowed(:read_group) + expect_allowed(:read_group, :read_namespace) expect_allowed(*reporter_permissions) expect_allowed(*developer_permissions) expect_allowed(*master_permissions) @@ -113,7 +114,7 @@ describe GroupPolicy do it do allow(Group).to receive(:supports_nested_groups?).and_return(true) - expect_allowed(:read_group) + expect_allowed(:read_group, :read_namespace) expect_allowed(*reporter_permissions) expect_allowed(*developer_permissions) expect_allowed(*master_permissions) @@ -127,7 +128,7 @@ describe GroupPolicy do it do allow(Group).to receive(:supports_nested_groups?).and_return(true) - expect_allowed(:read_group) + expect_allowed(:read_group, :read_namespace) expect_allowed(*reporter_permissions) 
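The `GroupPolicy` changes in this hunk grant `:read_namespace` to every role that already had `:read_group` (guest and up), while anonymous users stay disallowed; the `NamespacePolicy` spec that follows covers the owner/admin-only permissions separately. As a rough mental model in plain Ruby (an illustrative permission table, not GitLab's DeclarativePolicy engine):

```ruby
# Role names mirror the specs; the data structure is illustrative only.
GROUP_PERMISSIONS = {
  anonymous: [],
  guest:     %i[read_group read_namespace],
  reporter:  %i[read_group read_namespace reporter_access],
  developer: %i[read_group read_namespace reporter_access developer_access],
  master:    %i[read_group read_namespace reporter_access developer_access master_access],
  owner:     %i[read_group read_namespace reporter_access developer_access master_access owner_access]
}.freeze

def allowed?(role, permission)
  GROUP_PERMISSIONS.fetch(role, []).include?(permission)
end

allowed?(:guest, :read_namespace)     # => true
allowed?(:anonymous, :read_namespace) # => false
```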
expect_allowed(*developer_permissions) expect_allowed(*master_permissions) diff --git a/spec/policies/namespace_policy_spec.rb b/spec/policies/namespace_policy_spec.rb index e52ff02e5f0..1fdf95ad716 100644 --- a/spec/policies/namespace_policy_spec.rb +++ b/spec/policies/namespace_policy_spec.rb @@ -1,20 +1,42 @@ require 'spec_helper' describe NamespacePolicy do - let(:current_user) { create(:user) } - let(:namespace) { current_user.namespace } + let(:user) { create(:user) } + let(:owner) { create(:user) } + let(:admin) { create(:admin) } + let(:namespace) { create(:namespace, owner: owner) } + + let(:owner_permissions) { [:create_projects, :admin_namespace, :read_namespace] } subject { described_class.new(current_user, namespace) } - context "create projects" do - context "user namespace" do - it { is_expected.to be_allowed(:create_projects) } - end + context 'with no user' do + let(:current_user) { nil } + + it { is_expected.to be_banned } + end + + context 'regular user' do + let(:current_user) { user } + + it { is_expected.to be_disallowed(*owner_permissions) } + end + + context 'owner' do + let(:current_user) { owner } + + it { is_expected.to be_allowed(*owner_permissions) } - context "user who has exceeded project limit" do - let(:current_user) { create(:user, projects_limit: 0) } + context 'user who has exceeded project limit' do + let(:owner) { create(:user, projects_limit: 0) } it { is_expected.not_to be_allowed(:create_projects) } end end + + context 'admin' do + let(:current_user) { admin } + + it { is_expected.to be_allowed(*owner_permissions) } + end end diff --git a/spec/requests/api/internal_spec.rb b/spec/requests/api/internal_spec.rb index 34ecdd1e164..67e1539cbc3 100644 --- a/spec/requests/api/internal_spec.rb +++ b/spec/requests/api/internal_spec.rb @@ -269,9 +269,8 @@ describe API::Internal do end context "git pull" do - context "gitaly disabled" do + context "gitaly disabled", :disable_gitaly do it "has the correct payload" do - allow(Gitlab::GitalyClient).to receive(:feature_enabled?).with(:ssh_upload_pack).and_return(false) pull(key, project) expect(response).to have_gitlab_http_status(200) @@ -285,7 +284,6 @@ describe API::Internal do context "gitaly enabled" do it "has the correct payload" do - allow(Gitlab::GitalyClient).to receive(:feature_enabled?).with(:ssh_upload_pack).and_return(true) pull(key, project) expect(response).to have_gitlab_http_status(200) @@ -304,9 +302,8 @@ describe API::Internal do end context "git push" do - context "gitaly disabled" do + context "gitaly disabled", :disable_gitaly do it "has the correct payload" do - allow(Gitlab::GitalyClient).to receive(:feature_enabled?).with(:ssh_receive_pack).and_return(false) push(key, project) expect(response).to have_gitlab_http_status(200) @@ -320,7 +317,6 @@ describe API::Internal do context "gitaly enabled" do it "has the correct payload" do - allow(Gitlab::GitalyClient).to receive(:feature_enabled?).with(:ssh_receive_pack).and_return(true) push(key, project) expect(response).to have_gitlab_http_status(200) diff --git a/spec/requests/api/namespaces_spec.rb b/spec/requests/api/namespaces_spec.rb index e60716d46d7..98102fcd6a7 100644 --- a/spec/requests/api/namespaces_spec.rb +++ b/spec/requests/api/namespaces_spec.rb @@ -91,4 +91,127 @@ describe API::Namespaces do end end end + + describe 'GET /namespaces/:id' do + let(:owned_group) { group1 } + let(:user2) { create(:user) } + + shared_examples 'can access namespace' do + it 'returns namespace details' do + get api("/namespaces/#{namespace_id}", 
request_actor) + + expect(response).to have_gitlab_http_status(200) + + expect(json_response['id']).to eq(requested_namespace.id) + expect(json_response['path']).to eq(requested_namespace.path) + expect(json_response['name']).to eq(requested_namespace.name) + end + end + + shared_examples 'namespace reader' do + let(:requested_namespace) { owned_group } + + before do + owned_group.add_owner(request_actor) + end + + context 'when namespace exists' do + context 'when requested by ID' do + context 'when requesting group' do + let(:namespace_id) { owned_group.id } + + it_behaves_like 'can access namespace' + end + + context 'when requesting personal namespace' do + let(:namespace_id) { request_actor.namespace.id } + let(:requested_namespace) { request_actor.namespace } + + it_behaves_like 'can access namespace' + end + end + + context 'when requested by path' do + context 'when requesting group' do + let(:namespace_id) { owned_group.path } + + it_behaves_like 'can access namespace' + end + + context 'when requesting personal namespace' do + let(:namespace_id) { request_actor.namespace.path } + let(:requested_namespace) { request_actor.namespace } + + it_behaves_like 'can access namespace' + end + end + end + + context "when namespace doesn't exist" do + it 'returns not-found' do + get api('/namespaces/9999', request_actor) + + expect(response).to have_gitlab_http_status(404) + end + end + end + + context 'when unauthenticated' do + it 'returns authentication error' do + get api("/namespaces/#{group1.id}") + + expect(response).to have_gitlab_http_status(401) + end + end + + context 'when authenticated as regular user' do + let(:request_actor) { user } + + context 'when requested namespace is not owned by user' do + context 'when requesting group' do + it 'returns not-found' do + get api("/namespaces/#{group2.id}", request_actor) + + expect(response).to have_gitlab_http_status(404) + end + end + + context 'when requesting personal namespace' do + it 'returns not-found' do + get api("/namespaces/#{user2.namespace.id}", request_actor) + + expect(response).to have_gitlab_http_status(404) + end + end + end + + context 'when requested namespace is owned by user' do + it_behaves_like 'namespace reader' + end + end + + context 'when authenticated as admin' do + let(:request_actor) { admin } + + context 'when requested namespace is not owned by user' do + context 'when requesting group' do + let(:namespace_id) { group2.id } + let(:requested_namespace) { group2 } + + it_behaves_like 'can access namespace' + end + + context 'when requesting personal namespace' do + let(:namespace_id) { user2.namespace.id } + let(:requested_namespace) { user2.namespace } + + it_behaves_like 'can access namespace' + end + end + + context 'when requested namespace is owned by user' do + it_behaves_like 'namespace reader' + end + end + end end diff --git a/spec/requests/api/runners_spec.rb b/spec/requests/api/runners_spec.rb index fe38a7b3251..ec5cad4f4fd 100644 --- a/spec/requests/api/runners_spec.rb +++ b/spec/requests/api/runners_spec.rb @@ -354,6 +354,140 @@ describe API::Runners do end end + describe 'GET /runners/:id/jobs' do + set(:job_1) { create(:ci_build) } + let!(:job_2) { create(:ci_build, :running, runner: shared_runner, project: project) } + let!(:job_3) { create(:ci_build, :failed, runner: shared_runner, project: project) } + let!(:job_4) { create(:ci_build, :running, runner: specific_runner, project: project) } + let!(:job_5) { create(:ci_build, :failed, runner: specific_runner, project: project) } + + context 
'admin user' do + context 'when runner exists' do + context 'when runner is shared' do + it 'return jobs' do + get api("/runners/#{shared_runner.id}/jobs", admin) + + expect(response).to have_gitlab_http_status(200) + expect(response).to include_pagination_headers + + expect(json_response).to be_an(Array) + expect(json_response.length).to eq(2) + end + end + + context 'when runner is specific' do + it 'return jobs' do + get api("/runners/#{specific_runner.id}/jobs", admin) + + expect(response).to have_gitlab_http_status(200) + expect(response).to include_pagination_headers + + expect(json_response).to be_an(Array) + expect(json_response.length).to eq(2) + end + end + + context 'when valid status is provided' do + it 'return filtered jobs' do + get api("/runners/#{specific_runner.id}/jobs?status=failed", admin) + + expect(response).to have_gitlab_http_status(200) + expect(response).to include_pagination_headers + + expect(json_response).to be_an(Array) + expect(json_response.length).to eq(1) + expect(json_response.first).to include('id' => job_5.id) + end + end + + context 'when invalid status is provided' do + it 'return 400' do + get api("/runners/#{specific_runner.id}/jobs?status=non-existing", admin) + + expect(response).to have_gitlab_http_status(400) + end + end + end + + context "when runner doesn't exist" do + it 'returns 404' do + get api('/runners/9999/jobs', admin) + + expect(response).to have_gitlab_http_status(404) + end + end + end + + context "runner project's administrative user" do + context 'when runner exists' do + context 'when runner is shared' do + it 'returns 403' do + get api("/runners/#{shared_runner.id}/jobs", user) + + expect(response).to have_gitlab_http_status(403) + end + end + + context 'when runner is specific' do + it 'return jobs' do + get api("/runners/#{specific_runner.id}/jobs", user) + + expect(response).to have_gitlab_http_status(200) + expect(response).to include_pagination_headers + + expect(json_response).to be_an(Array) + expect(json_response.length).to eq(2) + end + end + + context 'when valid status is provided' do + it 'return filtered jobs' do + get api("/runners/#{specific_runner.id}/jobs?status=failed", user) + + expect(response).to have_gitlab_http_status(200) + expect(response).to include_pagination_headers + + expect(json_response).to be_an(Array) + expect(json_response.length).to eq(1) + expect(json_response.first).to include('id' => job_5.id) + end + end + + context 'when invalid status is provided' do + it 'return 400' do + get api("/runners/#{specific_runner.id}/jobs?status=non-existing", user) + + expect(response).to have_gitlab_http_status(400) + end + end + end + + context "when runner doesn't exist" do + it 'returns 404' do + get api('/runners/9999/jobs', user) + + expect(response).to have_gitlab_http_status(404) + end + end + end + + context 'other authorized user' do + it 'does not return jobs' do + get api("/runners/#{specific_runner.id}/jobs", user2) + + expect(response).to have_gitlab_http_status(403) + end + end + + context 'unauthorized user' do + it 'does not return jobs' do + get api("/runners/#{specific_runner.id}/jobs") + + expect(response).to have_gitlab_http_status(401) + end + end + end + describe 'GET /projects/:id/runners' do context 'authorized user with master privileges' do it "returns project's runners" do diff --git a/spec/services/merge_requests/build_service_spec.rb b/spec/services/merge_requests/build_service_spec.rb index b46c419de14..fee293760f5 100644 --- 
a/spec/services/merge_requests/build_service_spec.rb +++ b/spec/services/merge_requests/build_service_spec.rb @@ -29,13 +29,27 @@ describe MergeRequests::BuildService do before do project.team << [user, :guest] + end + def stub_compare allow(CompareService).to receive_message_chain(:new, :execute).and_return(compare) allow(project).to receive(:commit).and_return(commit_1) allow(project).to receive(:commit).and_return(commit_2) end - describe 'execute' do + describe '#execute' do + it 'calls the compare service with the correct arguments' do + expect(CompareService).to receive(:new) + .with(project, Gitlab::Git::BRANCH_REF_PREFIX + source_branch) + .and_call_original + + expect_any_instance_of(CompareService).to receive(:execute) + .with(project, Gitlab::Git::BRANCH_REF_PREFIX + target_branch) + .and_call_original + + merge_request + end + context 'missing source branch' do let(:source_branch) { '' } @@ -52,6 +66,10 @@ describe MergeRequests::BuildService do let(:target_branch) { nil } let(:commits) { Commit.decorate([commit_1], project) } + before do + stub_compare + end + it 'creates compare object with target branch as default branch' do expect(merge_request.compare).to be_present expect(merge_request.target_branch).to eq(project.default_branch) @@ -77,6 +95,10 @@ describe MergeRequests::BuildService do context 'no commits in the diff' do let(:commits) { [] } + before do + stub_compare + end + it 'allows the merge request to be created' do expect(merge_request.can_be_created).to eq(true) end @@ -89,6 +111,10 @@ describe MergeRequests::BuildService do context 'one commit in the diff' do let(:commits) { Commit.decorate([commit_1], project) } + before do + stub_compare + end + it 'allows the merge request to be created' do expect(merge_request.can_be_created).to eq(true) end @@ -149,6 +175,10 @@ describe MergeRequests::BuildService do context 'more than one commit in the diff' do let(:commits) { Commit.decorate([commit_1, commit_2], project) } + before do + stub_compare + end + it 'allows the merge request to be created' do expect(merge_request.can_be_created).to eq(true) end diff --git a/spec/services/merge_requests/update_service_spec.rb b/spec/services/merge_requests/update_service_spec.rb index 5ce6ca70c83..7a66b809550 100644 --- a/spec/services/merge_requests/update_service_spec.rb +++ b/spec/services/merge_requests/update_service_spec.rb @@ -65,7 +65,7 @@ describe MergeRequests::UpdateService, :mailer do end end - it 'mathces base expectations' do + it 'matches base expectations' do expect(@merge_request).to be_valid expect(@merge_request.title).to eq('New title') expect(@merge_request.assignee).to eq(user2) @@ -78,9 +78,17 @@ describe MergeRequests::UpdateService, :mailer do end it 'executes hooks with update action' do - expect(service) - .to have_received(:execute_hooks) - .with(@merge_request, 'update', old_labels: [], old_assignees: [user3], old_total_time_spent: 0) + expect(service).to have_received(:execute_hooks) + .with( + @merge_request, + 'update', + old_associations: { + labels: [], + mentioned_users: [user2], + assignees: [user3], + total_time_spent: 0 + } + ) end it 'sends email to user2 about assign of new merge request and email to user3 about merge request unassignment' do diff --git a/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb b/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb new file mode 100644 index 00000000000..50e59954f73 --- /dev/null +++ 
b/spec/services/projects/hashed_storage/migrate_attachments_service_spec.rb @@ -0,0 +1,63 @@ +require 'spec_helper' + +describe Projects::HashedStorage::MigrateAttachmentsService do + subject(:service) { described_class.new(project) } + let(:project) { create(:project) } + let(:legacy_storage) { Storage::LegacyProject.new(project) } + let(:hashed_storage) { Storage::HashedProject.new(project) } + + let!(:upload) { Upload.find_by(path: file_uploader.relative_path) } + let(:file_uploader) { build(:file_uploader, project: project) } + let(:old_path) { File.join(base_path(legacy_storage), upload.path) } + let(:new_path) { File.join(base_path(hashed_storage), upload.path) } + + context '#execute' do + context 'when succeeds' do + it 'moves attachments to hashed storage layout' do + expect(File.file?(old_path)).to be_truthy + expect(File.file?(new_path)).to be_falsey + expect(File.exist?(base_path(legacy_storage))).to be_truthy + expect(File.exist?(base_path(hashed_storage))).to be_falsey + expect(FileUtils).to receive(:mv).with(base_path(legacy_storage), base_path(hashed_storage)).and_call_original + + service.execute + + expect(File.exist?(base_path(hashed_storage))).to be_truthy + expect(File.exist?(base_path(legacy_storage))).to be_falsey + expect(File.file?(old_path)).to be_falsey + expect(File.file?(new_path)).to be_truthy + end + end + + context 'when original folder does not exist anymore' do + before do + FileUtils.rm_rf(base_path(legacy_storage)) + end + + it 'skips moving folders and go to next' do + expect(FileUtils).not_to receive(:mv).with(base_path(legacy_storage), base_path(hashed_storage)) + + service.execute + + expect(File.exist?(base_path(hashed_storage))).to be_falsey + expect(File.file?(new_path)).to be_falsey + end + end + + context 'when target folder already exists' do + before do + FileUtils.mkdir_p(base_path(hashed_storage)) + end + + it 'raises AttachmentMigrationError' do + expect(FileUtils).not_to receive(:mv).with(base_path(legacy_storage), base_path(hashed_storage)) + + expect { service.execute }.to raise_error(Projects::HashedStorage::AttachmentMigrationError) + end + end + end + + def base_path(storage) + FileUploader.dynamic_path_builder(storage.disk_path) + end +end diff --git a/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb b/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb new file mode 100644 index 00000000000..3a3e47fd9c0 --- /dev/null +++ b/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb @@ -0,0 +1,76 @@ +require 'spec_helper' + +describe Projects::HashedStorage::MigrateRepositoryService do + let(:gitlab_shell) { Gitlab::Shell.new } + let(:project) { create(:project, :empty_repo, :wiki_repo) } + let(:service) { described_class.new(project) } + let(:legacy_storage) { Storage::LegacyProject.new(project) } + let(:hashed_storage) { Storage::HashedProject.new(project) } + + describe '#execute' do + before do + allow(service).to receive(:gitlab_shell) { gitlab_shell } + end + + context 'when succeeds' do + it 'renames project and wiki repositories' do + service.execute + + expect(gitlab_shell.exists?(project.repository_storage_path, "#{hashed_storage.disk_path}.git")).to be_truthy + expect(gitlab_shell.exists?(project.repository_storage_path, "#{hashed_storage.disk_path}.wiki.git")).to be_truthy + end + + it 'updates project to be hashed and not read-only' do + service.execute + + expect(project.hashed_storage?(:repository)).to be_truthy + expect(project.repository_read_only).to 
be_falsey + end + + it 'move operation is called for both repositories' do + expect_move_repository(project.disk_path, hashed_storage.disk_path) + expect_move_repository("#{project.disk_path}.wiki", "#{hashed_storage.disk_path}.wiki") + + service.execute + end + end + + context 'when one move fails' do + it 'rollsback repositories to original name' do + from_name = project.disk_path + to_name = hashed_storage.disk_path + allow(service).to receive(:move_repository).and_call_original + allow(service).to receive(:move_repository).with(from_name, to_name).once { false } # will disable first move only + + expect(service).to receive(:rollback_folder_move).and_call_original + + service.execute + + expect(gitlab_shell.exists?(project.repository_storage_path, "#{hashed_storage.disk_path}.git")).to be_falsey + expect(gitlab_shell.exists?(project.repository_storage_path, "#{hashed_storage.disk_path}.wiki.git")).to be_falsey + expect(project.repository_read_only?).to be_falsey + end + + context 'when rollback fails' do + let(:from_name) { legacy_storage.disk_path } + let(:to_name) { hashed_storage.disk_path } + + before do + hashed_storage.ensure_storage_path_exists + gitlab_shell.mv_repository(project.repository_storage_path, from_name, to_name) + end + + it 'does not try to move nil repository over hashed' do + expect(gitlab_shell).not_to receive(:mv_repository).with(project.repository_storage_path, from_name, to_name) + expect_move_repository("#{project.disk_path}.wiki", "#{hashed_storage.disk_path}.wiki") + + service.execute + end + end + end + + def expect_move_repository(from_name, to_name) + expect(gitlab_shell).to receive(:mv_repository).with(project.repository_storage_path, from_name, to_name).and_call_original + end + end +end diff --git a/spec/services/projects/hashed_storage_migration_service_spec.rb b/spec/services/projects/hashed_storage_migration_service_spec.rb index b71b47c59b6..466f0b5d7c2 100644 --- a/spec/services/projects/hashed_storage_migration_service_spec.rb +++ b/spec/services/projects/hashed_storage_migration_service_spec.rb @@ -1,74 +1,44 @@ require 'spec_helper' describe Projects::HashedStorageMigrationService do - let(:gitlab_shell) { Gitlab::Shell.new } let(:project) { create(:project, :empty_repo, :wiki_repo) } - let(:service) { described_class.new(project) } - let(:legacy_storage) { Storage::LegacyProject.new(project) } - let(:hashed_storage) { Storage::HashedProject.new(project) } + subject(:service) { described_class.new(project) } describe '#execute' do - before do - allow(service).to receive(:gitlab_shell) { gitlab_shell } - end - - context 'when succeeds' do - it 'renames project and wiki repositories' do - service.execute + context 'repository migration' do + let(:repository_service) { Projects::HashedStorage::MigrateRepositoryService.new(project, subject.logger) } - expect(gitlab_shell.exists?(project.repository_storage_path, "#{hashed_storage.disk_path}.git")).to be_truthy - expect(gitlab_shell.exists?(project.repository_storage_path, "#{hashed_storage.disk_path}.wiki.git")).to be_truthy - end + it 'delegates migration to Projects::HashedStorage::MigrateRepositoryService' do + expect(Projects::HashedStorage::MigrateRepositoryService).to receive(:new).with(project, subject.logger).and_return(repository_service) + expect(repository_service).to receive(:execute) - it 'updates project to be hashed and not read-only' do service.execute - - expect(project.hashed_storage?(:repository)).to be_truthy - expect(project.repository_read_only).to be_falsey end - it 'move 
operation is called for both repositories' do - expect_move_repository(project.disk_path, hashed_storage.disk_path) - expect_move_repository("#{project.disk_path}.wiki", "#{hashed_storage.disk_path}.wiki") + it 'does not delegate migration if repository is already migrated' do + project.storage_version = ::Project::LATEST_STORAGE_VERSION + expect(Projects::HashedStorage::MigrateRepositoryService).not_to receive(:new) service.execute end end - context 'when one move fails' do - it 'rollsback repositories to original name' do - from_name = project.disk_path - to_name = hashed_storage.disk_path - allow(service).to receive(:move_repository).and_call_original - allow(service).to receive(:move_repository).with(from_name, to_name).once { false } # will disable first move only + context 'attachments migration' do + let(:attachments_service) { Projects::HashedStorage::MigrateAttachmentsService.new(project, subject.logger) } - expect(service).to receive(:rollback_folder_move).and_call_original + it 'delegates migration to Projects::HashedStorage::MigrateRepositoryService' do + expect(Projects::HashedStorage::MigrateAttachmentsService).to receive(:new).with(project, subject.logger).and_return(attachments_service) + expect(attachments_service).to receive(:execute) service.execute - - expect(gitlab_shell.exists?(project.repository_storage_path, "#{hashed_storage.disk_path}.git")).to be_falsey - expect(gitlab_shell.exists?(project.repository_storage_path, "#{hashed_storage.disk_path}.wiki.git")).to be_falsey end - context 'when rollback fails' do - before do - from_name = legacy_storage.disk_path - to_name = hashed_storage.disk_path + it 'does not delegate migration if attachments are already migrated' do + project.storage_version = ::Project::LATEST_STORAGE_VERSION + expect(Projects::HashedStorage::MigrateAttachmentsService).not_to receive(:new) - hashed_storage.ensure_storage_path_exists - gitlab_shell.mv_repository(project.repository_storage_path, from_name, to_name) - end - - it 'does not try to move nil repository over hashed' do - expect_move_repository("#{project.disk_path}.wiki", "#{hashed_storage.disk_path}.wiki") - - service.execute - end + service.execute end end - - def expect_move_repository(from_name, to_name) - expect(gitlab_shell).to receive(:mv_repository).with(project.repository_storage_path, from_name, to_name).and_call_original - end end end diff --git a/spec/services/projects/update_service_spec.rb b/spec/services/projects/update_service_spec.rb index 3da222e2ed8..fcd71857af3 100644 --- a/spec/services/projects/update_service_spec.rb +++ b/spec/services/projects/update_service_spec.rb @@ -1,198 +1,222 @@ require 'spec_helper' -describe Projects::UpdateService, '#execute' do +describe Projects::UpdateService do include ProjectForksHelper - let(:gitlab_shell) { Gitlab::Shell.new } let(:user) { create(:user) } - let(:admin) { create(:admin) } - let(:project) do create(:project, creator: user, namespace: user.namespace) end - context 'when changing visibility level' do - context 'when visibility_level is INTERNAL' do - it 'updates the project to internal' do - result = update_project(project, user, visibility_level: Gitlab::VisibilityLevel::INTERNAL) - - expect(result).to eq({ status: :success }) - expect(project).to be_internal - end - end - - context 'when visibility_level is PUBLIC' do - it 'updates the project to public' do - result = update_project(project, user, visibility_level: Gitlab::VisibilityLevel::PUBLIC) - expect(result).to eq({ status: :success }) - expect(project).to 
be_public - end - end - - context 'when visibility levels are restricted to PUBLIC only' do - before do - stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::PUBLIC]) - end + describe '#execute' do + let(:gitlab_shell) { Gitlab::Shell.new } + let(:admin) { create(:admin) } + context 'when changing visibility level' do context 'when visibility_level is INTERNAL' do it 'updates the project to internal' do result = update_project(project, user, visibility_level: Gitlab::VisibilityLevel::INTERNAL) + expect(result).to eq({ status: :success }) expect(project).to be_internal end end context 'when visibility_level is PUBLIC' do - it 'does not update the project to public' do + it 'updates the project to public' do result = update_project(project, user, visibility_level: Gitlab::VisibilityLevel::PUBLIC) + expect(result).to eq({ status: :success }) + expect(project).to be_public + end + end - expect(result).to eq({ status: :error, message: 'New visibility level not allowed!' }) - expect(project).to be_private + context 'when visibility levels are restricted to PUBLIC only' do + before do + stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::PUBLIC]) end - context 'when updated by an admin' do - it 'updates the project to public' do - result = update_project(project, admin, visibility_level: Gitlab::VisibilityLevel::PUBLIC) + context 'when visibility_level is INTERNAL' do + it 'updates the project to internal' do + result = update_project(project, user, visibility_level: Gitlab::VisibilityLevel::INTERNAL) expect(result).to eq({ status: :success }) - expect(project).to be_public + expect(project).to be_internal end end - end - end - context 'When project visibility is higher than parent group' do - let(:group) { create(:group, visibility_level: Gitlab::VisibilityLevel::INTERNAL) } + context 'when visibility_level is PUBLIC' do + it 'does not update the project to public' do + result = update_project(project, user, visibility_level: Gitlab::VisibilityLevel::PUBLIC) - before do - project.update(namespace: group, visibility_level: group.visibility_level) + expect(result).to eq({ status: :error, message: 'New visibility level not allowed!' }) + expect(project).to be_private + end + + context 'when updated by an admin' do + it 'updates the project to public' do + result = update_project(project, admin, visibility_level: Gitlab::VisibilityLevel::PUBLIC) + expect(result).to eq({ status: :success }) + expect(project).to be_public + end + end + end end - it 'does not update project visibility level' do - result = update_project(project, admin, visibility_level: Gitlab::VisibilityLevel::PUBLIC) + context 'When project visibility is higher than parent group' do + let(:group) { create(:group, visibility_level: Gitlab::VisibilityLevel::INTERNAL) } + + before do + project.update(namespace: group, visibility_level: group.visibility_level) + end + + it 'does not update project visibility level' do + result = update_project(project, admin, visibility_level: Gitlab::VisibilityLevel::PUBLIC) - expect(result).to eq({ status: :error, message: 'Visibility level public is not allowed in a internal group.' }) - expect(project.reload).to be_internal + expect(result).to eq({ status: :error, message: 'Visibility level public is not allowed in a internal group.' 
}) + expect(project.reload).to be_internal + end end end - end - describe 'when updating project that has forks' do - let(:project) { create(:project, :internal) } - let(:forked_project) { fork_project(project) } + describe 'when updating project that has forks' do + let(:project) { create(:project, :internal) } + let(:forked_project) { fork_project(project) } - it 'updates forks visibility level when parent set to more restrictive' do - opts = { visibility_level: Gitlab::VisibilityLevel::PRIVATE } + it 'updates forks visibility level when parent set to more restrictive' do + opts = { visibility_level: Gitlab::VisibilityLevel::PRIVATE } - expect(project).to be_internal - expect(forked_project).to be_internal + expect(project).to be_internal + expect(forked_project).to be_internal - expect(update_project(project, admin, opts)).to eq({ status: :success }) + expect(update_project(project, admin, opts)).to eq({ status: :success }) - expect(project).to be_private - expect(forked_project.reload).to be_private - end + expect(project).to be_private + expect(forked_project.reload).to be_private + end - it 'does not update forks visibility level when parent set to less restrictive' do - opts = { visibility_level: Gitlab::VisibilityLevel::PUBLIC } + it 'does not update forks visibility level when parent set to less restrictive' do + opts = { visibility_level: Gitlab::VisibilityLevel::PUBLIC } - expect(project).to be_internal - expect(forked_project).to be_internal + expect(project).to be_internal + expect(forked_project).to be_internal - expect(update_project(project, admin, opts)).to eq({ status: :success }) + expect(update_project(project, admin, opts)).to eq({ status: :success }) - expect(project).to be_public - expect(forked_project.reload).to be_internal + expect(project).to be_public + expect(forked_project.reload).to be_internal + end end - end - context 'when updating a default branch' do - let(:project) { create(:project, :repository) } + context 'when updating a default branch' do + let(:project) { create(:project, :repository) } - it 'changes a default branch' do - update_project(project, admin, default_branch: 'feature') + it 'changes a default branch' do + update_project(project, admin, default_branch: 'feature') - expect(Project.find(project.id).default_branch).to eq 'feature' - end + expect(Project.find(project.id).default_branch).to eq 'feature' + end - it 'does not change a default branch' do - # The branch 'unexisted-branch' does not exist. - update_project(project, admin, default_branch: 'unexisted-branch') + it 'does not change a default branch' do + # The branch 'unexisted-branch' does not exist. 
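The default-branch examples around this point assert one behaviour: pointing `default_branch` at a ref that does not exist in the repository leaves the current default (`master`) untouched. A minimal illustration of that guard (standalone Ruby, not the update service's real code):

```ruby
# Switching the default branch is a no-op when the requested branch is unknown.
def change_default_branch(current_default, branch_names, requested)
  branch_names.include?(requested) ? requested : current_default
end

change_default_branch('master', %w[master feature], 'feature')          # => "feature"
change_default_branch('master', %w[master feature], 'unexisted-branch') # => "master"
```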
+ update_project(project, admin, default_branch: 'unexisted-branch') - expect(Project.find(project.id).default_branch).to eq 'master' + expect(Project.find(project.id).default_branch).to eq 'master' + end end - end - context 'when updating a project that contains container images' do - before do - stub_container_registry_config(enabled: true) - stub_container_registry_tags(repository: /image/, tags: %w[rc1]) - create(:container_repository, project: project, name: :image) - end + context 'when updating a project that contains container images' do + before do + stub_container_registry_config(enabled: true) + stub_container_registry_tags(repository: /image/, tags: %w[rc1]) + create(:container_repository, project: project, name: :image) + end - it 'does not allow to rename the project' do - result = update_project(project, admin, path: 'renamed') + it 'does not allow to rename the project' do + result = update_project(project, admin, path: 'renamed') - expect(result).to include(status: :error) - expect(result[:message]).to match(/contains container registry tags/) - end + expect(result).to include(status: :error) + expect(result[:message]).to match(/contains container registry tags/) + end - it 'allows to update other settings' do - result = update_project(project, admin, public_builds: true) + it 'allows to update other settings' do + result = update_project(project, admin, public_builds: true) - expect(result[:status]).to eq :success - expect(project.reload.public_builds).to be true + expect(result[:status]).to eq :success + expect(project.reload.public_builds).to be true + end end - end - context 'when renaming a project' do - let(:repository_storage) { 'default' } - let(:repository_storage_path) { Gitlab.config.repositories.storages[repository_storage]['path'] } + context 'when renaming a project' do + let(:repository_storage) { 'default' } + let(:repository_storage_path) { Gitlab.config.repositories.storages[repository_storage]['path'] } - context 'with legacy storage' do - before do - gitlab_shell.add_repository(repository_storage, "#{user.namespace.full_path}/existing") - end + context 'with legacy storage' do + before do + gitlab_shell.add_repository(repository_storage, "#{user.namespace.full_path}/existing") + end + + after do + gitlab_shell.remove_repository(repository_storage_path, "#{user.namespace.full_path}/existing") + end + + it 'does not allow renaming when new path matches existing repository on disk' do + result = update_project(project, admin, path: 'existing') - after do - gitlab_shell.remove_repository(repository_storage_path, "#{user.namespace.full_path}/existing") + expect(result).to include(status: :error) + expect(result[:message]).to match('There is already a repository with that name on disk') + expect(project).not_to be_valid + expect(project.errors.messages).to have_key(:base) + expect(project.errors.messages[:base]).to include('There is already a repository with that name on disk') + end end - it 'does not allow renaming when new path matches existing repository on disk' do - result = update_project(project, admin, path: 'existing') + context 'with hashed storage' do + let(:project) { create(:project, :repository, creator: user, namespace: user.namespace) } - expect(result).to include(status: :error) - expect(result[:message]).to match('There is already a repository with that name on disk') - expect(project).not_to be_valid - expect(project.errors.messages).to have_key(:base) - expect(project.errors.messages[:base]).to include('There is already a repository with 
that name on disk') + before do + stub_application_setting(hashed_storage_enabled: true) + end + + it 'does not check if new path matches existing repository on disk' do + expect(project).not_to receive(:repository_with_same_path_already_exists?) + + result = update_project(project, admin, path: 'existing') + + expect(result).to include(status: :success) + end end end - context 'with hashed storage' do - let(:project) { create(:project, :repository, creator: user, namespace: user.namespace) } + context 'when passing invalid parameters' do + it 'returns an error result when record cannot be updated' do + result = update_project(project, admin, { name: 'foo&bar' }) - before do - stub_application_setting(hashed_storage_enabled: true) + expect(result).to eq({ + status: :error, + message: "Name can contain only letters, digits, emojis, '_', '.', dash, space. It must start with letter, digit, emoji or '_'." + }) end + end + end - it 'does not check if new path matches existing repository on disk' do - expect(project).not_to receive(:repository_with_same_path_already_exists?) + describe '#run_auto_devops_pipeline?' do + subject { described_class.new(project, user, params).run_auto_devops_pipeline? } - result = update_project(project, admin, path: 'existing') + context 'when neither pipeline setting is true' do + let(:params) { {} } - expect(result).to include(status: :success) - end + it { is_expected.to eq(false) } + end + + context 'when run_auto_devops_pipeline_explicit is true' do + let(:params) { { run_auto_devops_pipeline_explicit: 'true' } } + + it { is_expected.to eq(true) } end - end - context 'when passing invalid parameters' do - it 'returns an error result when record cannot be updated' do - result = update_project(project, admin, { name: 'foo&bar' }) + context 'when run_auto_devops_pipeline_implicit is true' do + let(:params) { { run_auto_devops_pipeline_implicit: 'true' } } - expect(result).to eq({ - status: :error, - message: "Name can contain only letters, digits, emojis, '_', '.', dash, space. It must start with letter, digit, emoji or '_'." 
- }) + it { is_expected.to eq(true) } end end diff --git a/spec/workers/create_pipeline_worker_spec.rb b/spec/workers/create_pipeline_worker_spec.rb new file mode 100644 index 00000000000..02cb0f46cb4 --- /dev/null +++ b/spec/workers/create_pipeline_worker_spec.rb @@ -0,0 +1,36 @@ +require 'spec_helper' + +describe CreatePipelineWorker do + describe '#perform' do + let(:worker) { described_class.new } + + context 'when a project not found' do + it 'does not call the Service' do + expect(Ci::CreatePipelineService).not_to receive(:new) + expect { worker.perform(99, create(:user).id, 'master', :web) }.to raise_error(ActiveRecord::RecordNotFound) + end + end + + context 'when a user not found' do + let(:project) { create(:project) } + + it 'does not call the Service' do + expect(Ci::CreatePipelineService).not_to receive(:new) + expect { worker.perform(project.id, 99, project.default_branch, :web) }.to raise_error(ActiveRecord::RecordNotFound) + end + end + + context 'when everything is ok' do + let(:project) { create(:project) } + let(:user) { create(:user) } + let(:create_pipeline_service) { instance_double(Ci::CreatePipelineService) } + + it 'calls the Service' do + expect(Ci::CreatePipelineService).to receive(:new).with(project, user, ref: project.default_branch).and_return(create_pipeline_service) + expect(create_pipeline_service).to receive(:execute).with(:web, any_args) + + worker.perform(project.id, user.id, project.default_branch, :web) + end + end + end +end diff --git a/spec/workers/project_migrate_hashed_storage_worker_spec.rb b/spec/workers/project_migrate_hashed_storage_worker_spec.rb index f5226dee0ad..2e3951e7afc 100644 --- a/spec/workers/project_migrate_hashed_storage_worker_spec.rb +++ b/spec/workers/project_migrate_hashed_storage_worker_spec.rb @@ -1,29 +1,53 @@ require 'spec_helper' -describe ProjectMigrateHashedStorageWorker do +describe ProjectMigrateHashedStorageWorker, :clean_gitlab_redis_shared_state do describe '#perform' do let(:project) { create(:project, :empty_repo) } let(:pending_delete_project) { create(:project, :empty_repo, pending_delete: true) } - it 'skips when project no longer exists' do - nonexistent_id = 999999999999 + context 'when have exclusive lease' do + before do + lease = subject.lease_for(project.id) - expect(::Projects::HashedStorageMigrationService).not_to receive(:new) - subject.perform(nonexistent_id) - end + allow(Gitlab::ExclusiveLease).to receive(:new).and_return(lease) + allow(lease).to receive(:try_obtain).and_return(true) + end + + it 'skips when project no longer exists' do + nonexistent_id = 999999999999 + + expect(::Projects::HashedStorageMigrationService).not_to receive(:new) + subject.perform(nonexistent_id) + end + + it 'skips when project is pending delete' do + expect(::Projects::HashedStorageMigrationService).not_to receive(:new) - it 'skips when project is pending delete' do - expect(::Projects::HashedStorageMigrationService).not_to receive(:new) + subject.perform(pending_delete_project.id) + end - subject.perform(pending_delete_project.id) + it 'delegates removal to service class' do + service = double('service') + expect(::Projects::HashedStorageMigrationService).to receive(:new).with(project, subject.logger).and_return(service) + expect(service).to receive(:execute) + + subject.perform(project.id) + end end - it 'delegates removal to service class' do - service = double('service') - expect(::Projects::HashedStorageMigrationService).to receive(:new).with(project, subject.logger).and_return(service) - expect(service).to 
receive(:execute) + context 'when dont have exclusive lease' do + before do + lease = subject.lease_for(project.id) + + allow(Gitlab::ExclusiveLease).to receive(:new).and_return(lease) + allow(lease).to receive(:try_obtain).and_return(false) + end + + it 'skips when dont have lease' do + expect(::Projects::HashedStorageMigrationService).not_to receive(:new) - subject.perform(project.id) + subject.perform(project.id) + end end end end |
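
The #run_auto_devops_pipeline? examples above only pin down observable behaviour: the predicate is false when neither pipeline parameter is present, and true when either run_auto_devops_pipeline_explicit or run_auto_devops_pipeline_implicit arrives as the string 'true'. The Ruby sketch below is one way the project update service under test could satisfy those examples; it is inferred from the spec alone rather than copied from the service, so the exact comparison against the string 'true' and the reliance on the params hash are assumptions.

# Sketch (assumption): a predicate over the params hash handed to the update service,
# returning true when either pipeline flag was submitted as the string 'true'.
def run_auto_devops_pipeline?
  params[:run_auto_devops_pipeline_explicit] == 'true' ||
    params[:run_auto_devops_pipeline_implicit] == 'true'
end

Comparing against the string rather than a boolean mirrors the spec, which passes both flags as 'true' the way they would arrive from a form submission.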
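
spec/workers/create_pipeline_worker_spec.rb is a new file, but the worker it exercises is not shown in this part of the diff. The sketch below reconstructs the minimal shape its expectations imply: look up the project and the user with find (letting ActiveRecord::RecordNotFound propagate for unknown ids), then delegate to Ci::CreatePipelineService with the ref as a keyword argument and the pipeline source as the first argument to execute. The ApplicationWorker include and the trailing splat are assumptions, not taken from the diff.

# Hypothetical reconstruction of the worker under test, inferred only from the new spec.
class CreatePipelineWorker
  include ApplicationWorker # assumption: the usual GitLab worker mixin

  def perform(project_id, user_id, ref, source, *params)
    # Project.find / User.find raise ActiveRecord::RecordNotFound for unknown ids, which is
    # what the two "not found" contexts rely on to assert the service is never instantiated.
    project = Project.find(project_id)
    user = User.find(user_id)

    Ci::CreatePipelineService
      .new(project, user, ref: ref)
      .execute(source, *params)
  end
end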
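
The reworked ProjectMigrateHashedStorageWorker spec stubs subject.lease_for(project.id) and Gitlab::ExclusiveLease#try_obtain, so #perform is now guarded by an exclusive lease and returns early when the lease cannot be obtained. A plausible shape of that guard is sketched below; only lease_for, try_obtain, the pending-delete check and the delegation to Projects::HashedStorageMigrationService are taken from the spec, while the lease key format and timeout are illustrative assumptions.

# Sketch of the lease-guarded worker the updated spec describes; the key name and
# timeout are assumed values, not taken from this diff.
class ProjectMigrateHashedStorageWorker
  include ApplicationWorker # assumption

  LEASE_TIMEOUT = 30.seconds.to_i # assumed duration

  def perform(project_id)
    # Covers the 'when dont have exclusive lease' context: without the lease, do nothing.
    return unless lease_for(project_id).try_obtain

    project = Project.find_by(id: project_id)
    return if project.nil? || project.pending_delete?

    ::Projects::HashedStorageMigrationService.new(project, logger).execute
  end

  def lease_for(project_id)
    Gitlab::ExclusiveLease.new("project_migrate_hashed_storage:#{project_id}", timeout: LEASE_TIMEOUT)
  end
end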
