author     GitLab Bot <gitlab-bot@gitlab.com>  2020-01-21 14:21:10 +0000
committer  GitLab Bot <gitlab-bot@gitlab.com>  2020-01-21 14:21:10 +0000
commit     cb0d23c455b73486fd1015f8ca9479b5b7e3585d (patch)
tree       d7dc129a407fd74266d2dc561bebf24665197c2f /app/services
parent     c3e911be175c0aabfea1eb030f9e0ef23f5f3887 (diff)
download   gitlab-ce-cb0d23c455b73486fd1015f8ca9479b5b7e3585d.tar.gz
Add latest changes from gitlab-org/gitlab@12-7-stable-ee
Diffstat (limited to 'app/services')
-rw-r--r--  app/services/akismet_service.rb  17
-rw-r--r--  app/services/boards/issues/list_service.rb  4
-rw-r--r--  app/services/boards/list_service.rb  17
-rw-r--r--  app/services/ci/create_pipeline_service.rb  7
-rw-r--r--  app/services/ci/pipeline_processing/atomic_processing_service.rb  118
-rw-r--r--  app/services/ci/pipeline_processing/atomic_processing_service/status_collection.rb  135
-rw-r--r--  app/services/ci/pipeline_processing/legacy_processing_service.rb  119
-rw-r--r--  app/services/ci/prepare_build_service.rb  2
-rw-r--r--  app/services/ci/process_pipeline_service.rb  108
-rw-r--r--  app/services/ci/resource_groups/assign_resource_from_resource_group_service.rb  17
-rw-r--r--  app/services/ci/retry_build_service.rb  15
-rw-r--r--  app/services/clusters/applications/base_service.rb  10
-rw-r--r--  app/services/clusters/applications/check_installation_progress_service.rb  2
-rw-r--r--  app/services/clusters/kubernetes/create_or_update_namespace_service.rb  5
-rw-r--r--  app/services/clusters/kubernetes/create_or_update_service_account_service.rb  11
-rw-r--r--  app/services/concerns/akismet_methods.rb  25
-rw-r--r--  app/services/concerns/spam_check_methods.rb (renamed from app/services/spam_check_service.rb)  8
-rw-r--r--  app/services/container_expiration_policy_service.rb  15
-rw-r--r--  app/services/create_snippet_service.rb  35
-rw-r--r--  app/services/deployments/after_create_service.rb  9
-rw-r--r--  app/services/deployments/link_merge_requests_service.rb  5
-rw-r--r--  app/services/error_tracking/issue_update_service.rb  22
-rw-r--r--  app/services/event_create_service.rb  2
-rw-r--r--  app/services/git/base_hooks_service.rb  2
-rw-r--r--  app/services/ham_service.rb  4
-rw-r--r--  app/services/issuable/clone/attributes_rewriter.rb  15
-rw-r--r--  app/services/issues/create_service.rb  2
-rw-r--r--  app/services/issues/update_service.rb  2
-rw-r--r--  app/services/members/update_service.rb  3
-rw-r--r--  app/services/merge_requests/get_urls_service.rb  2
-rw-r--r--  app/services/merge_requests/rebase_service.rb  5
-rw-r--r--  app/services/metrics/dashboard/clone_dashboard_service.rb  113
-rw-r--r--  app/services/metrics/sample_metrics_service.rb  16
-rw-r--r--  app/services/notes/create_service.rb  14
-rw-r--r--  app/services/notes/destroy_service.rb  2
-rw-r--r--  app/services/pages_domains/create_acme_order_service.rb  5
-rw-r--r--  app/services/projects/operations/update_service.rb  2
-rw-r--r--  app/services/projects/update_pages_service.rb  13
-rw-r--r--  app/services/prometheus/adapter_service.rb  35
-rw-r--r--  app/services/prometheus/proxy_service.rb  12
-rw-r--r--  app/services/prometheus/proxy_variable_substitution_service.rb  55
-rw-r--r--  app/services/quick_actions/interpret_service.rb  4
-rw-r--r--  app/services/releases/update_service.rb  13
-rw-r--r--  app/services/resource_events/base_synthetic_notes_builder_service.rb  39
-rw-r--r--  app/services/resource_events/merge_into_notes_service.rb  43
-rw-r--r--  app/services/resource_events/synthetic_label_notes_builder_service.rb  27
-rw-r--r--  app/services/search_service.rb  11
-rw-r--r--  app/services/snippets/base_service.rb  15
-rw-r--r--  app/services/snippets/create_service.rb  40
-rw-r--r--  app/services/snippets/destroy_service.rb  48
-rw-r--r--  app/services/snippets/update_service.rb  36
-rw-r--r--  app/services/spam/mark_as_spam_service.rb  24
-rw-r--r--  app/services/spam_service.rb  35
-rw-r--r--  app/services/suggestions/apply_service.rb  37
-rw-r--r--  app/services/system_hooks_service.rb  2
-rw-r--r--  app/services/system_note_service.rb  29
-rw-r--r--  app/services/system_notes/time_tracking_service.rb  73
-rw-r--r--  app/services/template_engines/liquid_service.rb  48
-rw-r--r--  app/services/update_snippet_service.rb  36
-rw-r--r--  app/services/users/activity_service.rb  3
-rw-r--r--  app/services/users/build_service.rb  8
-rw-r--r--  app/services/users/destroy_service.rb  7
-rw-r--r--  app/services/users/update_service.rb  10
63 files changed, 1214 insertions, 384 deletions
diff --git a/app/services/akismet_service.rb b/app/services/akismet_service.rb
index 63be3c371ec..d8098c4a8f5 100644
--- a/app/services/akismet_service.rb
+++ b/app/services/akismet_service.rb
@@ -1,10 +1,11 @@
# frozen_string_literal: true
class AkismetService
- attr_accessor :owner, :text, :options
+ attr_accessor :text, :options
- def initialize(owner, text, options = {})
- @owner = owner
+ def initialize(owner_name, owner_email, text, options = {})
+ @owner_name = owner_name
+ @owner_email = owner_email
@text = text
@options = options
end
@@ -16,8 +17,8 @@ class AkismetService
type: 'comment',
text: text,
created_at: DateTime.now,
- author: owner.name,
- author_email: owner.email,
+ author: owner_name,
+ author_email: owner_email,
referrer: options[:referrer]
}
@@ -40,6 +41,8 @@ class AkismetService
private
+ attr_accessor :owner_name, :owner_email
+
def akismet_client
@akismet_client ||= ::Akismet::Client.new(Gitlab::CurrentSettings.akismet_api_key,
Gitlab.config.gitlab.url)
@@ -55,8 +58,8 @@ class AkismetService
params = {
type: 'comment',
text: text,
- author: owner.name,
- author_email: owner.email
+ author: owner_name,
+ author_email: owner_email
}
begin
diff --git a/app/services/boards/issues/list_service.rb b/app/services/boards/issues/list_service.rb
index 37a74cd1b00..a9240e1d8a0 100644
--- a/app/services/boards/issues/list_service.rb
+++ b/app/services/boards/issues/list_service.rb
@@ -5,6 +5,10 @@ module Boards
class ListService < Boards::BaseService
include Gitlab::Utils::StrongMemoize
+ def self.valid_params
+ IssuesFinder.valid_params
+ end
+
def execute
fetch_issues.order_by_position_and_priority
end
diff --git a/app/services/boards/list_service.rb b/app/services/boards/list_service.rb
index 44d5a21b15f..8258d5d07d3 100644
--- a/app/services/boards/list_service.rb
+++ b/app/services/boards/list_service.rb
@@ -4,13 +4,24 @@ module Boards
class ListService < Boards::BaseService
def execute
create_board! if parent.boards.empty?
- boards
+
+ if parent.multiple_issue_boards_available?
+ boards
+ else
+ # When multiple issue boards are not available
+ # a user is only allowed to view the default shown board
+ first_board
+ end
end
private
def boards
- parent.boards
+ parent.boards.order_by_name_asc
+ end
+
+ def first_board
+ parent.boards.first_board
end
def create_board!
@@ -18,5 +29,3 @@ module Boards
end
end
end
-
-Boards::ListService.prepend_if_ee('EE::Boards::ListService')
diff --git a/app/services/ci/create_pipeline_service.rb b/app/services/ci/create_pipeline_service.rb
index ce3a9eb0772..2daf3a51235 100644
--- a/app/services/ci/create_pipeline_service.rb
+++ b/app/services/ci/create_pipeline_service.rb
@@ -23,7 +23,7 @@ module Ci
Gitlab::Ci::Pipeline::Chain::Limit::JobActivity].freeze
# rubocop: disable Metrics/ParameterLists
- def execute(source, ignore_skip_ci: false, save_on_errors: true, trigger_request: nil, schedule: nil, merge_request: nil, external_pull_request: nil, **options, &block)
+ def execute(source, ignore_skip_ci: false, save_on_errors: true, trigger_request: nil, schedule: nil, merge_request: nil, external_pull_request: nil, bridge: nil, **options, &block)
@pipeline = Ci::Pipeline.new
command = Gitlab::Ci::Pipeline::Chain::Command.new(
@@ -46,6 +46,7 @@ module Ci
current_user: current_user,
push_options: params[:push_options] || {},
chat_data: params[:chat_data],
+ bridge: bridge,
**extra_options(options))
sequence = Gitlab::Ci::Pipeline::Chain::Sequence
@@ -104,14 +105,14 @@ module Ci
if Feature.enabled?(:ci_support_interruptible_pipelines, project, default_enabled: true)
project.ci_pipelines
.where(ref: pipeline.ref)
- .where.not(id: pipeline.id)
+ .where.not(id: pipeline.same_family_pipeline_ids)
.where.not(sha: project.commit(pipeline.ref).try(:id))
.alive_or_scheduled
.with_only_interruptible_builds
else
project.ci_pipelines
.where(ref: pipeline.ref)
- .where.not(id: pipeline.id)
+ .where.not(id: pipeline.same_family_pipeline_ids)
.where.not(sha: project.commit(pipeline.ref).try(:id))
.created_or_pending
end
diff --git a/app/services/ci/pipeline_processing/atomic_processing_service.rb b/app/services/ci/pipeline_processing/atomic_processing_service.rb
new file mode 100644
index 00000000000..1ed295f5950
--- /dev/null
+++ b/app/services/ci/pipeline_processing/atomic_processing_service.rb
@@ -0,0 +1,118 @@
+# frozen_string_literal: true
+
+module Ci
+ module PipelineProcessing
+ class AtomicProcessingService
+ include Gitlab::Utils::StrongMemoize
+ include ExclusiveLeaseGuard
+
+ attr_reader :pipeline
+
+ DEFAULT_LEASE_TIMEOUT = 1.minute
+ BATCH_SIZE = 20
+
+ def initialize(pipeline)
+ @pipeline = pipeline
+ @collection = AtomicProcessingService::StatusCollection.new(pipeline)
+ end
+
+ def execute
+ return unless pipeline.needs_processing?
+
+ success = try_obtain_lease { process! }
+
+ # re-schedule if we need further processing
+ if success && pipeline.needs_processing?
+ PipelineProcessWorker.perform_async(pipeline.id)
+ end
+
+ success
+ end
+
+ private
+
+ def process!
+ update_stages!
+ update_pipeline!
+ update_statuses_processed!
+
+ true
+ end
+
+ def update_stages!
+ pipeline.stages.ordered.each(&method(:update_stage!))
+ end
+
+ def update_stage!(stage)
+ # Update processables for a given stage in bulk/slices
+ ids = @collection.created_processable_ids_for_stage_position(stage.position)
+ ids.in_groups_of(BATCH_SIZE, false, &method(:update_processables!))
+
+ status = @collection.status_for_stage_position(stage.position)
+ stage.set_status(status)
+ end
+
+ def update_processables!(ids)
+ created_processables = pipeline.processables.for_ids(ids)
+ .with_project_preload
+ .created
+ .latest
+ .ordered_by_stage
+ .select_with_aggregated_needs(project)
+
+ created_processables.each(&method(:update_processable!))
+ end
+
+ def update_pipeline!
+ pipeline.set_status(@collection.status_of_all)
+ end
+
+ def update_statuses_processed!
+ processing = @collection.processing_processables
+ processing.each_slice(BATCH_SIZE) do |slice|
+ pipeline.statuses.match_id_and_lock_version(slice)
+ .update_as_processed!
+ end
+ end
+
+ def update_processable!(processable)
+ status = processable_status(processable)
+ return unless HasStatus::COMPLETED_STATUSES.include?(status)
+
+ # transition status if possible
+ Gitlab::OptimisticLocking.retry_lock(processable) do |subject|
+ Ci::ProcessBuildService.new(project, subject.user)
+ .execute(subject, status)
+
+ # update the internal representation of the status
+ # so that the processable's status change
+ # is taken into account during further processing
+ @collection.set_processable_status(
+ processable.id, processable.status, processable.lock_version)
+ end
+ end
+
+ def processable_status(processable)
+ if needs_names = processable.aggregated_needs_names
+ # Processable uses DAG, get status of all dependent needs
+ @collection.status_for_names(needs_names)
+ else
+ # Processable uses Stages, get status of prior stage
+ @collection.status_for_prior_stage_position(processable.stage_idx.to_i)
+ end
+ end
+
+ def project
+ pipeline.project
+ end
+
+ def lease_key
+ "#{super}::pipeline_id:#{pipeline.id}"
+ end
+
+ def lease_timeout
+ DEFAULT_LEASE_TIMEOUT
+ end
+ end
+ end
+end
diff --git a/app/services/ci/pipeline_processing/atomic_processing_service/status_collection.rb b/app/services/ci/pipeline_processing/atomic_processing_service/status_collection.rb
new file mode 100644
index 00000000000..42e38a5c80f
--- /dev/null
+++ b/app/services/ci/pipeline_processing/atomic_processing_service/status_collection.rb
@@ -0,0 +1,135 @@
+# frozen_string_literal: true
+
+module Ci
+ module PipelineProcessing
+ class AtomicProcessingService
+ class StatusCollection
+ include Gitlab::Utils::StrongMemoize
+
+ attr_reader :pipeline
+
+ # We use these columns to perform an efficient
+ # calculation of a status
+ STATUSES_COLUMNS = [
+ :id, :name, :status, :allow_failure,
+ :stage_idx, :processed, :lock_version
+ ].freeze
+
+ def initialize(pipeline)
+ @pipeline = pipeline
+ @stage_statuses = {}
+ @prior_stage_statuses = {}
+ end
+
+ # This method updates the internal status for the given ID
+ def set_processable_status(id, status, lock_version)
+ processable = all_statuses_by_id[id]
+ return unless processable
+
+ processable[:status] = status
+ processable[:lock_version] = lock_version
+ end
+
+ # This method gets the composite status of all processables
+ def status_of_all
+ status_for_array(all_statuses)
+ end
+
+ # This method gets the composite status for processables with the given names
+ def status_for_names(names)
+ name_statuses = all_statuses_by_name.slice(*names)
+
+ status_for_array(name_statuses.values)
+ end
+
+ # This method gets the composite status for processables before the given stage
+ def status_for_prior_stage_position(position)
+ strong_memoize("status_for_prior_stage_position_#{position}") do
+ stage_statuses = all_statuses_grouped_by_stage_position
+ .select { |stage_position, _| stage_position < position }
+
+ status_for_array(stage_statuses.values.flatten)
+ end
+ end
+
+ # This method gets the IDs of created processables for a given stage
+ def created_processable_ids_for_stage_position(current_position)
+ all_statuses_grouped_by_stage_position[current_position]
+ .to_a
+ .select { |processable| processable[:status] == 'created' }
+ .map { |processable| processable[:id] }
+ end
+
+ # This method gets the composite status for processables at a given stage
+ def status_for_stage_position(current_position)
+ strong_memoize("status_for_stage_position_#{current_position}") do
+ stage_statuses = all_statuses_grouped_by_stage_position[current_position].to_a
+
+ status_for_array(stage_statuses.flatten)
+ end
+ end
+
+ # This method returns a list of all processables that still need to be processed
+ def processing_processables
+ all_statuses.lazy.reject { |status| status[:processed] }
+ end
+
+ private
+
+ def status_for_array(statuses)
+ result = Gitlab::Ci::Status::Composite
+ .new(statuses)
+ .status
+ result || 'success'
+ end
+
+ def all_statuses_grouped_by_stage_position
+ strong_memoize(:all_statuses_by_order) do
+ all_statuses.group_by { |status| status[:stage_idx].to_i }
+ end
+ end
+
+ def all_statuses_by_id
+ strong_memoize(:all_statuses_by_id) do
+ all_statuses.map do |row|
+ [row[:id], row]
+ end.to_h
+ end
+ end
+
+ def all_statuses_by_name
+ strong_memoize(:statuses_by_name) do
+ all_statuses.map do |row|
+ [row[:name], row]
+ end.to_h
+ end
+ end
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def all_statuses
+ # We fetch all relevant data in one go.
+ #
+ # This is more efficient than relying
+ # on PostgreSQL to calculate composite status
+ # for us
+ #
+ # Since we need to reprocess everything
+ # we can fetch all of them and do processing
+ # ourselves.
+ strong_memoize(:all_statuses) do
+ raw_statuses = pipeline
+ .statuses
+ .latest
+ .ordered_by_stage
+ .pluck(*STATUSES_COLUMNS)
+
+ raw_statuses.map do |row|
+ STATUSES_COLUMNS.zip(row).to_h
+ end
+ end
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+ end
+ end
+ end
+end
diff --git a/app/services/ci/pipeline_processing/legacy_processing_service.rb b/app/services/ci/pipeline_processing/legacy_processing_service.rb
new file mode 100644
index 00000000000..400dc9f0abb
--- /dev/null
+++ b/app/services/ci/pipeline_processing/legacy_processing_service.rb
@@ -0,0 +1,119 @@
+# frozen_string_literal: true
+
+module Ci
+ module PipelineProcessing
+ class LegacyProcessingService
+ include Gitlab::Utils::StrongMemoize
+
+ attr_reader :pipeline
+
+ def initialize(pipeline)
+ @pipeline = pipeline
+ end
+
+ def execute(trigger_build_ids = nil)
+ success = process_stages_without_needs
+
+ # we evaluate dependent needs,
+ # only when another job has finished
+ success = process_builds_with_needs(trigger_build_ids) || success
+
+ @pipeline.update_legacy_status
+
+ success
+ end
+
+ private
+
+ def process_stages_without_needs
+ stage_indexes_of_created_processables_without_needs.flat_map do |index|
+ process_stage_without_needs(index)
+ end.any?
+ end
+
+ def process_stage_without_needs(index)
+ current_status = status_for_prior_stages(index)
+
+ return unless HasStatus::COMPLETED_STATUSES.include?(current_status)
+
+ created_processables_in_stage_without_needs(index).find_each.select do |build|
+ process_build(build, current_status)
+ end.any?
+ end
+
+ def process_builds_with_needs(trigger_build_ids)
+ return false unless trigger_build_ids.present?
+ return false unless Feature.enabled?(:ci_dag_support, project, default_enabled: true)
+
+ # we find processables that are dependent:
+ # 1. because of current dependency,
+ trigger_build_names = pipeline.processables.latest
+ .for_ids(trigger_build_ids).names
+
+ # 2. and that do not need builds that are not yet complete
+ incomplete_build_names = pipeline.processables.latest
+ .incomplete.names
+
+ # Each found processable is guaranteed here to have completed status
+ created_processables
+ .with_needs(trigger_build_names)
+ .without_needs(incomplete_build_names)
+ .find_each
+ .map(&method(:process_build_with_needs))
+ .any?
+ end
+
+ def process_build_with_needs(build)
+ current_status = status_for_build_needs(build.needs.map(&:name))
+
+ return unless HasStatus::COMPLETED_STATUSES.include?(current_status)
+
+ process_build(build, current_status)
+ end
+
+ def process_build(build, current_status)
+ Gitlab::OptimisticLocking.retry_lock(build) do |subject|
+ Ci::ProcessBuildService.new(project, subject.user)
+ .execute(subject, current_status)
+ end
+ end
+
+ def status_for_prior_stages(index)
+ pipeline.processables.status_for_prior_stages(index)
+ end
+
+ def status_for_build_needs(needs)
+ pipeline.processables.status_for_names(needs)
+ end
+
+ # rubocop: disable CodeReuse/ActiveRecord
+ def stage_indexes_of_created_processables_without_needs
+ created_processables_without_needs.order(:stage_idx)
+ .pluck(Arel.sql('DISTINCT stage_idx'))
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+
+ def created_processables_in_stage_without_needs(index)
+ created_processables_without_needs
+ .with_preloads
+ .for_stage(index)
+ end
+
+ def created_processables_without_needs
+ if Feature.enabled?(:ci_dag_support, project, default_enabled: true)
+ pipeline.processables.created.without_needs
+ else
+ pipeline.processables.created
+ end
+ end
+
+ def created_processables
+ pipeline.processables.created
+ end
+
+ def project
+ pipeline.project
+ end
+ end
+ end
+end
diff --git a/app/services/ci/prepare_build_service.rb b/app/services/ci/prepare_build_service.rb
index 5d024c45e5f..3f87c711270 100644
--- a/app/services/ci/prepare_build_service.rb
+++ b/app/services/ci/prepare_build_service.rb
@@ -11,7 +11,7 @@ module Ci
def execute
prerequisites.each(&:complete!)
- build.enqueue!
+ build.enqueue_preparing!
rescue => e
Gitlab::ErrorTracking.track_exception(e, build_id: build.id)
diff --git a/app/services/ci/process_pipeline_service.rb b/app/services/ci/process_pipeline_service.rb
index f33cbf7ab29..1ecef256233 100644
--- a/app/services/ci/process_pipeline_service.rb
+++ b/app/services/ci/process_pipeline_service.rb
@@ -2,8 +2,6 @@
module Ci
class ProcessPipelineService
- include Gitlab::Utils::StrongMemoize
-
attr_reader :pipeline
def initialize(pipeline)
@@ -13,104 +11,18 @@ module Ci
def execute(trigger_build_ids = nil)
update_retried
- success = process_stages_without_needs
-
- # we evaluate dependent needs,
- # only when the another job has finished
- success = process_builds_with_needs(trigger_build_ids) || success
-
- @pipeline.update_status
-
- success
- end
-
- private
-
- def process_stages_without_needs
- stage_indexes_of_created_processables_without_needs.flat_map do |index|
- process_stage_without_needs(index)
- end.any?
- end
-
- def process_stage_without_needs(index)
- current_status = status_for_prior_stages(index)
-
- return unless HasStatus::COMPLETED_STATUSES.include?(current_status)
-
- created_processables_in_stage_without_needs(index).find_each.select do |build|
- process_build(build, current_status)
- end.any?
- end
-
- def process_builds_with_needs(trigger_build_ids)
- return false unless trigger_build_ids.present?
- return false unless Feature.enabled?(:ci_dag_support, project, default_enabled: true)
-
- # we find processables that are dependent:
- # 1. because of current dependency,
- trigger_build_names = pipeline.processables.latest
- .for_ids(trigger_build_ids).names
-
- # 2. does not have builds that not yet complete
- incomplete_build_names = pipeline.processables.latest
- .incomplete.names
-
- # Each found processable is guaranteed here to have completed status
- created_processables
- .with_needs(trigger_build_names)
- .without_needs(incomplete_build_names)
- .find_each
- .map(&method(:process_build_with_needs))
- .any?
- end
-
- def process_build_with_needs(build)
- current_status = status_for_build_needs(build.needs.map(&:name))
-
- return unless HasStatus::COMPLETED_STATUSES.include?(current_status)
-
- process_build(build, current_status)
- end
-
- def process_build(build, current_status)
- Gitlab::OptimisticLocking.retry_lock(build) do |subject|
- Ci::ProcessBuildService.new(project, build.user)
- .execute(subject, current_status)
- end
- end
-
- def status_for_prior_stages(index)
- pipeline.processables.status_for_prior_stages(index)
- end
-
- def status_for_build_needs(needs)
- pipeline.processables.status_for_names(needs)
- end
-
- # rubocop: disable CodeReuse/ActiveRecord
- def stage_indexes_of_created_processables_without_needs
- created_processables_without_needs.order(:stage_idx)
- .pluck(Arel.sql('DISTINCT stage_idx'))
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
- def created_processables_in_stage_without_needs(index)
- created_processables_without_needs
- .with_preloads
- .for_stage(index)
- end
-
- def created_processables_without_needs
- if Feature.enabled?(:ci_dag_support, project, default_enabled: true)
- pipeline.processables.created.without_needs
+ if Feature.enabled?(:ci_atomic_processing, pipeline.project)
+ Ci::PipelineProcessing::AtomicProcessingService
+ .new(pipeline)
+ .execute
else
- pipeline.processables.created
+ Ci::PipelineProcessing::LegacyProcessingService
+ .new(pipeline)
+ .execute(trigger_build_ids)
end
end
- def created_processables
- pipeline.processables.created
- end
+ private
# This method is for compatibility and data consistency and should be removed with 9.3 version of GitLab
# This replicates what is db/post_migrate/20170416103934_upate_retried_for_ci_build.rb
@@ -131,9 +43,5 @@ module Ci
.update_all(retried: true) if latest_statuses.any?
end
# rubocop: enable CodeReuse/ActiveRecord
-
- def project
- pipeline.project
- end
end
end
diff --git a/app/services/ci/resource_groups/assign_resource_from_resource_group_service.rb b/app/services/ci/resource_groups/assign_resource_from_resource_group_service.rb
new file mode 100644
index 00000000000..a4bcca8e8b3
--- /dev/null
+++ b/app/services/ci/resource_groups/assign_resource_from_resource_group_service.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+module Ci
+ module ResourceGroups
+ class AssignResourceFromResourceGroupService < ::BaseService
+ # rubocop: disable CodeReuse/ActiveRecord
+ def execute(resource_group)
+ free_resources = resource_group.resources.free.count
+
+ resource_group.builds.waiting_for_resource.take(free_resources).each do |build|
+ build.enqueue_waiting_for_resource
+ end
+ end
+ # rubocop: enable CodeReuse/ActiveRecord
+ end
+ end
+end
diff --git a/app/services/ci/retry_build_service.rb b/app/services/ci/retry_build_service.rb
index 7a5e33c61ba..1f00d54b6a7 100644
--- a/app/services/ci/retry_build_service.rb
+++ b/app/services/ci/retry_build_service.rb
@@ -5,13 +5,13 @@ module Ci
CLONE_ACCESSORS = %i[pipeline project ref tag options name
allow_failure stage stage_id stage_idx trigger_request
yaml_variables when environment coverage_regex
- description tag_list protected needs].freeze
+ description tag_list protected needs resource_group].freeze
def execute(build)
reprocess!(build).tap do |new_build|
build.pipeline.mark_as_processable_after_stage(build.stage_idx)
- new_build.enqueue!
+ Gitlab::OptimisticLocking.retry_lock(new_build, &:enqueue)
MergeRequests::AddTodoWhenBuildFailsService
.new(project, current_user)
@@ -31,15 +31,17 @@ module Ci
attributes.push([:user, current_user])
- build.retried = true
-
Ci::Build.transaction do
# mark all other builds of that name as retried
build.pipeline.builds.latest
.where(name: build.name)
- .update_all(retried: true)
+ .update_all(retried: true, processed: true)
- create_build!(attributes)
+ create_build!(attributes).tap do
+ # mark existing object as retried/processed without a reload
+ build.retried = true
+ build.processed = true
+ end
end
end
# rubocop: enable CodeReuse/ActiveRecord
@@ -49,6 +51,7 @@ module Ci
def create_build!(attributes)
build = project.builds.new(Hash[attributes])
build.deployment = ::Gitlab::Ci::Pipeline::Seed::Deployment.new(build).to_resource
+ build.retried = false
build.save!
build
end
diff --git a/app/services/clusters/applications/base_service.rb b/app/services/clusters/applications/base_service.rb
index c9f7917938f..844da11e5cb 100644
--- a/app/services/clusters/applications/base_service.rb
+++ b/app/services/clusters/applications/base_service.rb
@@ -19,10 +19,6 @@ module Clusters
application.hostname = params[:hostname]
end
- if application.has_attribute?(:kibana_hostname)
- application.kibana_hostname = params[:kibana_hostname]
- end
-
if application.has_attribute?(:email)
application.email = params[:email]
end
@@ -31,6 +27,10 @@ module Clusters
application.stack = params[:stack]
end
+ if application.has_attribute?(:modsecurity_enabled)
+ application.modsecurity_enabled = params[:modsecurity_enabled] || false
+ end
+
if application.respond_to?(:oauth_application)
application.oauth_application = create_oauth_application(application, request)
end
@@ -68,7 +68,7 @@ module Clusters
end
def invalid_application?
- unknown_application? || (application_name == Applications::ElasticStack.application_name && !Feature.enabled?(:enable_cluster_application_elastic_stack)) || (application_name == Applications::Crossplane.application_name && !Feature.enabled?(:enable_cluster_application_crossplane))
+ unknown_application? || (application_name == Applications::ElasticStack.application_name && !Feature.enabled?(:enable_cluster_application_elastic_stack))
end
def unknown_application?
diff --git a/app/services/clusters/applications/check_installation_progress_service.rb b/app/services/clusters/applications/check_installation_progress_service.rb
index 1ce6e0c1cb0..7d064abfaa3 100644
--- a/app/services/clusters/applications/check_installation_progress_service.rb
+++ b/app/services/clusters/applications/check_installation_progress_service.rb
@@ -11,6 +11,8 @@ module Clusters
def on_success
app.make_installed!
+
+ Gitlab::Tracking.event('cluster:applications', "cluster_application_#{app.name}_installed")
ensure
remove_installation_pod
end
diff --git a/app/services/clusters/kubernetes/create_or_update_namespace_service.rb b/app/services/clusters/kubernetes/create_or_update_namespace_service.rb
index 15be8446cc0..c6c7eb99bf3 100644
--- a/app/services/clusters/kubernetes/create_or_update_namespace_service.rb
+++ b/app/services/clusters/kubernetes/create_or_update_namespace_service.rb
@@ -21,10 +21,15 @@ module Clusters
attr_reader :cluster, :kubernetes_namespace, :platform
def create_project_service_account
+ environment_slug = kubernetes_namespace.environment&.slug
+ namespace_labels = { 'app.gitlab.com/app' => kubernetes_namespace.project.full_path_slug }
+ namespace_labels['app.gitlab.com/env'] = environment_slug if environment_slug
+
Clusters::Kubernetes::CreateOrUpdateServiceAccountService.namespace_creator(
platform.kubeclient,
service_account_name: kubernetes_namespace.service_account_name,
service_account_namespace: kubernetes_namespace.namespace,
+ service_account_namespace_labels: namespace_labels,
rbac: platform.rbac?
).execute
end
diff --git a/app/services/clusters/kubernetes/create_or_update_service_account_service.rb b/app/services/clusters/kubernetes/create_or_update_service_account_service.rb
index d798dcdcfd3..b1820474c9d 100644
--- a/app/services/clusters/kubernetes/create_or_update_service_account_service.rb
+++ b/app/services/clusters/kubernetes/create_or_update_service_account_service.rb
@@ -3,10 +3,11 @@
module Clusters
module Kubernetes
class CreateOrUpdateServiceAccountService
- def initialize(kubeclient, service_account_name:, service_account_namespace:, token_name:, rbac:, namespace_creator: false, role_binding_name: nil)
+ def initialize(kubeclient, service_account_name:, service_account_namespace:, service_account_namespace_labels: nil, token_name:, rbac:, namespace_creator: false, role_binding_name: nil)
@kubeclient = kubeclient
@service_account_name = service_account_name
@service_account_namespace = service_account_namespace
+ @service_account_namespace_labels = service_account_namespace_labels
@token_name = token_name
@rbac = rbac
@namespace_creator = namespace_creator
@@ -23,11 +24,12 @@ module Clusters
)
end
- def self.namespace_creator(kubeclient, service_account_name:, service_account_namespace:, rbac:)
+ def self.namespace_creator(kubeclient, service_account_name:, service_account_namespace:, service_account_namespace_labels:, rbac:)
self.new(
kubeclient,
service_account_name: service_account_name,
service_account_namespace: service_account_namespace,
+ service_account_namespace_labels: service_account_namespace_labels,
token_name: "#{service_account_namespace}-token",
rbac: rbac,
namespace_creator: true,
@@ -55,12 +57,13 @@ module Clusters
private
- attr_reader :kubeclient, :service_account_name, :service_account_namespace, :token_name, :rbac, :namespace_creator, :role_binding_name
+ attr_reader :kubeclient, :service_account_name, :service_account_namespace, :service_account_namespace_labels, :token_name, :rbac, :namespace_creator, :role_binding_name
def ensure_project_namespace_exists
Gitlab::Kubernetes::Namespace.new(
service_account_namespace,
- kubeclient
+ kubeclient,
+ labels: service_account_namespace_labels
).ensure_exists!
end
diff --git a/app/services/concerns/akismet_methods.rb b/app/services/concerns/akismet_methods.rb
new file mode 100644
index 00000000000..1cbcf0d47b9
--- /dev/null
+++ b/app/services/concerns/akismet_methods.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+module AkismetMethods
+ def spammable_owner
+ @user ||= User.find(spammable_owner_id)
+ end
+
+ def spammable_owner_id
+ @owner_id ||=
+ if spammable.respond_to?(:author_id)
+ spammable.author_id
+ elsif spammable.respond_to?(:creator_id)
+ spammable.creator_id
+ end
+ end
+
+ def akismet
+ @akismet ||= AkismetService.new(
+ spammable_owner.name,
+ spammable_owner.email,
+ spammable.spammable_text,
+ options
+ )
+ end
+end
diff --git a/app/services/spam_check_service.rb b/app/services/concerns/spam_check_methods.rb
index 51d300d4f1d..75d9759f1d1 100644
--- a/app/services/spam_check_service.rb
+++ b/app/services/concerns/spam_check_methods.rb
@@ -1,14 +1,14 @@
# frozen_string_literal: true
-# SpamCheckService
+# SpamCheckMethods
#
# Provide helper methods for checking if a given spammable object has
# potential spam data.
#
# Dependencies:
# - params with :request
-#
-module SpamCheckService
+
+module SpamCheckMethods
# rubocop:disable Gitlab/ModuleWithInstanceVariables
def filter_spam_check_params
@request = params.delete(:request)
@@ -24,7 +24,7 @@ module SpamCheckService
# rubocop:disable Gitlab/ModuleWithInstanceVariables
# rubocop: disable CodeReuse/ActiveRecord
def spam_check(spammable, user)
- spam_service = SpamService.new(spammable, @request)
+ spam_service = SpamService.new(spammable: spammable, request: @request)
spam_service.when_recaptcha_verified(@recaptcha_verified, @api) do
user.spam_logs.find_by(id: @spam_log_id)&.update!(recaptcha_verified: true)
diff --git a/app/services/container_expiration_policy_service.rb b/app/services/container_expiration_policy_service.rb
new file mode 100644
index 00000000000..5d141d4d64d
--- /dev/null
+++ b/app/services/container_expiration_policy_service.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+class ContainerExpirationPolicyService < BaseService
+ def execute(container_expiration_policy)
+ container_expiration_policy.schedule_next_run!
+
+ container_expiration_policy.container_repositories.find_each do |container_repository|
+ CleanupContainerRepositoryWorker.perform_async(
+ current_user.id,
+ container_repository.id,
+ container_expiration_policy.attributes.except("created_at", "updated_at")
+ )
+ end
+ end
+end
diff --git a/app/services/create_snippet_service.rb b/app/services/create_snippet_service.rb
deleted file mode 100644
index eacea7d94c7..00000000000
--- a/app/services/create_snippet_service.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-# frozen_string_literal: true
-
-class CreateSnippetService < BaseService
- include SpamCheckService
-
- def execute
- filter_spam_check_params
-
- snippet = if project
- project.snippets.build(params)
- else
- PersonalSnippet.new(params)
- end
-
- unless Gitlab::VisibilityLevel.allowed_for?(current_user, snippet.visibility_level)
- deny_visibility_level(snippet)
- return snippet
- end
-
- snippet.author = current_user
-
- spam_check(snippet, current_user)
-
- snippet_saved = snippet.with_transaction_returning_status do
- snippet.save && snippet.store_mentions!
- end
-
- if snippet_saved
- UserAgentDetailService.new(snippet, @request).create
- Gitlab::UsageDataCounters::SnippetCounter.count(:create)
- end
-
- snippet
- end
-end
diff --git a/app/services/deployments/after_create_service.rb b/app/services/deployments/after_create_service.rb
index 1d9cb666cff..3560f9c983b 100644
--- a/app/services/deployments/after_create_service.rb
+++ b/app/services/deployments/after_create_service.rb
@@ -34,21 +34,12 @@ module Deployments
if environment.save && !environment.stopped?
deployment.update_merge_request_metrics!
- link_merge_requests(deployment)
end
end
end
private
- def link_merge_requests(deployment)
- unless Feature.enabled?(:deployment_merge_requests, deployment.project)
- return
- end
-
- LinkMergeRequestsService.new(deployment).execute
- end
-
def environment_options
options&.dig(:environment) || {}
end
diff --git a/app/services/deployments/link_merge_requests_service.rb b/app/services/deployments/link_merge_requests_service.rb
index 71186659290..a1d6d50bbb4 100644
--- a/app/services/deployments/link_merge_requests_service.rb
+++ b/app/services/deployments/link_merge_requests_service.rb
@@ -13,7 +13,10 @@ module Deployments
end
def execute
- return unless deployment.success?
+ # Review apps have the environment type set (e.g. to `review`, though the
+ # exact value may differ). We don't want to link merge requests to review
+ # app deployments, as this is not useful.
+ return if deployment.environment.environment_type
if (prev = deployment.previous_environment_deployment)
link_merge_requests_for_range(prev.sha, deployment.sha)
diff --git a/app/services/error_tracking/issue_update_service.rb b/app/services/error_tracking/issue_update_service.rb
new file mode 100644
index 00000000000..e433b4a11f2
--- /dev/null
+++ b/app/services/error_tracking/issue_update_service.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+module ErrorTracking
+ class IssueUpdateService < ErrorTracking::BaseService
+ private
+
+ def fetch
+ project_error_tracking_setting.update_issue(
+ issue_id: params[:issue_id],
+ params: update_params
+ )
+ end
+
+ def update_params
+ params.except(:issue_id)
+ end
+
+ def parse_response(response)
+ { updated: response[:updated].present? }
+ end
+ end
+end
diff --git a/app/services/event_create_service.rb b/app/services/event_create_service.rb
index f7282c22a52..7460f0df535 100644
--- a/app/services/event_create_service.rb
+++ b/app/services/event_create_service.rb
@@ -101,7 +101,7 @@ class EventCreateService
Users::LastPushEventService.new(current_user)
.cache_last_push_event(event)
- Users::ActivityService.new(current_user, 'push').execute
+ Users::ActivityService.new(current_user).execute
end
def create_event(resource_parent, current_user, status, attributes = {})
diff --git a/app/services/git/base_hooks_service.rb b/app/services/git/base_hooks_service.rb
index d935d9e8cdc..a49983a84fc 100644
--- a/app/services/git/base_hooks_service.rb
+++ b/app/services/git/base_hooks_service.rb
@@ -163,7 +163,7 @@ module Git
end
def logger
- if Sidekiq.server?
+ if Gitlab::Runtime.sidekiq?
Sidekiq.logger
else
# This service runs in Sidekiq, so this shouldn't ever be
diff --git a/app/services/ham_service.rb b/app/services/ham_service.rb
index 794eb34d9ca..0bbdaa47a1b 100644
--- a/app/services/ham_service.rb
+++ b/app/services/ham_service.rb
@@ -18,8 +18,10 @@ class HamService
private
def akismet
+ user = spam_log.user
@akismet ||= AkismetService.new(
- spam_log.user,
+ user.name,
+ user.email,
spam_log.text,
ip_address: spam_log.source_ip,
user_agent: spam_log.user_agent
diff --git a/app/services/issuable/clone/attributes_rewriter.rb b/app/services/issuable/clone/attributes_rewriter.rb
index 1f5d83917cc..334e50c0be5 100644
--- a/app/services/issuable/clone/attributes_rewriter.rb
+++ b/app/services/issuable/clone/attributes_rewriter.rb
@@ -18,6 +18,7 @@ module Issuable
new_entity.update(update_attributes)
copy_resource_label_events
+ copy_resource_weight_events
end
private
@@ -60,6 +61,20 @@ module Issuable
end
end
+ def copy_resource_weight_events
+ return unless original_entity.respond_to?(:resource_weight_events)
+
+ original_entity.resource_weight_events.find_in_batches do |batch|
+ events = batch.map do |event|
+ event.attributes
+ .except('id', 'reference', 'reference_html')
+ .merge('issue_id' => new_entity.id)
+ end
+
+ Gitlab::Database.bulk_insert(ResourceWeightEvent.table_name, events)
+ end
+ end
+
def entity_key
new_entity.class.name.parameterize('_').foreign_key
end
diff --git a/app/services/issues/create_service.rb b/app/services/issues/create_service.rb
index 8d1df0d87a7..e8879d4df66 100644
--- a/app/services/issues/create_service.rb
+++ b/app/services/issues/create_service.rb
@@ -2,7 +2,7 @@
module Issues
class CreateService < Issues::BaseService
- include SpamCheckService
+ include SpamCheckMethods
include ResolveDiscussions
def execute
diff --git a/app/services/issues/update_service.rb b/app/services/issues/update_service.rb
index b98a4d2567f..68d1657d881 100644
--- a/app/services/issues/update_service.rb
+++ b/app/services/issues/update_service.rb
@@ -2,7 +2,7 @@
module Issues
class UpdateService < Issues::BaseService
- include SpamCheckService
+ include SpamCheckMethods
def execute(issue)
handle_move_between_ids(issue)
diff --git a/app/services/members/update_service.rb b/app/services/members/update_service.rb
index fdd2c62a452..b5c27caafa2 100644
--- a/app/services/members/update_service.rb
+++ b/app/services/members/update_service.rb
@@ -7,9 +7,10 @@ module Members
raise Gitlab::Access::AccessDeniedError unless can?(current_user, action_member_permission(permission, member), member)
old_access_level = member.human_access
+ old_expiry = member.expires_at
if member.update(params)
- after_execute(action: permission, old_access_level: old_access_level, member: member)
+ after_execute(action: permission, old_access_level: old_access_level, old_expiry: old_expiry, member: member)
# Deletes only confidential issues todos for guests
enqueue_delete_todos(member) if downgrading_to_guest?
diff --git a/app/services/merge_requests/get_urls_service.rb b/app/services/merge_requests/get_urls_service.rb
index 7c88c9abb41..de3f2acdf63 100644
--- a/app/services/merge_requests/get_urls_service.rb
+++ b/app/services/merge_requests/get_urls_service.rb
@@ -9,7 +9,7 @@ module MergeRequests
end
def execute(changes)
- return [] unless project.printing_merge_request_link_enabled
+ return [] unless project&.printing_merge_request_link_enabled
branches = get_branches(changes)
merge_requests_map = opened_merge_requests_from_source_branches(branches)
diff --git a/app/services/merge_requests/rebase_service.rb b/app/services/merge_requests/rebase_service.rb
index 7e9442c0c7c..bc1e97088af 100644
--- a/app/services/merge_requests/rebase_service.rb
+++ b/app/services/merge_requests/rebase_service.rb
@@ -8,8 +8,9 @@ module MergeRequests
attr_reader :merge_request
- def execute(merge_request)
+ def execute(merge_request, skip_ci: false)
@merge_request = merge_request
+ @skip_ci = skip_ci
if rebase
success
@@ -25,7 +26,7 @@ module MergeRequests
return false
end
- repository.rebase(current_user, merge_request)
+ repository.rebase(current_user, merge_request, skip_ci: @skip_ci)
true
rescue => e
diff --git a/app/services/metrics/dashboard/clone_dashboard_service.rb b/app/services/metrics/dashboard/clone_dashboard_service.rb
new file mode 100644
index 00000000000..b2ec44cb814
--- /dev/null
+++ b/app/services/metrics/dashboard/clone_dashboard_service.rb
@@ -0,0 +1,113 @@
+# frozen_string_literal: true
+
+# Copies system dashboard definition in .yml file into designated
+# .yml file inside `.gitlab/dashboards`
+module Metrics
+ module Dashboard
+ class CloneDashboardService < ::BaseService
+ ALLOWED_FILE_TYPE = '.yml'
+ USER_DASHBOARDS_DIR = ::Metrics::Dashboard::ProjectDashboardService::DASHBOARD_ROOT
+
+ def self.allowed_dashboard_templates
+ @allowed_dashboard_templates ||= Set[::Metrics::Dashboard::SystemDashboardService::DASHBOARD_PATH].freeze
+ end
+
+ def execute
+ catch(:error) do
+ throw(:error, error(_(%q(You can't commit to this project)), :forbidden)) unless push_authorized?
+
+ result = ::Files::CreateService.new(project, current_user, dashboard_attrs).execute
+ throw(:error, wrap_error(result)) unless result[:status] == :success
+
+ repository.refresh_method_caches([:metrics_dashboard])
+ success(result.merge(http_status: :created, dashboard: dashboard_details))
+ end
+ end
+
+ private
+
+ def dashboard_attrs
+ {
+ commit_message: params[:commit_message],
+ file_path: new_dashboard_path,
+ file_content: new_dashboard_content,
+ encoding: 'text',
+ branch_name: branch,
+ start_branch: repository.branch_exists?(branch) ? branch : project.default_branch
+ }
+ end
+
+ def dashboard_details
+ {
+ path: new_dashboard_path,
+ display_name: ::Metrics::Dashboard::ProjectDashboardService.name_for_path(new_dashboard_path),
+ default: false,
+ system_dashboard: false
+ }
+ end
+
+ def push_authorized?
+ Gitlab::UserAccess.new(current_user, project: project).can_push_to_branch?(branch)
+ end
+
+ def dashboard_template
+ @dashboard_template ||= begin
+ throw(:error, error(_('Not found.'), :not_found)) unless self.class.allowed_dashboard_templates.include?(params[:dashboard])
+
+ params[:dashboard]
+ end
+ end
+
+ def branch
+ @branch ||= begin
+ throw(:error, error(_('There was an error creating the dashboard, branch name is invalid.'), :bad_request)) unless valid_branch_name?
+ throw(:error, error(_('There was an error creating the dashboard, branch named: %{branch} already exists.') % { branch: params[:branch] }, :bad_request)) unless new_or_default_branch? # temporary validation for first UI iteration
+
+ params[:branch]
+ end
+ end
+
+ def new_or_default_branch?
+ !repository.branch_exists?(params[:branch]) || project.default_branch == params[:branch]
+ end
+
+ def valid_branch_name?
+ Gitlab::GitRefValidator.validate(params[:branch])
+ end
+
+ def new_dashboard_path
+ @new_dashboard_path ||= File.join(USER_DASHBOARDS_DIR, file_name)
+ end
+
+ def file_name
+ @file_name ||= begin
+ throw(:error, error(_('The file name should have a .yml extension'), :bad_request)) unless target_file_type_valid?
+
+ File.basename(params[:file_name])
+ end
+ end
+
+ def target_file_type_valid?
+ File.extname(params[:file_name]) == ALLOWED_FILE_TYPE
+ end
+
+ def new_dashboard_content
+ File.read(Rails.root.join(dashboard_template))
+ end
+
+ def repository
+ @repository ||= project.repository
+ end
+
+ def wrap_error(result)
+ if result[:message] == 'A file with this name already exists'
+ error(_("A file with '%{file_name}' already exists in %{branch} branch") % { file_name: file_name, branch: branch }, :bad_request)
+ else
+ result
+ end
+ end
+ end
+ end
+end
+
+Metrics::Dashboard::CloneDashboardService.prepend_if_ee('EE::Metrics::Dashboard::CloneDashboardService')
diff --git a/app/services/metrics/sample_metrics_service.rb b/app/services/metrics/sample_metrics_service.rb
index 719bc6614e4..9bf32b295e2 100644
--- a/app/services/metrics/sample_metrics_service.rb
+++ b/app/services/metrics/sample_metrics_service.rb
@@ -4,16 +4,17 @@ module Metrics
class SampleMetricsService
DIRECTORY = "sample_metrics"
- attr_reader :identifier
+ attr_reader :identifier, :range_minutes
- def initialize(identifier)
+ def initialize(identifier, range_start:, range_end:)
@identifier = identifier
+ @range_minutes = convert_range_minutes(range_start, range_end)
end
def query
return unless identifier && File.exist?(file_location)
- YAML.load_file(File.expand_path(file_location, __dir__))
+ query_interval
end
private
@@ -22,5 +23,14 @@ module Metrics
sanitized_string = identifier.gsub(/[^0-9A-Za-z_]/, '')
File.join(Rails.root, DIRECTORY, "#{sanitized_string}.yml")
end
+
+ def query_interval
+ result = YAML.load_file(File.expand_path(file_location, __dir__))
+ result[range_minutes]
+ end
+
+ def convert_range_minutes(range_start, range_end)
+ ((range_end.to_time - range_start.to_time) / 1.minute).to_i
+ end
end
end
diff --git a/app/services/notes/create_service.rb b/app/services/notes/create_service.rb
index accfdb5b863..50dc98b88e9 100644
--- a/app/services/notes/create_service.rb
+++ b/app/services/notes/create_service.rb
@@ -4,9 +4,7 @@ module Notes
class CreateService < ::Notes::BaseService
# rubocop:disable Metrics/CyclomaticComplexity
def execute
- merge_request_diff_head_sha = params.delete(:merge_request_diff_head_sha)
-
- note = Notes::BuildService.new(project, current_user, params).execute
+ note = Notes::BuildService.new(project, current_user, params.except(:merge_request_diff_head_sha)).execute
# n+1: https://gitlab.com/gitlab-org/gitlab-foss/issues/37440
note_valid = Gitlab::GitalyClient.allow_n_plus_1_calls do
@@ -23,8 +21,7 @@ module Notes
quick_actions_service = QuickActionsService.new(project, current_user)
if quick_actions_service.supported?(note)
- options = { merge_request_diff_head_sha: merge_request_diff_head_sha }
- content, update_params, message = quick_actions_service.execute(note, options)
+ content, update_params, message = quick_actions_service.execute(note, quick_action_options)
only_commands = content.empty?
@@ -74,6 +71,11 @@ module Notes
private
+ # EE::Notes::CreateService would override this method
+ def quick_action_options
+ { merge_request_diff_head_sha: params[:merge_request_diff_head_sha] }
+ end
+
def tracking_data_for(note)
label = Gitlab.ee? && note.author == User.visual_review_bot ? 'anonymous_visual_review_note' : 'note'
@@ -84,3 +86,5 @@ module Notes
end
end
end
+
+Notes::CreateService.prepend_if_ee('EE::Notes::CreateService')
diff --git a/app/services/notes/destroy_service.rb b/app/services/notes/destroy_service.rb
index fa0c2c5c86b..ee8a680fcb4 100644
--- a/app/services/notes/destroy_service.rb
+++ b/app/services/notes/destroy_service.rb
@@ -11,3 +11,5 @@ module Notes
end
end
end
+
+Notes::DestroyService.prepend_if_ee('EE::Notes::DestroyService')
diff --git a/app/services/pages_domains/create_acme_order_service.rb b/app/services/pages_domains/create_acme_order_service.rb
index c600f497fa5..8eab5c52432 100644
--- a/app/services/pages_domains/create_acme_order_service.rb
+++ b/app/services/pages_domains/create_acme_order_service.rb
@@ -3,6 +3,9 @@
module PagesDomains
class CreateAcmeOrderService
attr_reader :pages_domain
+ # TODO: remove this hack after https://gitlab.com/gitlab-org/gitlab/issues/30146 is implemented
+ # This makes GitLab automatically retry the certificate obtaining process every 2 hours if the process wasn't finished
+ SHORT_EXPIRATION_DELAY = 2.hours
def initialize(pages_domain)
@pages_domain = pages_domain
@@ -17,7 +20,7 @@ module PagesDomains
private_key = OpenSSL::PKey::RSA.new(4096)
saved_order = pages_domain.acme_orders.create!(
url: order.url,
- expires_at: order.expires,
+ expires_at: [order.expires, SHORT_EXPIRATION_DELAY.from_now].min,
private_key: private_key.to_pem,
challenge_token: challenge.token,
diff --git a/app/services/projects/operations/update_service.rb b/app/services/projects/operations/update_service.rb
index 0ca89664304..706a6f01a75 100644
--- a/app/services/projects/operations/update_service.rb
+++ b/app/services/projects/operations/update_service.rb
@@ -30,7 +30,7 @@ module Projects
settings = params[:error_tracking_setting_attributes]
return {} if settings.blank?
- api_url = ErrorTracking::ProjectErrorTrackingSetting.build_api_url_from(
+ api_url = ::ErrorTracking::ProjectErrorTrackingSetting.build_api_url_from(
api_host: settings[:api_host],
project_slug: settings.dig(:project, :slug),
organization_slug: settings.dig(:project, :organization_slug)
diff --git a/app/services/projects/update_pages_service.rb b/app/services/projects/update_pages_service.rb
index e8a87fc4320..8b23f610ad1 100644
--- a/app/services/projects/update_pages_service.rb
+++ b/app/services/projects/update_pages_service.rb
@@ -6,7 +6,6 @@ module Projects
FailedToExtractError = Class.new(StandardError)
BLOCK_SIZE = 32.kilobytes
- MAX_SIZE = 1.terabyte
PUBLIC_DIR = 'public'
# this has to be invalid group name,
@@ -130,12 +129,16 @@ module Projects
1 + max_size / BLOCK_SIZE
end
+ def max_size_from_settings
+ Gitlab::CurrentSettings.max_pages_size.megabytes
+ end
+
def max_size
- max_pages_size = Gitlab::CurrentSettings.max_pages_size.megabytes
+ max_pages_size = max_size_from_settings
- return MAX_SIZE if max_pages_size.zero?
+ return ::Gitlab::Pages::MAX_SIZE if max_pages_size.zero?
- [max_pages_size, MAX_SIZE].min
+ max_pages_size
end
def tmp_path
@@ -200,3 +203,5 @@ module Projects
end
end
end
+
+Projects::UpdatePagesService.prepend_if_ee('EE::Projects::UpdatePagesService')
diff --git a/app/services/prometheus/adapter_service.rb b/app/services/prometheus/adapter_service.rb
deleted file mode 100644
index 399f4c35d66..00000000000
--- a/app/services/prometheus/adapter_service.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-# frozen_string_literal: true
-
-module Prometheus
- class AdapterService
- def initialize(project, deployment_platform = nil)
- @project = project
-
- @deployment_platform = if deployment_platform
- deployment_platform
- else
- project.deployment_platform
- end
- end
-
- attr_reader :deployment_platform, :project
-
- def prometheus_adapter
- @prometheus_adapter ||= if service_prometheus_adapter.can_query?
- service_prometheus_adapter
- else
- cluster_prometheus_adapter
- end
- end
-
- def service_prometheus_adapter
- project.find_or_initialize_service('prometheus')
- end
-
- def cluster_prometheus_adapter
- application = deployment_platform&.cluster&.application_prometheus
-
- application if application&.available?
- end
- end
-end
diff --git a/app/services/prometheus/proxy_service.rb b/app/services/prometheus/proxy_service.rb
index a62eb76b8ce..3585c90fc8f 100644
--- a/app/services/prometheus/proxy_service.rb
+++ b/app/services/prometheus/proxy_service.rb
@@ -5,9 +5,17 @@ module Prometheus
include ReactiveCaching
include Gitlab::Utils::StrongMemoize
- self.reactive_cache_key = ->(service) { service.cache_key }
+ self.reactive_cache_key = ->(service) { [] }
self.reactive_cache_lease_timeout = 30.seconds
- self.reactive_cache_refresh_interval = 30.seconds
+
+ # reactive_cache_refresh_interval should be set to a value higher than
+ # reactive_cache_lifetime. If the refresh_interval is less than lifetime
+ # then the ReactiveCachingWorker will re-query prometheus for this
+ # PromQL query even though it's (probably) already been picked up by
+ # the frontend
+ # refresh_interval should be set less than lifetime only if this data
+ # is expected to change *and* be fetched again by the frontend
+ self.reactive_cache_refresh_interval = 90.seconds
self.reactive_cache_lifetime = 1.minute
self.reactive_cache_worker_finder = ->(_id, *args) { from_cache(*args) }
diff --git a/app/services/prometheus/proxy_variable_substitution_service.rb b/app/services/prometheus/proxy_variable_substitution_service.rb
index ca56292e9d6..b34afaf80b8 100644
--- a/app/services/prometheus/proxy_variable_substitution_service.rb
+++ b/app/services/prometheus/proxy_variable_substitution_service.rb
@@ -4,7 +4,10 @@ module Prometheus
class ProxyVariableSubstitutionService < BaseService
include Stepable
- steps :add_params_to_result, :substitute_ruby_variables
+ steps :validate_variables,
+ :add_params_to_result,
+ :substitute_ruby_variables,
+ :substitute_liquid_variables
def initialize(environment, params = {})
@environment, @params = environment, params.deep_dup
@@ -16,24 +19,45 @@ module Prometheus
private
+ def validate_variables(_result)
+ return success unless variables
+
+ unless variables.is_a?(Array) && variables.size.even?
+ return error(_('Optional parameter "variables" must be an array of keys and values. Ex: [key1, value1, key2, value2]'))
+ end
+
+ success
+ end
+
def add_params_to_result(result)
result[:params] = params
success(result)
end
+ def substitute_liquid_variables(result)
+ return success(result) unless query(result)
+
+ result[:params][:query] =
+ TemplateEngines::LiquidService.new(query(result)).render(full_context)
+
+ success(result)
+ rescue TemplateEngines::LiquidService::RenderError => e
+ error(e.message)
+ end
+
def substitute_ruby_variables(result)
- return success(result) unless query
+ return success(result) unless query(result)
# The % operator doesn't replace variables if the hash contains string
# keys.
- result[:params][:query] = query % predefined_context.symbolize_keys
+ result[:params][:query] = query(result) % predefined_context.symbolize_keys
success(result)
rescue TypeError, ArgumentError => exception
log_error(exception.message)
- Gitlab::ErrorTracking.track_exception(exception, extra: {
- template_string: query,
+ Gitlab::ErrorTracking.track_exception(exception, {
+ template_string: query(result),
variables: predefined_context
})
@@ -44,8 +68,25 @@ module Prometheus
@predefined_context ||= Gitlab::Prometheus::QueryVariables.call(@environment)
end
- def query
- params[:query]
+ def full_context
+ @full_context ||= predefined_context.reverse_merge(variables_hash)
+ end
+
+ def variables
+ params[:variables]
+ end
+
+ def variables_hash
+ # .each_slice(2) converts ['key1', 'value1', 'key2', 'value2'] into
+ # [['key1', 'value1'], ['key2', 'value2']] which is then converted into
+ # a hash by to_h: {'key1' => 'value1', 'key2' => 'value2'}
+ # to_h will raise an ArgumentError if the number of elements in the original
+ # array is not even.
+ variables&.each_slice(2).to_h
+ end
+
+ def query(result)
+ result[:params][:query]
end
end
end
diff --git a/app/services/quick_actions/interpret_service.rb b/app/services/quick_actions/interpret_service.rb
index a14e0515a1f..a781eacc40e 100644
--- a/app/services/quick_actions/interpret_service.rb
+++ b/app/services/quick_actions/interpret_service.rb
@@ -84,7 +84,9 @@ module QuickActions
# rubocop: enable CodeReuse/ActiveRecord
def find_milestones(project, params = {})
- MilestonesFinder.new(params.merge(project_ids: [project.id], group_ids: [project.group&.id])).execute
+ group_ids = project.group.self_and_ancestors.select(:id) if project.group
+
+ MilestonesFinder.new(params.merge(project_ids: [project.id], group_ids: group_ids)).execute
end
def parent
diff --git a/app/services/releases/update_service.rb b/app/services/releases/update_service.rb
index 6ba8dac21f0..a452f7aa17a 100644
--- a/app/services/releases/update_service.rb
+++ b/app/services/releases/update_service.rb
@@ -11,10 +11,13 @@ module Releases
return error('params is empty', 400) if empty_params?
return error("Milestone(s) not found: #{inexistent_milestones.join(', ')}", 400) if inexistent_milestones.any?
- params[:milestones] = milestones if param_for_milestone_titles_provided?
+ if param_for_milestone_titles_provided?
+ previous_milestones = release.milestones.map(&:title)
+ params[:milestones] = milestones
+ end
if release.update(params)
- success(tag: existing_tag, release: release)
+ success(tag: existing_tag, release: release, milestones_updated: milestones_updated?(previous_milestones))
else
error(release.errors.messages || '400 Bad request', 400)
end
@@ -29,5 +32,11 @@ module Releases
def empty_params?
params.except(:tag).empty?
end
+
+ def milestones_updated?(previous_milestones)
+ return false unless param_for_milestone_titles_provided?
+
+      previous_milestones.to_set != release.milestones.map(&:title).to_set
+ end
end
end
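
    Editor's note: a small sketch of the set comparison behind milestones_updated? (hypothetical milestone
    titles); converting both sides to Sets makes the check order-insensitive:

    require 'set'

    previous = ['12.6', '12.7'].to_set
    current  = ['12.7', '12.6'].to_set
    previous != current # => false, same milestones in a different order

    current << '12.8'
    previous != current # => true, the milestone list changed
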
diff --git a/app/services/resource_events/base_synthetic_notes_builder_service.rb b/app/services/resource_events/base_synthetic_notes_builder_service.rb
new file mode 100644
index 00000000000..1b85ca811a1
--- /dev/null
+++ b/app/services/resource_events/base_synthetic_notes_builder_service.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+# We store events about issuable label changes and weight changes in separate
+# tables (not as other system notes), but we still want to display them as
+# classic system notes in the UI. This is the base service for building such
+# "synthetic" notes.
+
+module ResourceEvents
+ class BaseSyntheticNotesBuilderService
+ include Gitlab::Utils::StrongMemoize
+
+ attr_reader :resource, :current_user, :params
+
+ def initialize(resource, current_user, params = {})
+ @resource = resource
+ @current_user = current_user
+ @params = params
+ end
+
+ def execute
+ synthetic_notes
+ end
+
+ private
+
+ def since_fetch_at(events)
+ return events unless params[:last_fetched_at].present?
+
+ last_fetched_at = Time.at(params.fetch(:last_fetched_at).to_i)
+ events.created_after(last_fetched_at - NotesFinder::FETCH_OVERLAP)
+ end
+
+ def resource_parent
+ strong_memoize(:resource_parent) do
+ resource.project || resource.group
+ end
+ end
+ end
+end
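
    Editor's note: a rough sketch of what since_fetch_at does with the last_fetched_at parameter; the epoch
    value and the overlap are illustrative only (the real overlap is NotesFinder::FETCH_OVERLAP):

    last_fetched_at_param = '1579600000'        # arrives as epoch seconds
    last_fetched_at = Time.at(last_fetched_at_param.to_i)

    fetch_overlap = 1                           # seconds, illustrative
    cutoff = last_fetched_at - fetch_overlap
    # events.created_after(cutoff) then returns only events newer than the previous
    # poll, minus a small overlap so records created in the same second are not missed.
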
diff --git a/app/services/resource_events/merge_into_notes_service.rb b/app/services/resource_events/merge_into_notes_service.rb
index 7504773a002..47948fcff6e 100644
--- a/app/services/resource_events/merge_into_notes_service.rb
+++ b/app/services/resource_events/merge_into_notes_service.rb
@@ -1,10 +1,9 @@
# frozen_string_literal: true
-# We store events about issuable label changes in a separate table (not as
-# other system notes), but we still want to display notes about label changes
-# as classic system notes in UI. This service generates "synthetic" notes for
-# label event changes and merges them with classic notes and sorts them by
-# creation time.
+# We store events about issuable label changes and weight changes in separate tables (not as
+# other system notes), but we still want to display notes about label and weight changes
+# as classic system notes in UI. This service merges synthetic label and weight notes
+# with classic notes and sorts them by creation time.
module ResourceEvents
class MergeIntoNotesService
@@ -19,39 +18,15 @@ module ResourceEvents
end
def execute(notes = [])
- (notes + label_notes).sort_by { |n| n.created_at }
+ (notes + synthetic_notes).sort_by { |n| n.created_at }
end
private
- def label_notes
- label_events_by_discussion_id.map do |discussion_id, events|
- LabelNote.from_events(events, resource: resource, resource_parent: resource_parent)
- end
- end
-
- # rubocop: disable CodeReuse/ActiveRecord
- def label_events_by_discussion_id
- return [] unless resource.respond_to?(:resource_label_events)
-
- events = resource.resource_label_events.includes(:label, user: :status)
- events = since_fetch_at(events)
-
- events.group_by { |event| event.discussion_id }
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
- def since_fetch_at(events)
- return events unless params[:last_fetched_at].present?
-
- last_fetched_at = Time.at(params.fetch(:last_fetched_at).to_i)
- events.created_after(last_fetched_at - NotesFinder::FETCH_OVERLAP)
- end
-
- def resource_parent
- strong_memoize(:resource_parent) do
- resource.project || resource.group
- end
+ def synthetic_notes
+ SyntheticLabelNotesBuilderService.new(resource, current_user, params).execute
end
end
end
+
+ResourceEvents::MergeIntoNotesService.prepend_if_ee('EE::ResourceEvents::MergeIntoNotesService')
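
    Editor's note: a stand-alone sketch of the merge-and-sort in execute, using simple structs in place of
    Note records:

    require 'time'

    FakeNote = Struct.new(:body, :created_at)

    classic   = [FakeNote.new('left a comment', Time.parse('2020-01-01 10:00'))]
    synthetic = [FakeNote.new('added ~bug label', Time.parse('2020-01-01 09:30'))]

    merged = (classic + synthetic).sort_by(&:created_at)
    merged.map(&:body) # => ["added ~bug label", "left a comment"]
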
diff --git a/app/services/resource_events/synthetic_label_notes_builder_service.rb b/app/services/resource_events/synthetic_label_notes_builder_service.rb
new file mode 100644
index 00000000000..fd128101b49
--- /dev/null
+++ b/app/services/resource_events/synthetic_label_notes_builder_service.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+# We store events about issuable label changes in a separate table (not as
+# other system notes), but we still want to display notes about label changes
+# as classic system notes in UI. This service generates "synthetic" notes for
+# label event changes.
+
+module ResourceEvents
+ class SyntheticLabelNotesBuilderService < BaseSyntheticNotesBuilderService
+ private
+
+ def synthetic_notes
+ label_events_by_discussion_id.map do |discussion_id, events|
+ LabelNote.from_events(events, resource: resource, resource_parent: resource_parent)
+ end
+ end
+
+ def label_events_by_discussion_id
+ return [] unless resource.respond_to?(:resource_label_events)
+
+ events = resource.resource_label_events.includes(:label, user: :status) # rubocop: disable CodeReuse/ActiveRecord
+ events = since_fetch_at(events)
+
+ events.group_by { |event| event.discussion_id }
+ end
+ end
+end
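
    Editor's note: for illustration, grouping hypothetical label events (plain hashes standing in for
    ResourceLabelEvent records) by discussion_id, which is what lets several events collapse into one
    synthetic note:

    events = [
      { discussion_id: 'd1', label: 'bug' },
      { discussion_id: 'd1', label: 'backend' },
      { discussion_id: 'd2', label: 'feature' }
    ]

    events.group_by { |event| event[:discussion_id] }
          .transform_values(&:size)
    # => {"d1"=>2, "d2"=>1}
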
diff --git a/app/services/search_service.rb b/app/services/search_service.rb
index 91c0f9ba104..fe5e823b56c 100644
--- a/app/services/search_service.rb
+++ b/app/services/search_service.rb
@@ -3,6 +3,9 @@
class SearchService
include Gitlab::Allowable
+ SEARCH_TERM_LIMIT = 64
+ SEARCH_CHAR_LIMIT = 4096
+
def initialize(current_user, params = {})
@current_user = current_user
@params = params.dup
@@ -42,6 +45,14 @@ class SearchService
@show_snippets = params[:snippets] == 'true'
end
+ def valid_query_length?
+ params[:search].length <= SEARCH_CHAR_LIMIT
+ end
+
+ def valid_terms_count?
+ params[:search].split.count { |word| word.length >= 3 } <= SEARCH_TERM_LIMIT
+ end
+
delegate :scope, to: :search_service
def search_results
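
    Editor's note: the two new guards in plain Ruby, with an example query (constants copied from above);
    only words of three or more characters count against the term limit:

    SEARCH_TERM_LIMIT = 64
    SEARCH_CHAR_LIMIT = 4096

    search = 'fix flaky pipeline spec on CI'

    search.length <= SEARCH_CHAR_LIMIT                                   # => true
    search.split.count { |word| word.length >= 3 } <= SEARCH_TERM_LIMIT  # => true ("on" and "CI" are ignored)
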
diff --git a/app/services/snippets/base_service.rb b/app/services/snippets/base_service.rb
new file mode 100644
index 00000000000..2b450db0b83
--- /dev/null
+++ b/app/services/snippets/base_service.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+module Snippets
+ class BaseService < ::BaseService
+ private
+
+ def snippet_error_response(snippet, http_status)
+ ServiceResponse.error(
+ message: snippet.errors.full_messages.to_sentence,
+ http_status: http_status,
+ payload: { snippet: snippet }
+ )
+ end
+ end
+end
diff --git a/app/services/snippets/create_service.rb b/app/services/snippets/create_service.rb
new file mode 100644
index 00000000000..250e99c466a
--- /dev/null
+++ b/app/services/snippets/create_service.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+module Snippets
+ class CreateService < Snippets::BaseService
+ include SpamCheckMethods
+
+ def execute
+ filter_spam_check_params
+
+ snippet = if project
+ project.snippets.build(params)
+ else
+ PersonalSnippet.new(params)
+ end
+
+ unless Gitlab::VisibilityLevel.allowed_for?(current_user, snippet.visibility_level)
+ deny_visibility_level(snippet)
+
+ return snippet_error_response(snippet, 403)
+ end
+
+ snippet.author = current_user
+
+ spam_check(snippet, current_user)
+
+ snippet_saved = snippet.with_transaction_returning_status do
+ snippet.save && snippet.store_mentions!
+ end
+
+ if snippet_saved
+ UserAgentDetailService.new(snippet, @request).create
+ Gitlab::UsageDataCounters::SnippetCounter.count(:create)
+
+        ServiceResponse.success(payload: { snippet: snippet })
+ else
+ snippet_error_response(snippet, 400)
+ end
+ end
+ end
+end
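
    Editor's note: a hypothetical console sketch of calling the new service; project, user, and request
    are assumed to exist, the request: parameter is assumed to be consumed by filter_spam_check_params,
    and the return value is a ServiceResponse as built in Snippets::BaseService:

    response = Snippets::CreateService.new(
      project,
      user,
      title: 'Debug helper',
      content: "puts 'hello'",
      request: request
    ).execute

    if response.success?
      response.payload[:snippet]
    else
      response.message # the snippet's validation errors as a sentence
    end
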
diff --git a/app/services/snippets/destroy_service.rb b/app/services/snippets/destroy_service.rb
new file mode 100644
index 00000000000..f253817d94f
--- /dev/null
+++ b/app/services/snippets/destroy_service.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+module Snippets
+ class DestroyService
+ include Gitlab::Allowable
+
+ attr_reader :current_user, :project
+
+ def initialize(user, snippet)
+ @current_user = user
+ @snippet = snippet
+ @project = snippet&.project
+ end
+
+ def execute
+ if snippet.nil?
+ return service_response_error('No snippet found.', 404)
+ end
+
+ unless user_can_delete_snippet?
+ return service_response_error(
+ "You don't have access to delete this snippet.",
+ 403
+ )
+ end
+
+ if snippet.destroy
+ ServiceResponse.success(message: 'Snippet was deleted.')
+ else
+ service_response_error('Failed to remove snippet.', 400)
+ end
+ end
+
+ private
+
+ attr_reader :snippet
+
+ def user_can_delete_snippet?
+ return can?(current_user, :admin_project_snippet, snippet) if project
+
+ can?(current_user, :admin_personal_snippet, snippet)
+ end
+
+ def service_response_error(message, http_status)
+ ServiceResponse.error(message: message, http_status: http_status)
+ end
+ end
+end
diff --git a/app/services/snippets/update_service.rb b/app/services/snippets/update_service.rb
new file mode 100644
index 00000000000..8d2c8cac148
--- /dev/null
+++ b/app/services/snippets/update_service.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+module Snippets
+ class UpdateService < Snippets::BaseService
+ include SpamCheckMethods
+
+ def execute(snippet)
+ # check that user is allowed to set specified visibility_level
+ new_visibility = visibility_level
+
+ if new_visibility && new_visibility.to_i != snippet.visibility_level
+ unless Gitlab::VisibilityLevel.allowed_for?(current_user, new_visibility)
+ deny_visibility_level(snippet, new_visibility)
+
+ return snippet_error_response(snippet, 403)
+ end
+ end
+
+ filter_spam_check_params
+ snippet.assign_attributes(params)
+ spam_check(snippet, current_user)
+
+ snippet_saved = snippet.with_transaction_returning_status do
+ snippet.save && snippet.store_mentions!
+ end
+
+ if snippet_saved
+ Gitlab::UsageDataCounters::SnippetCounter.count(:update)
+
+        ServiceResponse.success(payload: { snippet: snippet })
+ else
+ snippet_error_response(snippet, 400)
+ end
+ end
+ end
+end
diff --git a/app/services/spam/mark_as_spam_service.rb b/app/services/spam/mark_as_spam_service.rb
new file mode 100644
index 00000000000..0ebcf17927a
--- /dev/null
+++ b/app/services/spam/mark_as_spam_service.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+module Spam
+ class MarkAsSpamService
+ include ::AkismetMethods
+
+ attr_accessor :spammable, :options
+
+ def initialize(spammable:)
+ @spammable = spammable
+ @options = {}
+
+ @options[:ip_address] = @spammable.ip_address
+ @options[:user_agent] = @spammable.user_agent
+ end
+
+ def execute
+ return unless spammable.submittable_as_spam?
+ return unless akismet.submit_spam
+
+ spammable.user_agent_detail.update_attribute(:submitted, true)
+ end
+ end
+end
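
    Editor's note: hypothetical usage; issue stands for any Spammable record with a stored user_agent_detail:

    if Spam::MarkAsSpamService.new(spammable: issue).execute
      # the record was reported to Akismet and its user agent detail is now flagged as submitted
    end
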
diff --git a/app/services/spam_service.rb b/app/services/spam_service.rb
index babe69cfdc8..ba9b812a01c 100644
--- a/app/services/spam_service.rb
+++ b/app/services/spam_service.rb
@@ -1,10 +1,12 @@
# frozen_string_literal: true
class SpamService
+ include AkismetMethods
+
attr_accessor :spammable, :request, :options
attr_reader :spam_log
- def initialize(spammable, request = nil)
+ def initialize(spammable:, request:)
@spammable = spammable
@request = request
@options = {}
@@ -19,16 +21,6 @@ class SpamService
end
end
- def mark_as_spam!
- return false unless spammable.submittable_as_spam?
-
- if akismet.submit_spam
- spammable.user_agent_detail.update_attribute(:submitted, true)
- else
- false
- end
- end
-
def when_recaptcha_verified(recaptcha_verified, api = false)
# In case it's a request which is already verified through recaptcha, yield
# block.
@@ -54,27 +46,6 @@ class SpamService
true
end
- def akismet
- @akismet ||= AkismetService.new(
- spammable_owner,
- spammable.spammable_text,
- options
- )
- end
-
- def spammable_owner
- @user ||= User.find(spammable_owner_id)
- end
-
- def spammable_owner_id
- @owner_id ||=
- if spammable.respond_to?(:author_id)
- spammable.author_id
- elsif spammable.respond_to?(:creator_id)
- spammable.creator_id
- end
- end
-
def check_for_spam?
spammable.check_for_spam?
end
diff --git a/app/services/suggestions/apply_service.rb b/app/services/suggestions/apply_service.rb
index 8ba50e22b09..a6485e42bdb 100644
--- a/app/services/suggestions/apply_service.rb
+++ b/app/services/suggestions/apply_service.rb
@@ -2,6 +2,24 @@
module Suggestions
class ApplyService < ::BaseService
+ DEFAULT_SUGGESTION_COMMIT_MESSAGE = 'Apply suggestion to %{file_path}'
+
+ PLACEHOLDERS = {
+ 'project_path' => ->(suggestion, user) { suggestion.project.path },
+ 'project_name' => ->(suggestion, user) { suggestion.project.name },
+ 'file_path' => ->(suggestion, user) { suggestion.file_path },
+ 'branch_name' => ->(suggestion, user) { suggestion.branch },
+ 'username' => ->(suggestion, user) { user.username },
+ 'user_full_name' => ->(suggestion, user) { user.name }
+ }.freeze
+
+    # This regex is built dynamically from the keys of the PLACEHOLDERS hash,
+    # so a new placeholder can be added just by extending that hash; the
+    # resulting PLACEHOLDERS_REGEX then matches the new key automatically.
+ PLACEHOLDERS_REGEX = Regexp.union(PLACEHOLDERS.keys.map { |key| Regexp.new(Regexp.escape(key)) }).freeze
+
+ attr_reader :current_user
+
def initialize(current_user)
@current_user = current_user
end
@@ -22,7 +40,7 @@ module Suggestions
end
params = file_update_params(suggestion, diff_file)
- result = ::Files::UpdateService.new(suggestion.project, @current_user, params).execute
+ result = ::Files::UpdateService.new(suggestion.project, current_user, params).execute
if result[:status] == :success
suggestion.update(commit_id: result[:result], applied: true)
@@ -46,13 +64,14 @@ module Suggestions
def file_update_params(suggestion, diff_file)
blob = diff_file.new_blob
+ project = suggestion.project
file_path = suggestion.file_path
branch_name = suggestion.branch
file_content = new_file_content(suggestion, blob)
- commit_message = "Apply suggestion to #{file_path}"
+ commit_message = processed_suggestion_commit_message(suggestion)
file_last_commit =
- Gitlab::Git::Commit.last_for_path(suggestion.project.repository,
+ Gitlab::Git::Commit.last_for_path(project.repository,
blob.commit_id,
blob.path)
@@ -75,5 +94,17 @@ module Suggestions
content.join
end
+
+ def suggestion_commit_message(project)
+ project.suggestion_commit_message || DEFAULT_SUGGESTION_COMMIT_MESSAGE
+ end
+
+ def processed_suggestion_commit_message(suggestion)
+ message = suggestion_commit_message(suggestion.project)
+
+ Gitlab::StringPlaceholderReplacer.replace_string_placeholders(message, PLACEHOLDERS_REGEX) do |key|
+ PLACEHOLDERS[key].call(suggestion, current_user)
+ end
+ end
end
end
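
    Editor's note: a plain-Ruby approximation of the placeholder substitution (the real code delegates to
    Gitlab::StringPlaceholderReplacer; the keys and values here are made up):

    placeholders = {
      'file_path' => 'app/models/user.rb',
      'username'  => 'alice'
    }
    regex = Regexp.union(placeholders.keys)

    message = 'Apply %{username} suggestion to %{file_path}'
    message.gsub(/%\{(#{regex})\}/) { placeholders[Regexp.last_match(1)] }
    # => "Apply alice suggestion to app/models/user.rb"
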
diff --git a/app/services/system_hooks_service.rb b/app/services/system_hooks_service.rb
index 06d2037fb63..0d369c23b57 100644
--- a/app/services/system_hooks_service.rb
+++ b/app/services/system_hooks_service.rb
@@ -14,7 +14,7 @@ class SystemHooksService
hook.async_execute(data, 'system_hooks')
end
- Gitlab::Plugin.execute_all_async(data)
+ Gitlab::FileHook.execute_all_async(data)
end
private
diff --git a/app/services/system_note_service.rb b/app/services/system_note_service.rb
index 25e3282d3fb..38e0a7d34ad 100644
--- a/app/services/system_note_service.rb
+++ b/app/services/system_note_service.rb
@@ -60,9 +60,7 @@ module SystemNoteService
#
# Returns the created Note object
def change_due_date(noteable, project, author, due_date)
- body = due_date ? "changed due date to #{due_date.to_s(:long)}" : 'removed due date'
-
- create_note(NoteSummary.new(noteable, project, author, body, action: 'due_date'))
+ ::SystemNotes::TimeTrackingService.new(noteable: noteable, project: project, author: author).change_due_date(due_date)
end
# Called when the estimated time of a Noteable is changed
@@ -80,14 +78,7 @@ module SystemNoteService
#
# Returns the created Note object
def change_time_estimate(noteable, project, author)
- parsed_time = Gitlab::TimeTrackingFormatter.output(noteable.time_estimate)
- body = if noteable.time_estimate == 0
- "removed time estimate"
- else
- "changed time estimate to #{parsed_time}"
- end
-
- create_note(NoteSummary.new(noteable, project, author, body, action: 'time_tracking'))
+ ::SystemNotes::TimeTrackingService.new(noteable: noteable, project: project, author: author).change_time_estimate
end
# Called when the spent time of a Noteable is changed
@@ -105,21 +96,7 @@ module SystemNoteService
#
# Returns the created Note object
def change_time_spent(noteable, project, author)
- time_spent = noteable.time_spent
-
- if time_spent == :reset
- body = "removed time spent"
- else
- spent_at = noteable.spent_at
- parsed_time = Gitlab::TimeTrackingFormatter.output(time_spent.abs)
- action = time_spent > 0 ? 'added' : 'subtracted'
-
- text_parts = ["#{action} #{parsed_time} of time spent"]
- text_parts << "at #{spent_at}" if spent_at
- body = text_parts.join(' ')
- end
-
- create_note(NoteSummary.new(noteable, project, author, body, action: 'time_tracking'))
+ ::SystemNotes::TimeTrackingService.new(noteable: noteable, project: project, author: author).change_time_spent
end
def change_status(noteable, project, author, status, source = nil)
diff --git a/app/services/system_notes/time_tracking_service.rb b/app/services/system_notes/time_tracking_service.rb
new file mode 100644
index 00000000000..8de42bd3225
--- /dev/null
+++ b/app/services/system_notes/time_tracking_service.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+module SystemNotes
+ class TimeTrackingService < ::SystemNotes::BaseService
+ # Called when the due_date of a Noteable is changed
+ #
+ # due_date - Due date being assigned, or nil
+ #
+ # Example Note text:
+ #
+ # "removed due date"
+ #
+ # "changed due date to September 20, 2018"
+ #
+ # Returns the created Note object
+ def change_due_date(due_date)
+ body = due_date ? "changed due date to #{due_date.to_s(:long)}" : 'removed due date'
+
+ create_note(NoteSummary.new(noteable, project, author, body, action: 'due_date'))
+ end
+
+ # Called when the estimated time of a Noteable is changed
+ #
+ # time_estimate - Estimated time
+ #
+ # Example Note text:
+ #
+ # "removed time estimate"
+ #
+ # "changed time estimate to 3d 5h"
+ #
+ # Returns the created Note object
+ def change_time_estimate
+ parsed_time = Gitlab::TimeTrackingFormatter.output(noteable.time_estimate)
+ body = if noteable.time_estimate == 0
+ "removed time estimate"
+ else
+ "changed time estimate to #{parsed_time}"
+ end
+
+ create_note(NoteSummary.new(noteable, project, author, body, action: 'time_tracking'))
+ end
+
+ # Called when the spent time of a Noteable is changed
+ #
+ # time_spent - Spent time
+ #
+ # Example Note text:
+ #
+ # "removed time spent"
+ #
+ # "added 2h 30m of time spent"
+ #
+ # Returns the created Note object
+ def change_time_spent
+ time_spent = noteable.time_spent
+
+ if time_spent == :reset
+ body = "removed time spent"
+ else
+ spent_at = noteable.spent_at
+ parsed_time = Gitlab::TimeTrackingFormatter.output(time_spent.abs)
+ action = time_spent > 0 ? 'added' : 'subtracted'
+
+ text_parts = ["#{action} #{parsed_time} of time spent"]
+ text_parts << "at #{spent_at}" if spent_at
+ body = text_parts.join(' ')
+ end
+
+ create_note(NoteSummary.new(noteable, project, author, body, action: 'time_tracking'))
+ end
+ end
+end
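
    Editor's note: for the time-spent note, a small illustration of how the body is assembled; the
    formatted string is what Gitlab::TimeTrackingFormatter would be expected to return for 9000 seconds:

    require 'date'

    time_spent  = 9_000                 # seconds
    parsed_time = '2h 30m'              # assumed formatter output
    spent_at    = Date.new(2020, 1, 20)

    action = time_spent > 0 ? 'added' : 'subtracted'
    text_parts = ["#{action} #{parsed_time} of time spent"]
    text_parts << "at #{spent_at}" if spent_at
    text_parts.join(' ')
    # => "added 2h 30m of time spent at 2020-01-20"
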
diff --git a/app/services/template_engines/liquid_service.rb b/app/services/template_engines/liquid_service.rb
new file mode 100644
index 00000000000..809ebd0316b
--- /dev/null
+++ b/app/services/template_engines/liquid_service.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+module TemplateEngines
+ class LiquidService < BaseService
+ RenderError = Class.new(StandardError)
+
+ DEFAULT_RENDER_SCORE_LIMIT = 1_000
+
+    def initialize(string)
+      @string = string
+      @template = Liquid::Template.parse(string)
+    end
+
+ def render(context, render_score_limit: DEFAULT_RENDER_SCORE_LIMIT)
+ set_limits(render_score_limit)
+
+ @template.render!(context.stringify_keys)
+ rescue Liquid::MemoryError => e
+ handle_exception(e, string: @string, context: context)
+
+ raise RenderError, _('Memory limit exceeded while rendering template')
+ rescue Liquid::Error => e
+ handle_exception(e, string: @string, context: context)
+
+ raise RenderError, _('Error rendering query')
+ end
+
+ private
+
+ def set_limits(render_score_limit)
+ @template.resource_limits.render_score_limit = render_score_limit
+
+ # We can also set assign_score_limit and render_length_limit if required.
+
+ # render_score_limit limits the number of nodes (string, variable, block, tags)
+ # that are allowed in the template.
+ # render_length_limit seems to limit the sum of the bytesize of all node blocks.
+ # assign_score_limit seems to limit the sum of the bytesize of all capture blocks.
+ end
+
+ def handle_exception(exception, extra = {})
+ log_error(exception.message)
+ Gitlab::ErrorTracking.track_exception(exception, {
+ template_string: extra[:string],
+ variables: extra[:context]
+ })
+ end
+ end
+end
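
    Editor's note: a minimal sketch assuming the liquid gem is available, mirroring what render does with
    a context hash:

    require 'liquid'

    template = Liquid::Template.parse('up{environment="{{ci_environment_slug}}"}')
    template.render!('ci_environment_slug' => 'production')
    # => 'up{environment="production"}'
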
diff --git a/app/services/update_snippet_service.rb b/app/services/update_snippet_service.rb
deleted file mode 100644
index ac7f8e9b1f5..00000000000
--- a/app/services/update_snippet_service.rb
+++ /dev/null
@@ -1,36 +0,0 @@
-# frozen_string_literal: true
-
-class UpdateSnippetService < BaseService
- include SpamCheckService
-
- attr_accessor :snippet
-
- def initialize(project, user, snippet, params)
- super(project, user, params)
- @snippet = snippet
- end
-
- def execute
- # check that user is allowed to set specified visibility_level
- new_visibility = visibility_level
-
- if new_visibility && new_visibility.to_i != snippet.visibility_level
- unless Gitlab::VisibilityLevel.allowed_for?(current_user, new_visibility)
- deny_visibility_level(snippet, new_visibility)
- return snippet
- end
- end
-
- filter_spam_check_params
- snippet.assign_attributes(params)
- spam_check(snippet, current_user)
-
- snippet_saved = snippet.with_transaction_returning_status do
- snippet.save && snippet.store_mentions!
- end
-
- if snippet_saved
- Gitlab::UsageDataCounters::SnippetCounter.count(:update)
- end
- end
-end
diff --git a/app/services/users/activity_service.rb b/app/services/users/activity_service.rb
index 33444c2a7dc..85855f45e33 100644
--- a/app/services/users/activity_service.rb
+++ b/app/services/users/activity_service.rb
@@ -4,7 +4,7 @@ module Users
class ActivityService
LEASE_TIMEOUT = 1.minute.to_i
- def initialize(author, activity)
+ def initialize(author)
@user = if author.respond_to?(:username)
author
elsif author.respond_to?(:user)
@@ -12,7 +12,6 @@ module Users
end
@user = nil unless @user.is_a?(User)
- @activity = activity
end
def execute
diff --git a/app/services/users/build_service.rb b/app/services/users/build_service.rb
index ea4d11e728e..d18f20bc1db 100644
--- a/app/services/users/build_service.rb
+++ b/app/services/users/build_service.rb
@@ -86,6 +86,8 @@ module Users
:email_confirmation,
:password_automatically_set,
:name,
+ :first_name,
+ :last_name,
:password,
:username
]
@@ -107,6 +109,12 @@ module Users
if user_params[:skip_confirmation].nil?
user_params[:skip_confirmation] = skip_user_confirmation_email_from_setting
end
+
+ fallback_name = "#{user_params[:first_name]} #{user_params[:last_name]}"
+
+ if user_params[:name].blank? && fallback_name.present?
+ user_params = user_params.merge(name: fallback_name)
+ end
end
if user_default_internal_regex_enabled? && !user_params.key?(:external)
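
    Editor's note: an illustration of the name fallback with made-up params; blank?/present? come from
    ActiveSupport:

    require 'active_support/core_ext/object/blank'

    user_params = { first_name: 'Ada', last_name: 'Lovelace', name: nil }

    fallback_name = "#{user_params[:first_name]} #{user_params[:last_name]}"
    user_params = user_params.merge(name: fallback_name) if user_params[:name].blank? && fallback_name.present?

    user_params[:name] # => "Ada Lovelace"
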
diff --git a/app/services/users/destroy_service.rb b/app/services/users/destroy_service.rb
index e341c7f0537..643ebdc6839 100644
--- a/app/services/users/destroy_service.rb
+++ b/app/services/users/destroy_service.rb
@@ -56,6 +56,13 @@ module Users
MigrateToGhostUserService.new(user).execute unless options[:hard_delete]
+ if Feature.enabled?(:destroy_user_associations_in_batches)
+ # Rails attempts to load all related records into memory before
+ # destroying: https://github.com/rails/rails/issues/22510
+ # This ensures we delete records in batches.
+ user.destroy_dependent_associations_in_batches
+ end
+
# Destroy the namespace after destroying the user since certain methods may depend on the namespace existing
user_data = user.destroy
namespace.destroy
diff --git a/app/services/users/update_service.rb b/app/services/users/update_service.rb
index 422c8ed6575..e7667b0ca18 100644
--- a/app/services/users/update_service.rb
+++ b/app/services/users/update_service.rb
@@ -17,6 +17,8 @@ module Users
yield(@user) if block_given?
user_exists = @user.persisted?
+
+ discard_read_only_attributes
assign_attributes
assign_identity
@@ -50,13 +52,19 @@ module Users
success
end
- def assign_attributes
+ def discard_read_only_attributes
+ discard_synced_attributes
+ end
+
+ def discard_synced_attributes
if (metadata = @user.user_synced_attributes_metadata)
read_only = metadata.read_only_attributes
params.reject! { |key, _| read_only.include?(key.to_sym) }
end
+ end
+ def assign_attributes
@user.assign_attributes(params.except(*identity_attributes)) unless params.empty?
end