path: root/app/services
author     Kamil Trzciński <ayufan@ayufan.eu>  2018-02-28 20:35:22 +0100
committer  Kamil Trzciński <ayufan@ayufan.eu>  2018-02-28 20:35:22 +0100
commit     e0401df1214397626e65e58166988fe62715d372 (patch)
tree       087d8ca4a1611aa50a8ac98e66f7d1657ff1f90f /app/services
parent     2b7b60728426c10ef1188a1073d3630805773a35 (diff)
parent     11c67e7c2f992299ff5918ce67995b73d1e0be6d (diff)
download   gitlab-ce-e0401df1214397626e65e58166988fe62715d372.tar.gz
Merge commit '11c67e7c2f992299ff5918ce67995b73d1e0be6d' into object-storage-ee-to-ce-backport
Diffstat (limited to 'app/services')
-rw-r--r--  app/services/ci/create_cluster_service.rb  15
-rw-r--r--  app/services/ci/create_pipeline_service.rb  150
-rw-r--r--  app/services/ci/extract_sections_from_build_trace_service.rb  30
-rw-r--r--  app/services/ci/fetch_gcp_operation_service.rb  17
-rw-r--r--  app/services/ci/fetch_kubernetes_token_service.rb  72
-rw-r--r--  app/services/ci/finalize_cluster_creation_service.rb  33
-rw-r--r--  app/services/ci/integrate_cluster_service.rb  26
-rw-r--r--  app/services/ci/pipeline_trigger_service.rb  2
-rw-r--r--  app/services/ci/provision_cluster_service.rb  36
-rw-r--r--  app/services/ci/update_cluster_service.rb  22
-rw-r--r--  app/services/commits/change_service.rb  6
-rw-r--r--  app/services/delete_merged_branches_service.rb  19
-rw-r--r--  app/services/deploy_keys/create_service.rb  7
-rw-r--r--  app/services/emails/base_service.rb  6
-rw-r--r--  app/services/emails/confirm_service.rb  7
-rw-r--r--  app/services/emails/create_service.rb  4
-rw-r--r--  app/services/emails/destroy_service.rb  6
-rw-r--r--  app/services/event_create_service.rb  13
-rw-r--r--  app/services/gpg_keys/create_service.rb  9
-rw-r--r--  app/services/issuable_base_service.rb  23
-rw-r--r--  app/services/issues/close_service.rb  1
-rw-r--r--  app/services/issues/update_service.rb  2
-rw-r--r--  app/services/keys/base_service.rb  13
-rw-r--r--  app/services/keys/create_service.rb  9
-rw-r--r--  app/services/keys/last_used_service.rb  35
-rw-r--r--  app/services/merge_requests/close_service.rb  1
-rw-r--r--  app/services/merge_requests/create_service.rb  5
-rw-r--r--  app/services/merge_requests/ff_merge_service.rb  24
-rw-r--r--  app/services/merge_requests/merge_service.rb  34
-rw-r--r--  app/services/merge_requests/post_merge_service.rb  1
-rw-r--r--  app/services/notes/create_service.rb  8
-rw-r--r--  app/services/notification_service.rb  7
-rw-r--r--  app/services/projects/count_service.rb  7
-rw-r--r--  app/services/projects/destroy_service.rb  7
-rw-r--r--  app/services/projects/fork_service.rb  20
-rw-r--r--  app/services/projects/hashed_storage_migration_service.rb  68
-rw-r--r--  app/services/projects/unlink_fork_service.rb  1
-rw-r--r--  app/services/projects/update_service.rb  5
-rw-r--r--  app/services/system_note_service.rb  7
-rw-r--r--  app/services/tags/create_service.rb  2
-rw-r--r--  app/services/todo_service.rb  9
-rw-r--r--  app/services/users/activity_service.rb  2
-rw-r--r--  app/services/users/last_push_event_service.rb  83
-rw-r--r--  app/services/users/update_service.rb  19
44 files changed, 687 insertions, 186 deletions
diff --git a/app/services/ci/create_cluster_service.rb b/app/services/ci/create_cluster_service.rb
new file mode 100644
index 00000000000..f7ee0e468e2
--- /dev/null
+++ b/app/services/ci/create_cluster_service.rb
@@ -0,0 +1,15 @@
+module Ci
+ class CreateClusterService < BaseService
+ def execute(access_token)
+ params['gcp_machine_type'] ||= GoogleApi::CloudPlatform::Client::DEFAULT_MACHINE_TYPE
+
+ cluster_params =
+ params.merge(user: current_user,
+ gcp_token: access_token)
+
+ project.create_cluster(cluster_params).tap do |cluster|
+ ClusterProvisionWorker.perform_async(cluster.id) if cluster.persisted?
+ end
+ end
+ end
+end
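A minimal usage sketch for the new service, assuming a GitLab Rails console with project and current_user in scope and a GCP OAuth token obtained elsewhere; parameter values below are placeholders, not a recommended configuration (in the application they arrive as controller params):

# Hypothetical call shape for Ci::CreateClusterService (placeholder values).
params = {
  'gcp_project_id'   => 'my-gcp-project',
  'gcp_cluster_name' => 'demo-cluster',
  'gcp_cluster_zone' => 'us-central1-a',
  'gcp_cluster_size' => 1
}

cluster = Ci::CreateClusterService
  .new(project, current_user, params)
  .execute(token)

# When the record persists, ClusterProvisionWorker has already been enqueued
# to drive the asynchronous GKE provisioning.
cluster.persisted?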
diff --git a/app/services/ci/create_pipeline_service.rb b/app/services/ci/create_pipeline_service.rb
index d20de9b16a4..31a712ccc1b 100644
--- a/app/services/ci/create_pipeline_service.rb
+++ b/app/services/ci/create_pipeline_service.rb
@@ -2,110 +2,55 @@ module Ci
class CreatePipelineService < BaseService
attr_reader :pipeline
- def execute(source, ignore_skip_ci: false, save_on_errors: true, trigger_request: nil, schedule: nil)
+ SEQUENCE = [Gitlab::Ci::Pipeline::Chain::Validate::Abilities,
+ Gitlab::Ci::Pipeline::Chain::Validate::Repository,
+ Gitlab::Ci::Pipeline::Chain::Validate::Config,
+ Gitlab::Ci::Pipeline::Chain::Skip,
+ Gitlab::Ci::Pipeline::Chain::Create].freeze
+
+ def execute(source, ignore_skip_ci: false, save_on_errors: true, trigger_request: nil, schedule: nil, &block)
@pipeline = Ci::Pipeline.new(
source: source,
project: project,
ref: ref,
sha: sha,
before_sha: before_sha,
- tag: tag?,
+ tag: tag_exists?,
trigger_requests: Array(trigger_request),
user: current_user,
pipeline_schedule: schedule,
protected: project.protected_for?(ref)
)
- result = validate_project_and_git_items ||
- validate_pipeline(ignore_skip_ci: ignore_skip_ci,
- save_on_errors: save_on_errors)
+ command = OpenStruct.new(ignore_skip_ci: ignore_skip_ci,
+ save_incompleted: save_on_errors,
+ seeds_block: block,
+ project: project,
+ current_user: current_user)
- return result if result
+ sequence = Gitlab::Ci::Pipeline::Chain::Sequence
+ .new(pipeline, command, SEQUENCE)
- begin
- Ci::Pipeline.transaction do
- pipeline.save!
+ sequence.build! do |pipeline, sequence|
+ update_merge_requests_head_pipeline if pipeline.persisted?
- yield(pipeline) if block_given?
+ if sequence.complete?
+ cancel_pending_pipelines if project.auto_cancel_pending_pipelines?
+ pipeline_created_counter.increment(source: source)
- Ci::CreatePipelineStagesService
- .new(project, current_user)
- .execute(pipeline)
+ pipeline.process!
end
- rescue ActiveRecord::RecordInvalid => e
- return error("Failed to persist the pipeline: #{e}")
end
-
- update_merge_requests_head_pipeline
-
- cancel_pending_pipelines if project.auto_cancel_pending_pipelines?
-
- pipeline_created_counter.increment(source: source)
-
- pipeline.tap(&:process!)
end
private
- def validate_project_and_git_items
- unless project.builds_enabled?
- return error('Pipeline is disabled')
- end
-
- unless allowed_to_trigger_pipeline?
- if can?(current_user, :create_pipeline, project)
- return error("Insufficient permissions for protected ref '#{ref}'")
- else
- return error('Insufficient permissions to create a new pipeline')
- end
- end
-
- unless branch? || tag?
- return error('Reference not found')
- end
-
- unless commit
- return error('Commit not found')
- end
- end
-
- def validate_pipeline(ignore_skip_ci:, save_on_errors:)
- unless pipeline.config_processor
- unless pipeline.ci_yaml_file
- return error("Missing #{pipeline.ci_yaml_file_path} file")
- end
- return error(pipeline.yaml_errors, save: save_on_errors)
- end
-
- if !ignore_skip_ci && skip_ci?
- pipeline.skip if save_on_errors
- return pipeline
- end
-
- unless pipeline.has_stage_seeds?
- return error('No stages / jobs for this pipeline.')
- end
- end
-
- def allowed_to_trigger_pipeline?
- if current_user
- allowed_to_create?
- else # legacy triggers don't have a corresponding user
- !project.protected_for?(ref)
- end
+ def commit
+ @commit ||= project.commit(origin_sha || origin_ref)
end
- def allowed_to_create?
- return unless can?(current_user, :create_pipeline, project)
-
- access = Gitlab::UserAccess.new(current_user, project: project)
- if branch?
- access.can_update_branch?(ref)
- elsif tag?
- access.can_create_tag?(ref)
- else
- true # Allow it for now and we'll reject when we check ref existence
- end
+ def sha
+ commit.try(:id)
end
def update_merge_requests_head_pipeline
@@ -115,11 +60,6 @@ module Ci
.update_all(head_pipeline_id: @pipeline.id)
end
- def skip_ci?
- return false unless pipeline.git_commit_message
- pipeline.git_commit_message =~ /\[(ci[ _-]skip|skip[ _-]ci)\]/i
- end
-
def cancel_pending_pipelines
Gitlab::OptimisticLocking.retry_lock(auto_cancelable_pipelines) do |cancelables|
cancelables.find_each do |cancelable|
@@ -136,14 +76,6 @@ module Ci
.created_or_pending
end
- def commit
- @commit ||= project.commit(origin_sha || origin_ref)
- end
-
- def sha
- commit.try(:id)
- end
-
def before_sha
params[:checkout_sha] || params[:before] || Gitlab::Git::BLANK_SHA
end
@@ -156,41 +88,17 @@ module Ci
params[:ref]
end
- def branch?
- return @is_branch if defined?(@is_branch)
-
- @is_branch =
- project.repository.ref_exists?(Gitlab::Git::BRANCH_REF_PREFIX + ref)
- end
-
- def tag?
- return @is_tag if defined?(@is_tag)
-
- @is_tag =
- project.repository.ref_exists?(Gitlab::Git::TAG_REF_PREFIX + ref)
+ def tag_exists?
+ project.repository.tag_exists?(ref)
end
def ref
@ref ||= Gitlab::Git.ref_name(origin_ref)
end
- def valid_sha?
- origin_sha && origin_sha != Gitlab::Git::BLANK_SHA
- end
-
- def error(message, save: false)
- pipeline.tap do
- pipeline.errors.add(:base, message)
-
- if save
- pipeline.drop
- update_merge_requests_head_pipeline
- end
- end
- end
-
def pipeline_created_counter
- @pipeline_created_counter ||= Gitlab::Metrics.counter(:pipelines_created_total, "Counter of pipelines created")
+ @pipeline_created_counter ||= Gitlab::Metrics
+ .counter(:pipelines_created_total, "Counter of pipelines created")
end
end
end
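The rewrite replaces the inline validation methods with a chain of steps processed in order. The standalone sketch below illustrates the general chain-of-responsibility shape; it does not use GitLab's actual Gitlab::Ci::Pipeline::Chain classes, and all names are made up. Each step may record an error, and the sequence halts at the first step that reports a break.

# Standalone illustration of the sequence pattern (illustrative names only).
Pipeline = Struct.new(:ref, :errors) do
  def error?
    errors.any?
  end
end

class ValidateRef
  def initialize(pipeline, command)
    @pipeline = pipeline
    @command  = command
  end

  def perform!
    @pipeline.errors << 'Reference not found' unless @command[:ref_exists]
  end

  def break?
    @pipeline.error?
  end
end

class Sequence
  def initialize(pipeline, command, step_classes)
    @pipeline = pipeline
    @steps    = step_classes.map { |klass| klass.new(pipeline, command) }
  end

  def build!
    @steps.each do |step|
      step.perform!
      break if step.break?
    end

    yield @pipeline if block_given?
    @pipeline
  end
end

Sequence.new(Pipeline.new('master', []), { ref_exists: true }, [ValidateRef])
        .build! { |pipeline| puts(pipeline.error? ? 'halted' : 'created') }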
diff --git a/app/services/ci/extract_sections_from_build_trace_service.rb b/app/services/ci/extract_sections_from_build_trace_service.rb
new file mode 100644
index 00000000000..75f9e0f897d
--- /dev/null
+++ b/app/services/ci/extract_sections_from_build_trace_service.rb
@@ -0,0 +1,30 @@
+module Ci
+ class ExtractSectionsFromBuildTraceService < BaseService
+ def execute(build)
+ return false unless build.trace_sections.empty?
+
+ Gitlab::Database.bulk_insert(BuildTraceSection.table_name, extract_sections(build))
+ true
+ end
+
+ private
+
+ def find_or_create_name(name)
+ project.build_trace_section_names.find_or_create_by!(name: name)
+ rescue ActiveRecord::RecordInvalid
+ project.build_trace_section_names.find_by!(name: name)
+ end
+
+ def extract_sections(build)
+ build.trace.extract_sections.map do |attr|
+ name = attr.delete(:name)
+ name_record = find_or_create_name(name)
+
+ attr.merge(
+ build_id: build.id,
+ project_id: project.id,
+ section_name_id: name_record.id)
+ end
+ end
+ end
+end
diff --git a/app/services/ci/fetch_gcp_operation_service.rb b/app/services/ci/fetch_gcp_operation_service.rb
new file mode 100644
index 00000000000..0b68e4d6ea9
--- /dev/null
+++ b/app/services/ci/fetch_gcp_operation_service.rb
@@ -0,0 +1,17 @@
+module Ci
+ class FetchGcpOperationService
+ def execute(cluster)
+ api_client =
+ GoogleApi::CloudPlatform::Client.new(cluster.gcp_token, nil)
+
+ operation = api_client.projects_zones_operations(
+ cluster.gcp_project_id,
+ cluster.gcp_cluster_zone,
+ cluster.gcp_operation_id)
+
+ yield(operation) if block_given?
+ rescue Google::Apis::ServerError, Google::Apis::ClientError, Google::Apis::AuthorizationError => e
+ return cluster.make_errored!("Failed to request to CloudPlatform; #{e.message}")
+ end
+ end
+end
diff --git a/app/services/ci/fetch_kubernetes_token_service.rb b/app/services/ci/fetch_kubernetes_token_service.rb
new file mode 100644
index 00000000000..44da87cb00c
--- /dev/null
+++ b/app/services/ci/fetch_kubernetes_token_service.rb
@@ -0,0 +1,72 @@
+##
+# TODO:
+# Almost all components in this class were copied from app/models/project_services/kubernetes_service.rb.
+# We should DRY up those classes so the same code is not repeated.
+# Maybe we should have a dedicated facility (e.g. lib/kubernetes_api) to maintain all Kubernetes API callers.
+module Ci
+ class FetchKubernetesTokenService
+ attr_reader :api_url, :ca_pem, :username, :password
+
+ def initialize(api_url, ca_pem, username, password)
+ @api_url = api_url
+ @ca_pem = ca_pem
+ @username = username
+ @password = password
+ end
+
+ def execute
+ read_secrets.each do |secret|
+ name = secret.dig('metadata', 'name')
+ if /default-token/ =~ name
+ token_base64 = secret.dig('data', 'token')
+ return Base64.decode64(token_base64) if token_base64
+ end
+ end
+
+ nil
+ end
+
+ private
+
+ def read_secrets
+ kubeclient = build_kubeclient!
+
+ kubeclient.get_secrets.as_json
+ rescue KubeException => err
+ raise err unless err.error_code == 404
+ []
+ end
+
+ def build_kubeclient!(api_path: 'api', api_version: 'v1')
+ raise "Incomplete settings" unless api_url && username && password
+
+ ::Kubeclient::Client.new(
+ join_api_url(api_path),
+ api_version,
+ auth_options: { username: username, password: password },
+ ssl_options: kubeclient_ssl_options,
+ http_proxy_uri: ENV['http_proxy']
+ )
+ end
+
+ def join_api_url(api_path)
+ url = URI.parse(api_url)
+ prefix = url.path.sub(%r{/+\z}, '')
+
+ url.path = [prefix, api_path].join("/")
+
+ url.to_s
+ end
+
+ def kubeclient_ssl_options
+ opts = { verify_ssl: OpenSSL::SSL::VERIFY_PEER }
+
+ if ca_pem.present?
+ opts[:cert_store] = OpenSSL::X509::Store.new
+ opts[:cert_store].add_cert(OpenSSL::X509::Certificate.new(ca_pem))
+ end
+
+ opts
+ end
+ end
+end
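A hedged usage sketch for the token fetcher, with placeholder credentials; the actual caller is Ci::FinalizeClusterCreationService, shown further down in this diff:

# Placeholder values; ca_pem would be the PEM-encoded cluster CA certificate.
token = Ci::FetchKubernetesTokenService.new(
  'https://203.0.113.10', # api_url of the new GKE cluster
  ca_pem,                 # cluster CA certificate (PEM)
  'admin',                # master auth username
  'secret'                # master auth password
).execute

# Returns the Base64-decoded token of the first "default-token*" secret, or nil.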
diff --git a/app/services/ci/finalize_cluster_creation_service.rb b/app/services/ci/finalize_cluster_creation_service.rb
new file mode 100644
index 00000000000..347875c5697
--- /dev/null
+++ b/app/services/ci/finalize_cluster_creation_service.rb
@@ -0,0 +1,33 @@
+module Ci
+ class FinalizeClusterCreationService
+ def execute(cluster)
+ api_client =
+ GoogleApi::CloudPlatform::Client.new(cluster.gcp_token, nil)
+
+ begin
+ gke_cluster = api_client.projects_zones_clusters_get(
+ cluster.gcp_project_id,
+ cluster.gcp_cluster_zone,
+ cluster.gcp_cluster_name)
+ rescue Google::Apis::ServerError, Google::Apis::ClientError, Google::Apis::AuthorizationError => e
+ return cluster.make_errored!("Failed to request to CloudPlatform; #{e.message}")
+ end
+
+ endpoint = gke_cluster.endpoint
+ api_url = 'https://' + endpoint
+ ca_cert = Base64.decode64(gke_cluster.master_auth.cluster_ca_certificate)
+ username = gke_cluster.master_auth.username
+ password = gke_cluster.master_auth.password
+
+ kubernetes_token = Ci::FetchKubernetesTokenService.new(
+ api_url, ca_cert, username, password).execute
+
+ unless kubernetes_token
+ return cluster.make_errored!('Failed to get a default token of kubernetes')
+ end
+
+ Ci::IntegrateClusterService.new.execute(
+ cluster, endpoint, ca_cert, kubernetes_token, username, password)
+ end
+ end
+end
diff --git a/app/services/ci/integrate_cluster_service.rb b/app/services/ci/integrate_cluster_service.rb
new file mode 100644
index 00000000000..d123ce8d26b
--- /dev/null
+++ b/app/services/ci/integrate_cluster_service.rb
@@ -0,0 +1,26 @@
+module Ci
+ class IntegrateClusterService
+ def execute(cluster, endpoint, ca_cert, token, username, password)
+ Gcp::Cluster.transaction do
+ cluster.update!(
+ enabled: true,
+ endpoint: endpoint,
+ ca_cert: ca_cert,
+ kubernetes_token: token,
+ username: username,
+ password: password,
+ service: cluster.project.find_or_initialize_service('kubernetes'),
+ status_event: :make_created)
+
+ cluster.service.update!(
+ active: true,
+ api_url: cluster.api_url,
+ ca_pem: ca_cert,
+ namespace: cluster.project_namespace,
+ token: token)
+ end
+ rescue ActiveRecord::RecordInvalid => e
+ cluster.make_errored!("Failed to integrate cluster into kubernetes_service: #{e.message}")
+ end
+ end
+end
diff --git a/app/services/ci/pipeline_trigger_service.rb b/app/services/ci/pipeline_trigger_service.rb
index 1e5ad28ba57..120af8c1e61 100644
--- a/app/services/ci/pipeline_trigger_service.rb
+++ b/app/services/ci/pipeline_trigger_service.rb
@@ -14,7 +14,7 @@ module Ci
pipeline = Ci::CreatePipelineService.new(project, trigger.owner, ref: params[:ref])
.execute(:trigger, ignore_skip_ci: true) do |pipeline|
- trigger.trigger_requests.create!(pipeline: pipeline)
+ pipeline.trigger_requests.create!(trigger: trigger)
create_pipeline_variables!(pipeline)
end
diff --git a/app/services/ci/provision_cluster_service.rb b/app/services/ci/provision_cluster_service.rb
new file mode 100644
index 00000000000..52d80b01813
--- /dev/null
+++ b/app/services/ci/provision_cluster_service.rb
@@ -0,0 +1,36 @@
+module Ci
+ class ProvisionClusterService
+ def execute(cluster)
+ api_client =
+ GoogleApi::CloudPlatform::Client.new(cluster.gcp_token, nil)
+
+ begin
+ operation = api_client.projects_zones_clusters_create(
+ cluster.gcp_project_id,
+ cluster.gcp_cluster_zone,
+ cluster.gcp_cluster_name,
+ cluster.gcp_cluster_size,
+ machine_type: cluster.gcp_machine_type)
+ rescue Google::Apis::ServerError, Google::Apis::ClientError, Google::Apis::AuthorizationError => e
+ return cluster.make_errored!("Failed to request to CloudPlatform; #{e.message}")
+ end
+
+ unless operation.status == 'RUNNING' || operation.status == 'PENDING'
+ return cluster.make_errored!("Operation status is unexpected; #{operation.status_message}")
+ end
+
+ cluster.gcp_operation_id = api_client.parse_operation_id(operation.self_link)
+
+ unless cluster.gcp_operation_id
+ return cluster.make_errored!('Can not find operation_id from self_link')
+ end
+
+ if cluster.make_creating
+ WaitForClusterCreationWorker.perform_in(
+ WaitForClusterCreationWorker::INITIAL_INTERVAL, cluster.id)
+ else
+ return cluster.make_errored!("Failed to update cluster record; #{cluster.errors}")
+ end
+ end
+ end
+end
diff --git a/app/services/ci/update_cluster_service.rb b/app/services/ci/update_cluster_service.rb
new file mode 100644
index 00000000000..70d88fca660
--- /dev/null
+++ b/app/services/ci/update_cluster_service.rb
@@ -0,0 +1,22 @@
+module Ci
+ class UpdateClusterService < BaseService
+ def execute(cluster)
+ Gcp::Cluster.transaction do
+ cluster.update!(params)
+
+ if params['enabled'] == 'true'
+ cluster.service.update!(
+ active: true,
+ api_url: cluster.api_url,
+ ca_pem: cluster.ca_cert,
+ namespace: cluster.project_namespace,
+ token: cluster.kubernetes_token)
+ else
+ cluster.service.update!(active: false)
+ end
+ end
+ rescue ActiveRecord::RecordInvalid => e
+ cluster.errors.add(:base, e.message)
+ end
+ end
+end
diff --git a/app/services/commits/change_service.rb b/app/services/commits/change_service.rb
index 85c2fcf9ea6..b9d0173a2d0 100644
--- a/app/services/commits/change_service.rb
+++ b/app/services/commits/change_service.rb
@@ -12,14 +12,18 @@ module Commits
raise NotImplementedError unless repository.respond_to?(action)
# rubocop:disable GitlabSecurity/PublicSend
+ message = @commit.public_send(:"#{action}_message", current_user)
+
+ # rubocop:disable GitlabSecurity/PublicSend
repository.public_send(
action,
current_user,
@commit,
@branch_name,
+ message,
start_project: @start_project,
start_branch_name: @start_branch)
- rescue Repository::CreateTreeError
+ rescue Gitlab::Git::Repository::CreateTreeError
error_msg = "Sorry, we cannot #{action.to_s.dasherize} this #{@commit.change_type_title(current_user)} automatically.
This #{@commit.change_type_title(current_user)} may already have been #{action.to_s.dasherize}ed, or a more recent commit may have updated some of its content."
raise ChangeError, error_msg
diff --git a/app/services/delete_merged_branches_service.rb b/app/services/delete_merged_branches_service.rb
index ff11bd59d29..077268b2388 100644
--- a/app/services/delete_merged_branches_service.rb
+++ b/app/services/delete_merged_branches_service.rb
@@ -6,15 +6,18 @@ class DeleteMergedBranchesService < BaseService
def execute
raise Gitlab::Access::AccessDeniedError unless can?(current_user, :push_code, project)
- branches = project.repository.branch_names
- branches = branches.select { |branch| project.repository.merged_to_root_ref?(branch) }
- # Prevent deletion of branches relevant to open merge requests
- branches -= merge_request_branch_names
- # Prevent deletion of protected branches
- branches = branches.reject { |branch| project.protected_for?(branch) }
+ # n+1: https://gitlab.com/gitlab-org/gitlab-ce/issues/37438
+ Gitlab::GitalyClient.allow_n_plus_1_calls do
+ branches = project.repository.branch_names
+ branches = branches.select { |branch| project.repository.merged_to_root_ref?(branch) }
+ # Prevent deletion of branches relevant to open merge requests
+ branches -= merge_request_branch_names
+ # Prevent deletion of protected branches
+ branches = branches.reject { |branch| project.protected_for?(branch) }
- branches.each do |branch|
- DeleteBranchService.new(project, current_user).execute(branch)
+ branches.each do |branch|
+ DeleteBranchService.new(project, current_user).execute(branch)
+ end
end
end
diff --git a/app/services/deploy_keys/create_service.rb b/app/services/deploy_keys/create_service.rb
new file mode 100644
index 00000000000..16de3d08df2
--- /dev/null
+++ b/app/services/deploy_keys/create_service.rb
@@ -0,0 +1,7 @@
+module DeployKeys
+ class CreateService < Keys::BaseService
+ def execute
+ DeployKey.create(params.merge(user: user))
+ end
+ end
+end
diff --git a/app/services/emails/base_service.rb b/app/services/emails/base_service.rb
index ace49889097..5bbceeb3b3f 100644
--- a/app/services/emails/base_service.rb
+++ b/app/services/emails/base_service.rb
@@ -1,8 +1,8 @@
module Emails
class BaseService
- def initialize(user, opts)
- @user = user
- @email = opts[:email]
+ def initialize(current_user, params = {})
+ @current_user, @params = current_user, params.dup
+ @user = params.delete(:user)
end
end
end
diff --git a/app/services/emails/confirm_service.rb b/app/services/emails/confirm_service.rb
new file mode 100644
index 00000000000..b5301bf2b82
--- /dev/null
+++ b/app/services/emails/confirm_service.rb
@@ -0,0 +1,7 @@
+module Emails
+ class ConfirmService < ::Emails::BaseService
+ def execute(email)
+ email.resend_confirmation_instructions
+ end
+ end
+end
diff --git a/app/services/emails/create_service.rb b/app/services/emails/create_service.rb
index b6491ee9804..94a841af7c3 100644
--- a/app/services/emails/create_service.rb
+++ b/app/services/emails/create_service.rb
@@ -1,7 +1,7 @@
module Emails
class CreateService < ::Emails::BaseService
- def execute
- @user.emails.create(email: @email)
+ def execute(extra_params = {})
+ @user.emails.create(@params.merge(extra_params))
end
end
end
diff --git a/app/services/emails/destroy_service.rb b/app/services/emails/destroy_service.rb
index d586b9dfe0c..1ed131fe326 100644
--- a/app/services/emails/destroy_service.rb
+++ b/app/services/emails/destroy_service.rb
@@ -1,13 +1,13 @@
module Emails
class DestroyService < ::Emails::BaseService
- def execute
- Email.find_by_email!(@email).destroy && update_secondary_emails!
+ def execute(email)
+ email.destroy && update_secondary_emails!
end
private
def update_secondary_emails!
- result = ::Users::UpdateService.new(@user).execute do |user|
+ result = ::Users::UpdateService.new(@current_user, user: @user).execute do |user|
user.update_secondary_emails!
end
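Taken together with the Emails::BaseService change above, the Emails services now receive the acting user first, the target user through params, and the Email record itself as an argument to #execute. A hedged call-site sketch (object names are illustrative):

Emails::CreateService.new(current_user, user: user, email: 'new@example.com').execute
Emails::DestroyService.new(current_user, user: user).execute(email)
Emails::ConfirmService.new(current_user, user: user).execute(email)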
diff --git a/app/services/event_create_service.rb b/app/services/event_create_service.rb
index 0b7e4f187f7..6328d567a07 100644
--- a/app/services/event_create_service.rb
+++ b/app/services/event_create_service.rb
@@ -74,12 +74,19 @@ class EventCreateService
# We're using an explicit transaction here so that any errors that may occur
# when creating push payload data will result in the event creation being
# rolled back as well.
- Event.transaction do
- event = create_event(project, current_user, Event::PUSHED)
+ event = Event.transaction do
+ new_event = create_event(project, current_user, Event::PUSHED)
- PushEventPayloadService.new(event, push_data).execute
+ PushEventPayloadService
+ .new(new_event, push_data)
+ .execute
+
+ new_event
end
+ Users::LastPushEventService.new(current_user)
+ .cache_last_push_event(event)
+
Users::ActivityService.new(current_user, 'push').execute
end
diff --git a/app/services/gpg_keys/create_service.rb b/app/services/gpg_keys/create_service.rb
new file mode 100644
index 00000000000..e822a89c4d3
--- /dev/null
+++ b/app/services/gpg_keys/create_service.rb
@@ -0,0 +1,9 @@
+module GpgKeys
+ class CreateService < Keys::BaseService
+ def execute
+ key = user.gpg_keys.create(params)
+ notification_service.new_gpg_key(key) if key.persisted?
+ key
+ end
+ end
+end
diff --git a/app/services/issuable_base_service.rb b/app/services/issuable_base_service.rb
index 8b967b78052..f83ece7098f 100644
--- a/app/services/issuable_base_service.rb
+++ b/app/services/issuable_base_service.rb
@@ -43,6 +43,10 @@ class IssuableBaseService < BaseService
SystemNoteService.change_time_spent(issuable, issuable.project, current_user)
end
+ def create_discussion_lock_note(issuable)
+ SystemNoteService.discussion_lock(issuable, current_user)
+ end
+
def filter_params(issuable)
ability_name = :"admin_#{issuable.to_ability_name}"
@@ -57,6 +61,7 @@ class IssuableBaseService < BaseService
params.delete(:due_date)
params.delete(:canonical_issue_id)
params.delete(:project)
+ params.delete(:discussion_locked)
end
filter_assignee(issuable)
@@ -182,6 +187,7 @@ class IssuableBaseService < BaseService
after_create(issuable)
execute_hooks(issuable)
invalidate_cache_counts(issuable, users: issuable.assignees)
+ issuable.update_project_counter_caches
end
issuable
@@ -193,8 +199,6 @@ class IssuableBaseService < BaseService
def after_create(issuable)
# To be overridden by subclasses
-
- issuable.update_project_counter_caches
end
def before_update(issuable)
@@ -203,8 +207,6 @@ class IssuableBaseService < BaseService
def after_update(issuable)
# To be overridden by subclasses
-
- issuable.update_project_counter_caches
end
def update(issuable)
@@ -229,12 +231,17 @@ class IssuableBaseService < BaseService
before_update(issuable)
+ # We have to perform this check before saving the issuable as Rails resets
+ # the changed fields upon calling #save.
+ update_project_counters = issuable.update_project_counter_caches?
+
if issuable.with_transaction_returning_status { issuable.save }
# We do not touch as it will affect a update on updated_at field
ActiveRecord::Base.no_touching do
handle_common_system_notes(issuable, old_labels: old_labels)
end
+ change_discussion_lock(issuable)
handle_changes(
issuable,
old_labels: old_labels,
@@ -249,6 +256,8 @@ class IssuableBaseService < BaseService
after_update(issuable)
issuable.create_new_cross_references!(current_user)
execute_hooks(issuable, 'update')
+
+ issuable.update_project_counter_caches if update_project_counters
end
end
@@ -291,6 +300,12 @@ class IssuableBaseService < BaseService
end
end
+ def change_discussion_lock(issuable)
+ if issuable.previous_changes.include?('discussion_locked')
+ create_discussion_lock_note(issuable)
+ end
+ end
+
def toggle_award(issuable)
award = params.delete(:emoji_award)
if award
diff --git a/app/services/issues/close_service.rb b/app/services/issues/close_service.rb
index 74459c3342c..0c5cf2c62ad 100644
--- a/app/services/issues/close_service.rb
+++ b/app/services/issues/close_service.rb
@@ -29,6 +29,7 @@ module Issues
todo_service.close_issue(issue, current_user)
execute_hooks(issue, 'close')
invalidate_cache_counts(issue, users: issue.assignees)
+ issue.update_project_counter_caches
end
issue
diff --git a/app/services/issues/update_service.rb b/app/services/issues/update_service.rb
index b4ca3966505..e0339ddf9bb 100644
--- a/app/services/issues/update_service.rb
+++ b/app/services/issues/update_service.rb
@@ -34,7 +34,7 @@ module Issues
if issue.assignees != old_assignees
create_assignee_note(issue, old_assignees)
notification_service.reassigned_issue(issue, current_user, old_assignees)
- todo_service.reassigned_issue(issue, current_user)
+ todo_service.reassigned_issue(issue, current_user, old_assignees)
end
if issue.previous_changes.include?('confidential')
diff --git a/app/services/keys/base_service.rb b/app/services/keys/base_service.rb
new file mode 100644
index 00000000000..545832d0bd4
--- /dev/null
+++ b/app/services/keys/base_service.rb
@@ -0,0 +1,13 @@
+module Keys
+ class BaseService
+ attr_accessor :user, :params
+
+ def initialize(user, params)
+ @user, @params = user, params
+ end
+
+ def notification_service
+ NotificationService.new
+ end
+ end
+end
diff --git a/app/services/keys/create_service.rb b/app/services/keys/create_service.rb
new file mode 100644
index 00000000000..e2e5a6c46c5
--- /dev/null
+++ b/app/services/keys/create_service.rb
@@ -0,0 +1,9 @@
+module Keys
+ class CreateService < ::Keys::BaseService
+ def execute
+ key = user.keys.create(params)
+ notification_service.new_key(key) if key.persisted?
+ key
+ end
+ end
+end
diff --git a/app/services/keys/last_used_service.rb b/app/services/keys/last_used_service.rb
new file mode 100644
index 00000000000..dbd79f7da55
--- /dev/null
+++ b/app/services/keys/last_used_service.rb
@@ -0,0 +1,35 @@
+module Keys
+ class LastUsedService
+ TIMEOUT = 1.day.to_i
+
+ attr_reader :key
+
+ # key - The Key for which to update the last used timestamp.
+ def initialize(key)
+ @key = key
+ end
+
+ def execute
+ # We _only_ want to update last_used_at and not also updated_at (which
+ # would be updated when using #touch).
+ key.update_column(:last_used_at, Time.zone.now) if update?
+ end
+
+ def update?
+ return false if ::Gitlab::Database.read_only?
+
+ last_used = key.last_used_at
+
+ return false if last_used && (Time.zone.now - last_used) <= TIMEOUT
+
+ !!redis_lease.try_obtain
+ end
+
+ private
+
+ def redis_lease
+ Gitlab::ExclusiveLease
+ .new("key_update_last_used_at:#{key.id}", timeout: TIMEOUT)
+ end
+ end
+end
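A minimal sketch of how the new service would typically be invoked from an SSH authentication path, assuming key is the Key record that was just used:

# Skips the write if the key was used within the last day or if another
# process already holds the exclusive lease for this key.
Keys::LastUsedService.new(key).execute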
diff --git a/app/services/merge_requests/close_service.rb b/app/services/merge_requests/close_service.rb
index c0ce01f7523..40213c99014 100644
--- a/app/services/merge_requests/close_service.rb
+++ b/app/services/merge_requests/close_service.rb
@@ -14,6 +14,7 @@ module MergeRequests
todo_service.close_merge_request(merge_request, current_user)
execute_hooks(merge_request, 'close')
invalidate_cache_counts(merge_request, users: merge_request.assignees)
+ merge_request.update_project_counter_caches
end
merge_request
diff --git a/app/services/merge_requests/create_service.rb b/app/services/merge_requests/create_service.rb
index 3d53fe0646b..820709583fa 100644
--- a/app/services/merge_requests/create_service.rb
+++ b/app/services/merge_requests/create_service.rb
@@ -13,7 +13,10 @@ module MergeRequests
merge_request.source_branch = params[:source_branch]
merge_request.merge_params['force_remove_source_branch'] = params.delete(:force_remove_source_branch)
- create(merge_request)
+ # n+1: https://gitlab.com/gitlab-org/gitlab-ce/issues/37439
+ Gitlab::GitalyClient.allow_n_plus_1_calls do
+ create(merge_request)
+ end
end
def before_create(merge_request)
diff --git a/app/services/merge_requests/ff_merge_service.rb b/app/services/merge_requests/ff_merge_service.rb
new file mode 100644
index 00000000000..ba6853b835a
--- /dev/null
+++ b/app/services/merge_requests/ff_merge_service.rb
@@ -0,0 +1,24 @@
+module MergeRequests
+ # FfMergeService class
+ #
+ # Performs a git fast-forward merge and, on success, marks the merge
+ # request as merged and executes all hooks and notifications.
+ # Executed when a fast-forward merge is done via the GitLab UI.
+ #
+ class FfMergeService < MergeRequests::MergeService
+ private
+
+ def commit
+ repository.ff_merge(current_user,
+ source,
+ merge_request.target_branch,
+ merge_request: merge_request)
+ rescue Gitlab::Git::HooksService::PreReceiveError => e
+ raise MergeError, e.message
+ rescue StandardError => e
+ raise MergeError, "Something went wrong during merge: #{e.message}"
+ ensure
+ merge_request.update(in_progress_merge_commit_sha: nil)
+ end
+ end
+end
diff --git a/app/services/merge_requests/merge_service.rb b/app/services/merge_requests/merge_service.rb
index b2b6c5627fb..a110abf8256 100644
--- a/app/services/merge_requests/merge_service.rb
+++ b/app/services/merge_requests/merge_service.rb
@@ -11,16 +11,21 @@ module MergeRequests
attr_reader :merge_request, :source
def execute(merge_request)
+ if project.merge_requests_ff_only_enabled && !self.is_a?(FfMergeService)
+ FfMergeService.new(project, current_user, params).execute(merge_request)
+ return
+ end
+
@merge_request = merge_request
unless @merge_request.mergeable?
- return log_merge_error('Merge request is not mergeable', save_message_on_model: true)
+ return handle_merge_error(log_message: 'Merge request is not mergeable', save_message_on_model: true)
end
@source = find_merge_source
unless @source
- return log_merge_error('No source for merge', save_message_on_model: true)
+ return handle_merge_error(log_message: 'No source for merge', save_message_on_model: true)
end
merge_request.in_locked_state do
@@ -31,22 +36,15 @@ module MergeRequests
end
end
rescue MergeError => e
- clean_merge_jid
- log_merge_error(e.message, save_message_on_model: true)
+ handle_merge_error(log_message: e.message, save_message_on_model: true)
end
private
def commit
- committer = repository.user_to_committer(current_user)
+ message = params[:commit_message] || merge_request.merge_commit_message
- options = {
- message: params[:commit_message] || merge_request.merge_commit_message,
- author: committer,
- committer: committer
- }
-
- commit_id = repository.merge(current_user, source, merge_request, options)
+ commit_id = repository.merge(current_user, source, merge_request, message)
raise MergeError, 'Conflicts detected during merge' unless commit_id
@@ -80,10 +78,16 @@ module MergeRequests
@merge_request.force_remove_source_branch? ? @merge_request.author : current_user
end
- def log_merge_error(message, save_message_on_model: false)
- Rails.logger.error("MergeService ERROR: #{merge_request_info} - #{message}")
+ # Logs merge error message and cleans `MergeRequest#merge_jid`.
+ #
+ def handle_merge_error(log_message:, save_message_on_model: false)
+ Rails.logger.error("MergeService ERROR: #{merge_request_info} - #{log_message}")
- @merge_request.update(merge_error: message) if save_message_on_model
+ if save_message_on_model
+ @merge_request.update(merge_error: log_message, merge_jid: nil)
+ else
+ clean_merge_jid
+ end
end
def merge_request_info
diff --git a/app/services/merge_requests/post_merge_service.rb b/app/services/merge_requests/post_merge_service.rb
index 261a8bfa200..b1d6bac4d4a 100644
--- a/app/services/merge_requests/post_merge_service.rb
+++ b/app/services/merge_requests/post_merge_service.rb
@@ -14,6 +14,7 @@ module MergeRequests
notification_service.merge_mr(merge_request, current_user)
execute_hooks(merge_request, 'merge')
invalidate_cache_counts(merge_request, users: merge_request.assignees)
+ merge_request.update_project_counter_caches
end
private
diff --git a/app/services/notes/create_service.rb b/app/services/notes/create_service.rb
index 06971483992..9ea28733f5f 100644
--- a/app/services/notes/create_service.rb
+++ b/app/services/notes/create_service.rb
@@ -4,7 +4,13 @@ module Notes
merge_request_diff_head_sha = params.delete(:merge_request_diff_head_sha)
note = Notes::BuildService.new(project, current_user, params).execute
- return note unless note.valid?
+
+ # n+1: https://gitlab.com/gitlab-org/gitlab-ce/issues/37440
+ note_valid = Gitlab::GitalyClient.allow_n_plus_1_calls do
+ note.valid?
+ end
+
+ return note unless note_valid
# We execute commands (extracted from `params[:note]`) on the noteable
# **before** we save the note because if the note consists of commands
diff --git a/app/services/notification_service.rb b/app/services/notification_service.rb
index e2a80db06a6..8d5da459882 100644
--- a/app/services/notification_service.rb
+++ b/app/services/notification_service.rb
@@ -31,13 +31,6 @@ class NotificationService
end
end
- # Always notify user about email added to profile
- def new_email(email)
- if email.user&.can?(:receive_notifications)
- mailer.new_email_email(email.id).deliver_later
- end
- end
-
# When create an issue we should send an email to:
#
# * issue assignee if their notification level is not Disabled
diff --git a/app/services/projects/count_service.rb b/app/services/projects/count_service.rb
index 5e633c37bf8..aa034315280 100644
--- a/app/services/projects/count_service.rb
+++ b/app/services/projects/count_service.rb
@@ -2,6 +2,11 @@ module Projects
# Base class for the various service classes that count project data (e.g.
# issues or forks).
class CountService
+ # The version of the cache format. This should be bumped whenever the
+ # underlying logic changes. This removes the need for explicitly flushing
+ # all caches.
+ VERSION = 1
+
def initialize(project)
@project = project
end
@@ -37,7 +42,7 @@ module Projects
end
def cache_key
- ['projects', @project.id, cache_key_name]
+ ['projects', 'count_service', VERSION, @project.id, cache_key_name]
end
end
end
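Illustration of the effect of the new VERSION segment: bumping it changes every cache key at once, so stale counters simply expire instead of requiring an explicit flush (values below are examples).

# With VERSION = 1, a project with id 42 and cache_key_name 'issues_count'
# is stored under:
['projects', 'count_service', 1, 42, 'issues_count']

# Bumping VERSION to 2 makes the old entry unreachable; it ages out of the
# cache on its own while new reads repopulate the fresh key.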
diff --git a/app/services/projects/destroy_service.rb b/app/services/projects/destroy_service.rb
index 54eb75ab9bf..19d75ff2efa 100644
--- a/app/services/projects/destroy_service.rb
+++ b/app/services/projects/destroy_service.rb
@@ -22,6 +22,13 @@ module Projects
Projects::UnlinkForkService.new(project, current_user).execute
+ # The project is not necessarily a fork, so update the fork network originating
+ # from this project
+ if fork_network = project.root_of_fork_network
+ fork_network.update(root_project: nil,
+ deleted_root_project_name: project.full_name)
+ end
+
attempt_destroy_transaction(project)
system_hook_service.execute_hooks_for(project, :destroy)
diff --git a/app/services/projects/fork_service.rb b/app/services/projects/fork_service.rb
index ad67e68a86a..eb5cce5ab98 100644
--- a/app/services/projects/fork_service.rb
+++ b/app/services/projects/fork_service.rb
@@ -23,11 +23,31 @@ module Projects
refresh_forks_count
+ link_fork_network(new_project)
+
new_project
end
private
+ def fork_network
+ if @project.fork_network
+ @project.fork_network
+ elsif forked_from_project = @project.forked_from_project
+ # TODO: remove this case when all background migrations have completed
+ # this only happens when a project had a `forked_project_link` that was
+ # not migrated to the `fork_network` relation
+ forked_from_project.fork_network || forked_from_project.create_root_of_fork_network
+ else
+ @project.create_root_of_fork_network
+ end
+ end
+
+ def link_fork_network(new_project)
+ fork_network.fork_network_members.create(project: new_project,
+ forked_from_project: @project)
+ end
+
def refresh_forks_count
Projects::ForksCountService.new(@project).refresh_cache
end
diff --git a/app/services/projects/hashed_storage_migration_service.rb b/app/services/projects/hashed_storage_migration_service.rb
new file mode 100644
index 00000000000..41259de3a16
--- /dev/null
+++ b/app/services/projects/hashed_storage_migration_service.rb
@@ -0,0 +1,68 @@
+module Projects
+ class HashedStorageMigrationService < BaseService
+ include Gitlab::ShellAdapter
+
+ attr_reader :old_disk_path, :new_disk_path
+
+ def initialize(project, logger = nil)
+ @project = project
+ @logger = logger || Rails.logger
+ end
+
+ def execute
+ return if project.hashed_storage?
+
+ @old_disk_path = project.disk_path
+ has_wiki = project.wiki.repository_exists?
+
+ project.storage_version = Storage::HashedProject::STORAGE_VERSION
+ project.ensure_storage_path_exists
+
+ @new_disk_path = project.disk_path
+
+ result = move_repository(@old_disk_path, @new_disk_path)
+
+ if has_wiki
+ result &&= move_repository("#{@old_disk_path}.wiki", "#{@new_disk_path}.wiki")
+ end
+
+ unless result
+ rollback_folder_move
+ return
+ end
+
+ project.repository_read_only = false
+ project.save!
+
+ block_given? ? yield : result
+ end
+
+ private
+
+ def move_repository(from_name, to_name)
+ from_exists = gitlab_shell.exists?(project.repository_storage_path, "#{from_name}.git")
+ to_exists = gitlab_shell.exists?(project.repository_storage_path, "#{to_name}.git")
+
+ # If we don't find the repository on either original or target we should log that as it could be an issue if the
+ # project was not originally empty.
+ if !from_exists && !to_exists
+ logger.warn "Can't find a repository on either source or target paths for #{project.full_path} (ID=#{project.id}) ..."
+ return false
+ elsif !from_exists
+ # Repository has already been moved.
+ return true
+ end
+
+ gitlab_shell.mv_repository(project.repository_storage_path, from_name, to_name)
+ end
+
+ def rollback_folder_move
+ move_repository(@new_disk_path, @old_disk_path)
+ move_repository("#{@new_disk_path}.wiki", "#{@old_disk_path}.wiki")
+ end
+
+ def logger
+ @logger
+ end
+ end
+end
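A hedged sketch of running the migration for a single project, e.g. from a rake task or background job; whether a custom logger is passed is up to the caller:

# Moves the project repository (and its wiki, when present) onto hashed
# storage, rolling the move back if either repository fails to move.
Projects::HashedStorageMigrationService.new(project, Rails.logger).execute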
diff --git a/app/services/projects/unlink_fork_service.rb b/app/services/projects/unlink_fork_service.rb
index f30b40423c8..abe414d0c05 100644
--- a/app/services/projects/unlink_fork_service.rb
+++ b/app/services/projects/unlink_fork_service.rb
@@ -16,6 +16,7 @@ module Projects
refresh_forks_count(@project.forked_from_project)
@project.forked_project_link.destroy
+ @project.fork_network_member.destroy
end
def refresh_forks_count(project)
diff --git a/app/services/projects/update_service.rb b/app/services/projects/update_service.rb
index cb4ffcab778..13e292a18bf 100644
--- a/app/services/projects/update_service.rb
+++ b/app/services/projects/update_service.rb
@@ -24,7 +24,10 @@ module Projects
success
else
- error('Project could not be updated!')
+ model_errors = project.errors.full_messages.to_sentence
+ error_message = model_errors.presence || 'Project could not be updated!'
+
+ error(error_message)
end
end
diff --git a/app/services/system_note_service.rb b/app/services/system_note_service.rb
index 1f66a2668f9..7b32e215c7f 100644
--- a/app/services/system_note_service.rb
+++ b/app/services/system_note_service.rb
@@ -591,6 +591,13 @@ module SystemNoteService
create_note(NoteSummary.new(noteable, project, author, body, action: 'duplicate'))
end
+ def discussion_lock(issuable, author)
+ action = issuable.discussion_locked? ? 'locked' : 'unlocked'
+ body = "#{action} this issue"
+
+ create_note(NoteSummary.new(issuable, issuable.project, author, body, action: action))
+ end
+
private
def notes_for_mentioner(mentioner, noteable, notes)
diff --git a/app/services/tags/create_service.rb b/app/services/tags/create_service.rb
index b3f4a72d6fe..cc76d0df3a1 100644
--- a/app/services/tags/create_service.rb
+++ b/app/services/tags/create_service.rb
@@ -11,7 +11,7 @@ module Tags
begin
new_tag = repository.add_tag(current_user, tag_name, target, message)
- rescue Rugged::TagError
+ rescue Gitlab::Git::Repository::TagExistsError
return error("Tag #{tag_name} already exists")
rescue Gitlab::Git::HooksService::PreReceiveError => ex
return error(ex.message)
diff --git a/app/services/todo_service.rb b/app/services/todo_service.rb
index 6ee96d6a0f8..b6125cafa83 100644
--- a/app/services/todo_service.rb
+++ b/app/services/todo_service.rb
@@ -43,8 +43,8 @@ class TodoService
#
# * create a pending todo for new assignee if issue is assigned
#
- def reassigned_issue(issue, current_user)
- create_assignment_todo(issue, current_user)
+ def reassigned_issue(issue, current_user, old_assignees = [])
+ create_assignment_todo(issue, current_user, old_assignees)
end
# When create a merge request we should:
@@ -254,10 +254,11 @@ class TodoService
create_mention_todos(project, target, author, note, skip_users)
end
- def create_assignment_todo(issuable, author)
+ def create_assignment_todo(issuable, author, old_assignees = [])
if issuable.assignees.any?
+ assignees = issuable.assignees - old_assignees
attributes = attributes_for_todo(issuable.project, issuable, author, Todo::ASSIGNED)
- create_todos(issuable.assignees, attributes)
+ create_todos(assignees, attributes)
end
end
diff --git a/app/services/users/activity_service.rb b/app/services/users/activity_service.rb
index ab532a1fdcf..5803404c3c8 100644
--- a/app/services/users/activity_service.rb
+++ b/app/services/users/activity_service.rb
@@ -14,7 +14,7 @@ module Users
private
def record_activity
- Gitlab::UserActivities.record(@author.id)
+ Gitlab::UserActivities.record(@author.id) if Gitlab::Database.read_write?
Rails.logger.debug("Recorded activity: #{@activity} for User ID: #{@author.id} (username: #{@author.username})")
end
diff --git a/app/services/users/last_push_event_service.rb b/app/services/users/last_push_event_service.rb
new file mode 100644
index 00000000000..f2bfb60604f
--- /dev/null
+++ b/app/services/users/last_push_event_service.rb
@@ -0,0 +1,83 @@
+module Users
+ # Service class for caching and retrieving the last push event of a user.
+ class LastPushEventService
+ EXPIRATION = 2.hours
+
+ def initialize(user)
+ @user = user
+ end
+
+ # Caches the given push event for the current user in the Rails cache.
+ #
+ # event - An instance of PushEvent to cache.
+ def cache_last_push_event(event)
+ keys = [
+ project_cache_key(event.project),
+ user_cache_key
+ ]
+
+ if event.project.forked?
+ keys << project_cache_key(event.project.forked_from_project)
+ end
+
+ keys.each { |key| set_key(key, event.id) }
+ end
+
+ # Returns the last PushEvent for the current user.
+ #
+ # This method will return nil if no event was found.
+ def last_event_for_user
+ find_cached_event(user_cache_key)
+ end
+
+ # Returns the last PushEvent for the current user and the given project.
+ #
+ # project - An instance of Project for which to retrieve the PushEvent.
+ #
+ # This method will return nil if no event was found.
+ def last_event_for_project(project)
+ find_cached_event(project_cache_key(project))
+ end
+
+ def find_cached_event(cache_key)
+ event_id = get_key(cache_key)
+
+ return unless event_id
+
+ unless (event = find_event_in_database(event_id))
+ # We don't want to keep querying the same data over and over when a
+ # merge request has been created, thus we remove the key if no event
+ # (meaning an MR was created) is returned.
+ Rails.cache.delete(cache_key)
+ end
+
+ event
+ end
+
+ private
+
+ def find_event_in_database(id)
+ PushEvent
+ .without_existing_merge_requests
+ .find_by(id: id)
+ end
+
+ def user_cache_key
+ "last-push-event/#{@user.id}"
+ end
+
+ def project_cache_key(project)
+ "last-push-event/#{@user.id}/#{project.id}"
+ end
+
+ def get_key(key)
+ Rails.cache.read(key, raw: true)
+ end
+
+ def set_key(key, value)
+ # We're using raw values here since this takes up less space and we don't
+ # store complex objects.
+ Rails.cache.write(key, value, raw: true, expires_in: EXPIRATION)
+ end
+ end
+end
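A hedged sketch of the intended flow: EventCreateService (changed earlier in this diff) caches the PushEvent right after a push, and the UI can later read it back per user or per project; variable names are illustrative.

service = Users::LastPushEventService.new(current_user)

service.cache_last_push_event(push_event)  # called right after the push event is created
service.last_event_for_project(project)    # => PushEvent or nil
service.last_event_for_user                # => most recent PushEvent or nil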
diff --git a/app/services/users/update_service.rb b/app/services/users/update_service.rb
index 6188b8a4349..15ca1a55a5b 100644
--- a/app/services/users/update_service.rb
+++ b/app/services/users/update_service.rb
@@ -2,22 +2,21 @@ module Users
class UpdateService < BaseService
include NewUserNotifier
- def initialize(user, params = {})
- @user = user
+ def initialize(current_user, params = {})
+ @current_user = current_user
+ @user = params.delete(:user)
@params = params.dup
end
def execute(validate: true, &block)
yield(@user) if block_given?
- assign_attributes(&block)
-
user_exists = @user.persisted?
- if @user.save(validate: validate)
- notify_new_user(@user, nil) unless user_exists
+ assign_attributes(&block)
- success
+ if @user.save(validate: validate)
+ notify_success(user_exists)
else
error(@user.errors.full_messages.uniq.join('. '))
end
@@ -33,6 +32,12 @@ module Users
private
+ def notify_success(user_exists)
+ notify_new_user(@user, nil) unless user_exists
+
+ success
+ end
+
def assign_attributes(&block)
if @user.user_synced_attributes_metadata
params.except!(*@user.user_synced_attributes_metadata.read_only_attributes)