author    Kamil Trzciński <ayufan@ayufan.eu>  2018-02-28 21:28:43 +0100
committer Kamil Trzciński <ayufan@ayufan.eu>  2018-02-28 21:28:43 +0100
commit    a2f375e8f74870dcdcfa1c7886bd1c14c80a684e (patch)
tree      6b6e3a4f7554f4671edc17d87869dd6916984404 /app/workers
parent    a22f6fa6e50bb31921415b01fd345d6802581390 (diff)
parent    81852d1f902c2923c239e9c33cab77f5fd6ca8d8 (diff)
Merge remote-tracking branch 'origin/master' into object-storage-ee-to-ce-backport
Diffstat (limited to 'app/workers')
-rw-r--r--  app/workers/all_queues.yml                              2
-rw-r--r--  app/workers/authorized_projects_worker.rb              36
-rw-r--r--  app/workers/check_gcp_project_billing_worker.rb        49
-rw-r--r--  app/workers/concerns/waitable_worker.rb                44
-rw-r--r--  app/workers/pages_domain_verification_cron_worker.rb   10
-rw-r--r--  app/workers/pages_domain_verification_worker.rb        11
-rw-r--r--  app/workers/process_commit_worker.rb                   12
-rw-r--r--  app/workers/stuck_import_jobs_worker.rb                40
8 files changed, 134 insertions, 70 deletions
diff --git a/app/workers/all_queues.yml b/app/workers/all_queues.yml
index f2c20114534..28a5e5da037 100644
--- a/app/workers/all_queues.yml
+++ b/app/workers/all_queues.yml
@@ -3,6 +3,7 @@
- cronjob:expire_build_artifacts
- cronjob:gitlab_usage_ping
- cronjob:import_export_project_cleanup
+- cronjob:pages_domain_verification_cron
- cronjob:pipeline_schedule
- cronjob:prune_old_events
- cronjob:remove_expired_group_links
@@ -82,6 +83,7 @@
- new_merge_request
- new_note
- pages
+- pages_domain_verification
- post_receive
- process_commit
- project_cache
diff --git a/app/workers/authorized_projects_worker.rb b/app/workers/authorized_projects_worker.rb
index 09559e3b696..d7e24491516 100644
--- a/app/workers/authorized_projects_worker.rb
+++ b/app/workers/authorized_projects_worker.rb
@@ -1,42 +1,10 @@
class AuthorizedProjectsWorker
include ApplicationWorker
+ prepend WaitableWorker
- # Schedules multiple jobs and waits for them to be completed.
- def self.bulk_perform_and_wait(args_list)
- # Short-circuit: it's more efficient to do small numbers of jobs inline
- return bulk_perform_inline(args_list) if args_list.size <= 3
-
- waiter = Gitlab::JobWaiter.new(args_list.size)
-
- # Point all the bulk jobs at the same JobWaiter. Converts, [[1], [2], [3]]
- # into [[1, "key"], [2, "key"], [3, "key"]]
- waiting_args_list = args_list.map { |args| [*args, waiter.key] }
- bulk_perform_async(waiting_args_list)
-
- waiter.wait
- end
-
- # Performs multiple jobs directly. Failed jobs will be put into sidekiq so
- # they can benefit from retries
- def self.bulk_perform_inline(args_list)
- failed = []
-
- args_list.each do |args|
- begin
- new.perform(*args)
- rescue
- failed << args
- end
- end
-
- bulk_perform_async(failed) if failed.present?
- end
-
- def perform(user_id, notify_key = nil)
+ def perform(user_id)
user = User.find_by(id: user_id)
user&.refresh_authorized_projects
- ensure
- Gitlab::JobWaiter.notify(notify_key, jid) if notify_key
end
end
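
The waiting logic removed here moves into the prepended WaitableWorker concern (new file below), so the class-level API stays the same for existing callers. A minimal sketch of the unchanged call pattern, where user_ids is a hypothetical array of user IDs:

# Small batches (<= 3 jobs) still run inline; larger ones fan out to Sidekiq
# and block on a Gitlab::JobWaiter until completion or timeout.
AuthorizedProjectsWorker.bulk_perform_and_wait(user_ids.map { |id| [id] })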
diff --git a/app/workers/check_gcp_project_billing_worker.rb b/app/workers/check_gcp_project_billing_worker.rb
index 5466ccdda59..363f81590ab 100644
--- a/app/workers/check_gcp_project_billing_worker.rb
+++ b/app/workers/check_gcp_project_billing_worker.rb
@@ -7,6 +7,7 @@ class CheckGcpProjectBillingWorker
LEASE_TIMEOUT = 3.seconds.to_i
SESSION_KEY_TIMEOUT = 5.minutes
BILLING_TIMEOUT = 1.hour
+ BILLING_CHANGED_LABELS = { state_transition: nil }.freeze
def self.get_session_token(token_key)
Gitlab::Redis::SharedState.with do |redis|
@@ -22,8 +23,11 @@ class CheckGcpProjectBillingWorker
end
end
- def self.redis_shared_state_key_for(token)
- "gitlab:gcp:#{Digest::SHA1.hexdigest(token)}:billing_enabled"
+ def self.get_billing_state(token)
+ Gitlab::Redis::SharedState.with do |redis|
+ value = redis.get(redis_shared_state_key_for(token))
+ ActiveRecord::Type::Boolean.new.type_cast_from_user(value)
+ end
end
def perform(token_key)
@@ -33,12 +37,9 @@ class CheckGcpProjectBillingWorker
return unless token
return unless try_obtain_lease_for(token)
- billing_enabled_projects = CheckGcpProjectBillingService.new.execute(token)
- Gitlab::Redis::SharedState.with do |redis|
- redis.set(self.class.redis_shared_state_key_for(token),
- !billing_enabled_projects.empty?,
- ex: BILLING_TIMEOUT)
- end
+ billing_enabled_state = !CheckGcpProjectBillingService.new.execute(token).empty?
+ update_billing_change_counter(self.class.get_billing_state(token), billing_enabled_state)
+ self.class.set_billing_state(token, billing_enabled_state)
end
private
@@ -51,9 +52,41 @@ class CheckGcpProjectBillingWorker
"gitlab:gcp:session:#{token_key}"
end
+ def self.redis_shared_state_key_for(token)
+ "gitlab:gcp:#{Digest::SHA1.hexdigest(token)}:billing_enabled"
+ end
+
+ def self.set_billing_state(token, value)
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.set(redis_shared_state_key_for(token), value, ex: BILLING_TIMEOUT)
+ end
+ end
+
def try_obtain_lease_for(token)
Gitlab::ExclusiveLease
.new("check_gcp_project_billing_worker:#{token.hash}", timeout: LEASE_TIMEOUT)
.try_obtain
end
+
+ def billing_changed_counter
+ @billing_changed_counter ||= Gitlab::Metrics.counter(
+ :gcp_billing_change_count,
+ "Counts the number of times a GCP project changed billing_enabled state from false to true",
+ BILLING_CHANGED_LABELS
+ )
+ end
+
+ def state_transition(previous_state, current_state)
+ if previous_state.nil? && !current_state
+ 'no_billing'
+ elsif previous_state.nil? && current_state
+ 'with_billing'
+ elsif !previous_state && current_state
+ 'billing_configured'
+ end
+ end
+
+ def update_billing_change_counter(previous_state, current_state)
+ billing_changed_counter.increment(state_transition: state_transition(previous_state, current_state))
+ end
end
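
Re-stated stand-alone for illustration (plain Ruby, no GitLab dependencies), the state_transition mapping that labels the billing-change counter:

def state_transition(previous_state, current_state)
  if previous_state.nil? && !current_state
    'no_billing'           # first check, billing disabled
  elsif previous_state.nil? && current_state
    'with_billing'         # first check, billing already enabled
  elsif !previous_state && current_state
    'billing_configured'   # billing was enabled between two checks
  end                      # any other pair yields nil (no labelled transition)
end

state_transition(nil, false)   # => "no_billing"
state_transition(false, true)  # => "billing_configured"
state_transition(true, true)   # => nil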
diff --git a/app/workers/concerns/waitable_worker.rb b/app/workers/concerns/waitable_worker.rb
new file mode 100644
index 00000000000..48ebe862248
--- /dev/null
+++ b/app/workers/concerns/waitable_worker.rb
@@ -0,0 +1,44 @@
+module WaitableWorker
+ extend ActiveSupport::Concern
+
+ module ClassMethods
+ # Schedules multiple jobs and waits for them to be completed.
+ def bulk_perform_and_wait(args_list, timeout: 10)
+ # Short-circuit: it's more efficient to do small numbers of jobs inline
+ return bulk_perform_inline(args_list) if args_list.size <= 3
+
+ waiter = Gitlab::JobWaiter.new(args_list.size)
+
+ # Point all the bulk jobs at the same JobWaiter. Converts, [[1], [2], [3]]
+ # into [[1, "key"], [2, "key"], [3, "key"]]
+ waiting_args_list = args_list.map { |args| [*args, waiter.key] }
+ bulk_perform_async(waiting_args_list)
+
+ waiter.wait(timeout)
+ end
+
+ # Performs multiple jobs directly. Failed jobs will be put into sidekiq so
+ # they can benefit from retries
+ def bulk_perform_inline(args_list)
+ failed = []
+
+ args_list.each do |args|
+ begin
+ new.perform(*args)
+ rescue
+ failed << args
+ end
+ end
+
+ bulk_perform_async(failed) if failed.present?
+ end
+ end
+
+ def perform(*args)
+ notify_key = args.pop if Gitlab::JobWaiter.key?(args.last)
+
+ super(*args)
+ ensure
+ Gitlab::JobWaiter.notify(notify_key, jid) if notify_key
+ end
+end
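
A hypothetical worker showing how the concern is wired up (SomeWorker is illustrative and not part of this diff):

class SomeWorker
  include ApplicationWorker
  prepend WaitableWorker

  def perform(project_id)
    # Real work goes here. The prepended WaitableWorker#perform pops the
    # JobWaiter key, if one was appended, and notifies it in an ensure block
    # after calling super.
  end
end

# Four jobs exceed the inline short-circuit, so they are enqueued with the
# waiter key appended and the call blocks for up to `timeout` seconds.
SomeWorker.bulk_perform_and_wait([[1], [2], [3], [4]], timeout: 10)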
diff --git a/app/workers/pages_domain_verification_cron_worker.rb b/app/workers/pages_domain_verification_cron_worker.rb
new file mode 100644
index 00000000000..a3ff4bd2101
--- /dev/null
+++ b/app/workers/pages_domain_verification_cron_worker.rb
@@ -0,0 +1,10 @@
+class PagesDomainVerificationCronWorker
+ include ApplicationWorker
+ include CronjobQueue
+
+ def perform
+ PagesDomain.needs_verification.find_each do |domain|
+ PagesDomainVerificationWorker.perform_async(domain.id)
+ end
+ end
+end
diff --git a/app/workers/pages_domain_verification_worker.rb b/app/workers/pages_domain_verification_worker.rb
new file mode 100644
index 00000000000..2e93489113c
--- /dev/null
+++ b/app/workers/pages_domain_verification_worker.rb
@@ -0,0 +1,11 @@
+class PagesDomainVerificationWorker
+ include ApplicationWorker
+
+ def perform(domain_id)
+ domain = PagesDomain.find_by(id: domain_id)
+
+ return unless domain
+
+ VerifyPagesDomainService.new(domain).execute
+ end
+end
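
The cron worker above enqueues one of these jobs per domain returned by PagesDomain.needs_verification; the same job can also be enqueued directly. A hedged one-off sketch, e.g. from a Rails console ('pages.example.com' is a placeholder):

domain = PagesDomain.find_by(domain: 'pages.example.com')
PagesDomainVerificationWorker.perform_async(domain.id) if domain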
diff --git a/app/workers/process_commit_worker.rb b/app/workers/process_commit_worker.rb
index 52eebe475ec..5b25d980bdb 100644
--- a/app/workers/process_commit_worker.rb
+++ b/app/workers/process_commit_worker.rb
@@ -23,27 +23,25 @@ class ProcessCommitWorker
return unless user
commit = build_commit(project, commit_hash)
-
author = commit.author || user
process_commit_message(project, commit, user, author, default)
-
update_issue_metrics(commit, author)
end
def process_commit_message(project, commit, user, author, default = false)
- closed_issues = default ? commit.closes_issues(user) : []
+ # this is a GitLab generated commit message, ignore it.
+ return if commit.merged_merge_request?(user)
- unless closed_issues.empty?
- close_issues(project, user, author, commit, closed_issues)
- end
+ closed_issues = default ? commit.closes_issues(user) : []
+ close_issues(project, user, author, commit, closed_issues) if closed_issues.any?
commit.create_cross_references!(author, closed_issues)
end
def close_issues(project, user, author, commit, issues)
# We don't want to run permission related queries for every single issue,
- # therefor we use IssueCollection here and skip the authorization check in
+ # therefore we use IssueCollection here and skip the authorization check in
# Issues::CloseService#execute.
IssueCollection.new(issues).updatable_by_user(user).each do |issue|
Issues::CloseService.new(project, author)
diff --git a/app/workers/stuck_import_jobs_worker.rb b/app/workers/stuck_import_jobs_worker.rb
index e0e6d1418de..fbb14efc525 100644
--- a/app/workers/stuck_import_jobs_worker.rb
+++ b/app/workers/stuck_import_jobs_worker.rb
@@ -16,43 +16,41 @@ class StuckImportJobsWorker
private
def mark_projects_without_jid_as_failed!
- started_projects_without_jid.each do |project|
+ enqueued_projects_without_jid.each do |project|
project.mark_import_as_failed(error_message)
end.count
end
def mark_projects_with_jid_as_failed!
- completed_jids_count = 0
+ jids_and_ids = enqueued_projects_with_jid.pluck(:import_jid, :id).to_h
- started_projects_with_jid.find_in_batches(batch_size: 500) do |group|
- jids = group.map(&:import_jid)
+ # Find the jobs that aren't currently running or that exceeded the threshold.
+ completed_jids = Gitlab::SidekiqStatus.completed_jids(jids_and_ids.keys)
+ return unless completed_jids.any?
- # Find the jobs that aren't currently running or that exceeded the threshold.
- completed_jids = Gitlab::SidekiqStatus.completed_jids(jids).to_set
+ completed_project_ids = jids_and_ids.values_at(*completed_jids)
- if completed_jids.any?
- completed_jids_count += completed_jids.count
- group.each do |project|
- project.mark_import_as_failed(error_message) if completed_jids.include?(project.import_jid)
- end
+ # We select the projects again, because they may have transitioned from
+ # scheduled/started to finished/failed while we were looking up their Sidekiq status.
+ completed_projects = enqueued_projects_with_jid.where(id: completed_project_ids)
- Rails.logger.info("Marked stuck import jobs as failed. JIDs: #{completed_jids.to_a.join(', ')}")
- end
- end
+ Rails.logger.info("Marked stuck import jobs as failed. JIDs: #{completed_projects.map(&:import_jid).join(', ')}")
- completed_jids_count
+ completed_projects.each do |project|
+ project.mark_import_as_failed(error_message)
+ end.count
end
- def started_projects
- Project.with_import_status(:started)
+ def enqueued_projects
+ Project.with_import_status(:scheduled, :started)
end
- def started_projects_with_jid
- started_projects.where.not(import_jid: nil)
+ def enqueued_projects_with_jid
+ enqueued_projects.where.not(import_jid: nil)
end
- def started_projects_without_jid
- started_projects.where(import_jid: nil)
+ def enqueued_projects_without_jid
+ enqueued_projects.where(import_jid: nil)
end
def error_message
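
Plain-Ruby illustration (made-up JIDs) of the jid-to-project-id lookup used in mark_projects_with_jid_as_failed! above:

jids_and_ids   = { 'abc123' => 1, 'def456' => 2, 'ghi789' => 3 }
completed_jids = ['abc123', 'ghi789']
jids_and_ids.values_at(*completed_jids)  # => [1, 3]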