Commit 1e6ca3c4 authored by Douwe Maan

Consistently schedule Sidekiq jobs

parent a5c3f1c8
Showing 76 additions and 68 deletions
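
In short: workers are scheduled through their own `perform_async` class method instead of `Sidekiq::Client.enqueue`, per-worker bulk-scheduling helpers are consolidated into `ApplicationWorker`, and system hooks are deferred until the surrounding database transaction commits. A minimal sketch of the core rename (the worker name is illustrative, not from this diff):

```ruby
# Before: enqueue through Sidekiq's low-level client API.
Sidekiq::Client.enqueue(SomeWorker, record.id)

# After: go through the worker's own class method, which respects
# sidekiq_options and the transaction guard patched in below.
SomeWorker.perform_async(record.id)
```
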
@@ -2,6 +2,7 @@ require 'carrierwave/orm/activerecord'
 
 class Group < Namespace
   include Gitlab::ConfigHelper
+  include AfterCommitQueue
   include AccessRequestable
   include Avatarable
   include Referable
@@ -2,6 +2,7 @@ require 'digest/md5'
 
 class Key < ActiveRecord::Base
   include Gitlab::CurrentSettings
+  include AfterCommitQueue
   include Sortable
 
   belongs_to :user
 class Member < ActiveRecord::Base
+  include AfterCommitQueue
   include Sortable
   include Importable
   include Expirable
@@ -211,7 +211,7 @@ class Service < ActiveRecord::Base
   def async_execute(data)
     return unless supported_events.include?(data[:object_kind])
 
-    Sidekiq::Client.enqueue(ProjectServiceWorker, id, data)
+    ProjectServiceWorker.perform_async(id, data)
   end
 
   def issue_tracker?
@@ -7,6 +7,7 @@ class User < ActiveRecord::Base
   include Gitlab::ConfigHelper
   include Gitlab::CurrentSettings
   include Gitlab::SQL::Pattern
+  include AfterCommitQueue
   include Avatarable
   include Referable
   include Sortable
@@ -903,6 +904,7 @@ class User < ActiveRecord::Base
 
   def post_destroy_hook
     log_info("User \"#{name}\" (#{email}) was removed")
+    system_hook_service.execute_hooks_for(self, :destroy)
   end
 
 class SystemHooksService
   def execute_hooks_for(model, event)
-    execute_hooks(build_event_data(model, event))
+    data = build_event_data(model, event)
+
+    model.run_after_commit_or_now do
+      SystemHooksService.new.execute_hooks(data)
+    end
   end
 
   def execute_hooks(data, hooks_scope = :all)
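
`execute_hooks_for` no longer fires hooks immediately; the block is queued on the model and runs only once the enclosing transaction commits (or right away when there is none). A rough sketch of the intended behaviour, with an illustrative caller:

```ruby
User.transaction do
  user.destroy!
  # Inside the transaction: the block is queued on the model via
  # run_after_commit; nothing executes yet.
  SystemHooksService.new.execute_hooks_for(user, :destroy)
end
# After COMMIT: the queued block runs and the hooks fire, so any
# worker they schedule can no longer observe uncommitted state.
```
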
@@ -63,7 +63,7 @@ class WebHookService
   end
 
   def async_execute
-    Sidekiq::Client.enqueue(WebHookWorker, hook.id, data, hook_name)
+    WebHookWorker.perform_async(hook.id, data, hook_name)
   end
 
   private
@@ -16,11 +16,6 @@ class AuthorizedProjectsWorker
     waiter.wait
   end
 
-  # Schedules multiple jobs to run in sidekiq without waiting for completion
-  def self.bulk_perform_async(args_list)
-    Sidekiq::Client.push_bulk('class' => self, 'queue' => sidekiq_options['queue'], 'args' => args_list)
-  end
-
   # Performs multiple jobs directly. Failed jobs will be put into sidekiq so
   # they can benefit from retries
   def self.bulk_perform_inline(args_list)
 class BackgroundMigrationWorker
   include ApplicationWorker
 
-  # Enqueues a number of jobs in bulk.
-  #
-  # The `jobs` argument should be an Array of Arrays, each sub-array must be in
-  # the form:
-  #
-  # [migration-class, [arg1, arg2, ...]]
-  def self.perform_bulk(jobs)
-    Sidekiq::Client.push_bulk('class' => self,
-                              'queue' => sidekiq_options['queue'],
-                              'args' => jobs)
-  end
-
-  # Schedules multiple jobs in bulk, with a delay.
-  #
-  def self.perform_bulk_in(delay, jobs)
-    now = Time.now.to_i
-    schedule = now + delay.to_i
-
-    if schedule <= now
-      raise ArgumentError, 'The schedule time must be in the future!'
-    end
-
-    Sidekiq::Client.push_bulk('class' => self,
-                              'queue' => sidekiq_options['queue'],
-                              'args' => jobs,
-                              'at' => schedule)
-  end
-
   # Performs the background migration.
   #
   # See Gitlab::BackgroundMigration.perform for more information.
@@ -21,5 +21,20 @@ module ApplicationWorker
     def queue
       get_sidekiq_options['queue'].to_s
     end
+
+    def bulk_perform_async(args_list)
+      Sidekiq::Client.push_bulk('class' => self, 'args' => args_list)
+    end
+
+    def bulk_perform_in(delay, args_list)
+      now = Time.now.to_i
+      schedule = now + delay.to_i
+
+      if schedule <= now
+        raise ArgumentError, 'The schedule time must be in the future!'
+      end
+
+      Sidekiq::Client.push_bulk('class' => self, 'args' => args_list, 'at' => schedule)
+    end
   end
 end
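
Any worker that includes `ApplicationWorker` now inherits the bulk helpers instead of redefining them. Usage, with illustrative arguments:

```ruby
# One Redis round trip; each sub-array is one job's argument list.
AuthorizedProjectsWorker.bulk_perform_async([[1], [2], [3]])

# The same, but the jobs run no earlier than five minutes from now;
# a non-positive delay raises ArgumentError.
BackgroundMigrationWorker.bulk_perform_in(
  5.minutes,
  [['MigrateEventsToPushEventPayloads', [1, 100]]]
)
```
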
@@ -8,6 +8,6 @@ class ExpireBuildArtifactsWorker
     build_ids = Ci::Build.with_expired_artifacts.pluck(:id)
     build_ids = build_ids.map { |build_id| [build_id] }
 
-    Sidekiq::Client.push_bulk('class' => ExpireBuildInstanceArtifactsWorker, 'args' => build_ids)
+    ExpireBuildInstanceArtifactsWorker.bulk_perform_async(build_ids)
   end
 end
@@ -8,10 +8,6 @@ class NamespacelessProjectDestroyWorker
   include ApplicationWorker
   include ExceptionBacktrace
 
-  def self.bulk_perform_async(args_list)
-    Sidekiq::Client.push_bulk('class' => self, 'queue' => sidekiq_options['queue'], 'args' => args_list)
-  end
-
   def perform(project_id)
     begin
       project = Project.unscoped.find(project_id)
@@ -13,20 +13,19 @@ module Sidekiq
 
     module ClassMethods
       module NoSchedulingFromTransactions
-        NESTING = ::Rails.env.test? ? 1 : 0
-
         %i(perform_async perform_at perform_in).each do |name|
           define_method(name) do |*args|
-            return super(*args) if Sidekiq::Worker.skip_transaction_check
-            return super(*args) unless ActiveRecord::Base.connection.open_transactions > NESTING
-
-            raise <<-MSG.strip_heredoc
-              `#{self}.#{name}` cannot be called inside a transaction as this can lead to
-              race conditions when the worker runs before the transaction is committed and
-              tries to access a model that has not been saved yet.
-
-              Use an `after_commit` hook, or include `AfterCommitQueue` and use a `run_after_commit` block instead.
-            MSG
+            if !Sidekiq::Worker.skip_transaction_check && AfterCommitQueue.inside_transaction?
+              raise <<-MSG.strip_heredoc
+                `#{self}.#{name}` cannot be called inside a transaction as this can lead to
+                race conditions when the worker runs before the transaction is committed and
+                tries to access a model that has not been saved yet.
+
+                Use an `after_commit` hook, or include `AfterCommitQueue` and use a `run_after_commit` block instead.
+              MSG
+            end
+
+            super(*args)
           end
         end
       end
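
The guard now delegates the transaction check to `AfterCommitQueue.inside_transaction?`, which accounts for the wrapping transaction the test environment adds, and raises before calling `super`. A sketch of what it catches and the sanctioned alternative (worker name illustrative):

```ruby
Project.transaction do
  project.save!
  # Raises: "`SomeWorker.perform_async` cannot be called inside a
  # transaction as this can lead to race conditions ..."
  SomeWorker.perform_async(project.id)
end

# Sanctioned alternative: defer scheduling until after COMMIT.
project.run_after_commit do
  # The block runs in the model's context, so `id` is project.id.
  SomeWorker.perform_async(id)
end
```
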
@@ -25,14 +25,14 @@ class ScheduleEventMigrations < ActiveRecord::Migration
         # We push multiple jobs at a time to reduce the time spent in
         # Sidekiq/Redis operations. We're using this buffer based approach so we
         # don't need to run additional queries for every range.
-        BackgroundMigrationWorker.perform_bulk(jobs)
+        BackgroundMigrationWorker.bulk_perform_async(jobs)
         jobs.clear
       end
 
       jobs << ['MigrateEventsToPushEventPayloads', [min, max]]
     end
 
-    BackgroundMigrationWorker.perform_bulk(jobs) unless jobs.empty?
+    BackgroundMigrationWorker.bulk_perform_async(jobs) unless jobs.empty?
   end
 
   def down
@@ -19,7 +19,7 @@ class ScheduleCreateGpgKeySubkeysFromGpgKeys < ActiveRecord::Migration
       [MIGRATION, [id]]
     end
 
-    BackgroundMigrationWorker.perform_bulk(jobs)
+    BackgroundMigrationWorker.bulk_perform_async(jobs)
   end
 end
@@ -68,10 +68,10 @@ BackgroundMigrationWorker.perform_async('BackgroundMigrationClassName', [arg1, a
 ```
 
 Usually it's better to enqueue jobs in bulk, for this you can use
-`BackgroundMigrationWorker.perform_bulk`:
+`BackgroundMigrationWorker.bulk_perform_async`:
 
 ```ruby
-BackgroundMigrationWorker.perform_bulk(
+BackgroundMigrationWorker.bulk_perform_async(
   [['BackgroundMigrationClassName', [1]],
    ['BackgroundMigrationClassName', [2]]]
 )
@@ -85,13 +85,13 @@ updates. Removals in turn can be handled by simply defining foreign keys with
 cascading deletes.
 
 If you would like to schedule jobs in bulk with a delay, you can use
-`BackgroundMigrationWorker.perform_bulk_in`:
+`BackgroundMigrationWorker.bulk_perform_in`:
 
 ```ruby
 jobs = [['BackgroundMigrationClassName', [1]],
         ['BackgroundMigrationClassName', [2]]]
 
-BackgroundMigrationWorker.perform_bulk_in(5.minutes, jobs)
+BackgroundMigrationWorker.bulk_perform_in(5.minutes, jobs)
 ```
 
 ## Cleaning Up
@@ -201,7 +201,7 @@ class ScheduleExtractServicesUrl < ActiveRecord::Migration
       ['ExtractServicesUrl', [id]]
     end
 
-    BackgroundMigrationWorker.perform_bulk(jobs)
+    BackgroundMigrationWorker.bulk_perform_async(jobs)
   end
 end
@@ -6,12 +6,34 @@ module AfterCommitQueue
     after_rollback :_clear_after_commit_queue
   end
 
-  def run_after_commit(method = nil, &block)
-    _after_commit_queue << proc { self.send(method) } if method # rubocop:disable GitlabSecurity/PublicSend
+  def run_after_commit(&block)
     _after_commit_queue << block if block
+
     true
   end
 
+  def run_after_commit_or_now(&block)
+    if AfterCommitQueue.inside_transaction?
+      run_after_commit(&block)
+    else
+      instance_eval(&block)
+    end
+
+    true
+  end
+
+  def self.open_transactions_baseline
+    if ::Rails.env.test?
+      return DatabaseCleaner.connections.count { |conn| conn.strategy.is_a?(DatabaseCleaner::ActiveRecord::Transaction) }
+    end
+
+    0
+  end
+
+  def self.inside_transaction?
+    ActiveRecord::Base.connection.open_transactions > open_transactions_baseline
+  end
+
   protected
 
   def _run_after_commit_queue
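
`run_after_commit_or_now` chooses a strategy via `inside_transaction?`, which compares the connection's open transaction count against a baseline so DatabaseCleaner's per-test wrapping transaction is not miscounted. Roughly (worker name illustrative):

```ruby
# Inside a transaction: the block is queued and runs after COMMIT.
# Outside a transaction: it is instance_eval'd on the record now.
record.run_after_commit_or_now do
  SomeWorker.perform_async(id)
end
```
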
@@ -703,14 +703,14 @@ into similar problems in the future (e.g. when new tables are created).
         # We push multiple jobs at a time to reduce the time spent in
         # Sidekiq/Redis operations. We're using this buffer based approach so we
         # don't need to run additional queries for every range.
-        BackgroundMigrationWorker.perform_bulk(jobs)
+        BackgroundMigrationWorker.bulk_perform_async(jobs)
         jobs.clear
       end
 
       jobs << [job_class_name, [start_id, end_id]]
     end
 
-    BackgroundMigrationWorker.perform_bulk(jobs) unless jobs.empty?
+    BackgroundMigrationWorker.bulk_perform_async(jobs) unless jobs.empty?
   end
 
   # Queues background migration jobs for an entire table, batched by ID range.
@@ -942,8 +942,8 @@ describe Gitlab::Database::MigrationHelpers do
     end
 
     it 'queues jobs in groups of buffer size 1' do
-      expect(BackgroundMigrationWorker).to receive(:perform_bulk).with([['FooJob', [id1, id2]]])
-      expect(BackgroundMigrationWorker).to receive(:perform_bulk).with([['FooJob', [id3, id3]]])
+      expect(BackgroundMigrationWorker).to receive(:bulk_perform_async).with([['FooJob', [id1, id2]]])
+      expect(BackgroundMigrationWorker).to receive(:bulk_perform_async).with([['FooJob', [id3, id3]]])
 
       model.bulk_queue_background_migration_jobs_by_range(User, 'FooJob', batch_size: 2)
     end
@@ -960,8 +960,8 @@ describe Gitlab::Database::MigrationHelpers do
     end
 
     it 'queues jobs in bulk all at once (big buffer size)' do
-      expect(BackgroundMigrationWorker).to receive(:perform_bulk).with([['FooJob', [id1, id2]],
-                                                                        ['FooJob', [id3, id3]]])
+      expect(BackgroundMigrationWorker).to receive(:bulk_perform_async).with([['FooJob', [id1, id2]],
+                                                                              ['FooJob', [id3, id3]]])
 
       model.bulk_queue_background_migration_jobs_by_range(User, 'FooJob', batch_size: 2)
     end
@@ -146,7 +146,7 @@ describe WebHookService do
     let(:system_hook) { create(:system_hook) }
 
    it 'enqueue WebHookWorker' do
-      expect(Sidekiq::Client).to receive(:enqueue).with(WebHookWorker, project_hook.id, data, 'push_hooks')
+      expect(WebHookWorker).to receive(:perform_async).with(project_hook.id, data, 'push_hooks')
 
       described_class.new(project_hook, data, 'push_hooks').async_execute
     end