IndexingScheduler: fetch and import in batches (#24285)
Co-authored-by: Claire <claire.github-309c@sitedethib.com>
This commit is contained in:
parent
eb8a04db9a
commit
37f5435edd
@ -3,22 +3,20 @@
|
|||||||
# Periodically drains the Chewy indexing queues kept in Redis and imports
# the queued records into their Elasticsearch indexes in bounded batches,
# so a large backlog cannot blow up memory or produce one huge import call.
class Scheduler::IndexingScheduler
  include Sidekiq::Worker
  include Redisable
  include DatabaseHelper

  # No retries: the next scheduled run will pick up anything left in the
  # queue. The lock prevents overlapping runs from importing the same IDs.
  sidekiq_options retry: 0, lock: :until_executed, lock_ttl: 30.minutes.to_i

  # Maximum number of record IDs handed to a single `import!` call.
  IMPORT_BATCH_SIZE = 1000

  # COUNT hint passed to Redis SSCAN; scanning in larger chunks than we
  # import keeps round-trips to Redis low while `each_slice` still caps
  # the per-import batch at IMPORT_BATCH_SIZE.
  SCAN_BATCH_SIZE = 10 * IMPORT_BATCH_SIZE

  # Drain every per-index queue set (`chewy:queue:<IndexName>`): scan the
  # set incrementally, import each slice of IDs, then remove only the IDs
  # that were imported. IDs queued while this runs stay in the set for the
  # next run. No-op when Chewy indexing is disabled.
  def perform
    return unless Chewy.enabled?

    indexes.each do |type|
      with_redis do |redis|
        redis.sscan_each("chewy:queue:#{type.name}", count: SCAN_BATCH_SIZE).each_slice(IMPORT_BATCH_SIZE) do |ids|
          type.import!(ids)
          # Remove after a successful import so a failed batch is retried
          # on the next scheduled run rather than silently dropped.
          redis.srem("chewy:queue:#{type.name}", ids)
        end
      end
    end
  end

  private

  # All Chewy index classes whose queues this scheduler drains.
  def indexes
    [AccountsIndex, TagsIndex, PublicStatusesIndex, StatusesIndex]
  end
end
|
Loading…
x
Reference in New Issue
Block a user