IndexingScheduler: fetch and import in batches (#24285)

Co-authored-by: Claire <claire.github-309c@sitedethib.com>
Authored by Vyr Cossont on 2023-03-31 05:38:47 -07:00; committed by Tarrien.
parent eb8a04db9a
commit 37f5435edd

View File

@ -3,22 +3,20 @@
# Sidekiq scheduler that drains the Chewy indexing queues held in Redis and
# bulk-imports the queued record IDs into their search indexes.
#
# IDs are read with a cursor-based SSCAN and imported in slices of
# IMPORT_BATCH_SIZE, so a large backlog is processed incrementally instead of
# being loaded all at once (as the previous SMEMBERS-based implementation did).
class Scheduler::IndexingScheduler
  include Sidekiq::Worker
  include Redisable
  include DatabaseHelper

  # Number of IDs handed to Chewy per import! call.
  IMPORT_BATCH_SIZE = 1000

  # COUNT hint for each SSCAN round trip; ten times the import slice so the
  # Redis scanning overhead stays small relative to the import work.
  SCAN_BATCH_SIZE = 10 * IMPORT_BATCH_SIZE

  sidekiq_options retry: 0

  # Imports queued IDs for every index, removing each processed batch from its
  # Redis queue set. No-op when Chewy indexing is disabled.
  def perform
    return unless Chewy.enabled?

    indexes.each do |type|
      with_redis do |redis|
        redis.sscan_each("chewy:queue:#{type.name}", count: SCAN_BATCH_SIZE).each_slice(IMPORT_BATCH_SIZE) do |ids|
          type.import!(ids)

          # Delete the imported IDs in a single pipelined round trip rather
          # than one SREM call per ID.
          redis.pipelined do |pipeline|
            ids.each { |id| pipeline.srem("chewy:queue:#{type.name}", id) }
          end
        end
      end
    end
  end

  private

  # Chewy index classes whose queues this scheduler drains.
  def indexes
    [AccountsIndex, TagsIndex, StatusesIndex]
  end
end