IndexingScheduler: fetch and import in batches (#24285)
Co-authored-by: Claire <claire.github-309c@sitedethib.com>
parent 500d6f93be
commit 7646ad8a2b
@@ -6,17 +6,21 @@ class Scheduler::IndexingScheduler
 
   sidekiq_options retry: 0
 
+  IMPORT_BATCH_SIZE = 1000
+  SCAN_BATCH_SIZE = 10 * IMPORT_BATCH_SIZE
+
   def perform
     return unless Chewy.enabled?
 
     indexes.each do |type|
       with_redis do |redis|
-        ids = redis.smembers("chewy:queue:#{type.name}")
-
-        type.import!(ids)
-
-        redis.pipelined do |pipeline|
-          ids.each { |id| pipeline.srem("chewy:queue:#{type.name}", id) }
+        redis.sscan_each("chewy:queue:#{type.name}", count: SCAN_BATCH_SIZE) do |ids|
+          redis.pipelined do
+            ids.each_slice(IMPORT_BATCH_SIZE) do |slice_ids|
+              type.import!(slice_ids)
+              redis.srem("chewy:queue:#{type.name}", slice_ids)
+            end
+          end
         end
       end
     end
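For reference, the batched scan-and-import pattern this commit introduces can be exercised on its own. The sketch below is an illustration of the approach, not the committed worker code: it assumes the redis gem and a hypothetical Chewy index class ExampleIndex defined elsewhere, and it issues one SREM per slice instead of the pipelined form shown in the diff, to keep the example standalone.

    # Minimal sketch of batched scan-and-import (illustration only).
    # Assumes the `redis` gem and a hypothetical Chewy index `ExampleIndex`.
    require 'redis'

    IMPORT_BATCH_SIZE = 1000
    SCAN_BATCH_SIZE = 10 * IMPORT_BATCH_SIZE

    redis = Redis.new
    queue_key = 'chewy:queue:ExampleIndex'

    # SSCAN walks the set incrementally; `count:` is only a hint for how many
    # members the server returns per iteration, so ids are re-batched
    # client-side with each_slice before importing.
    redis.sscan_each(queue_key, count: SCAN_BATCH_SIZE).each_slice(IMPORT_BATCH_SIZE) do |ids|
      ExampleIndex.import!(ids) # index up to IMPORT_BATCH_SIZE records at once
      redis.srem(queue_key, ids) # then drop the processed ids from the queue set
    end

Compared with the previous SMEMBERS-based code, this never loads the whole queue into memory at once, which keeps both the Redis round-trips and the Elasticsearch bulk requests bounded in size.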