class Sidekiq::ProcessSet

# Yields a Sidekiq::Process for each live process registered in the
# 'processes' set in Redis. Processes whose heartbeat hash has expired
# (i.e. they stopped reporting, probably died) are pruned from the set
# instead of being yielded.
#
# @yieldparam [Sidekiq::Process] process a live Sidekiq process record
# @return [nil]
def each(&block)
  procs = Sidekiq.redis { |conn| conn.smembers('processes') }
  to_prune = []
  sorted = procs.sort
  Sidekiq.redis do |conn|
    # We're making a tradeoff here between consuming more memory instead of
    # making more roundtrips to Redis, but if you have hundreds or thousands of workers,
    # you'll be happier this way
    result = conn.pipelined do
      sorted.each do |key|
        conn.hmget(key, 'info', 'busy', 'beat')
      end
    end
    result.each_with_index do |(info, busy, at_s), i|
      # the hash named key has an expiry of 60 seconds.
      # if it's not found, that means the process has not reported
      # in to Redis and probably died.
      (to_prune << sorted[i]; next) if info.nil?
      hash = Sidekiq.load_json(info)
      yield Process.new(hash.merge('busy' => busy.to_i, 'beat' => at_s.to_f))
    end
  end
  Sidekiq.redis { |conn| conn.srem('processes', to_prune) } unless to_prune.empty?
  nil
end

# Returns the number of entries in the 'processes' set.
#
# This method is not guaranteed accurate since it does not prune the set
# based on current heartbeat. #each does that and ensures the set only
# contains Sidekiq processes which have sent a heartbeat within the last
# 60 seconds.
#
# @return [Integer] cardinality of the 'processes' set in Redis
def size
  Sidekiq.redis { |conn| conn.scard('processes') }
end