class Sidekiq::CLI

def run(boot_app: true, warmup: true)

Code within this method is not tested because it alters global process state irreversibly. PRs which improve the test coverage of Sidekiq::CLI are welcomed.
def run(boot_app: true, warmup: true)
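  # Loads the Rails application (or the file given with -r/--require)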
  boot_application if boot_app
  if environment == "development" && $stdout.tty? && @config.logger.formatter.is_a?(Sidekiq::Logger::Formatters::Pretty)
    print_banner
  end
  logger.info "Booted Rails #{::Rails.version} application in #{environment} environment" if rails_app?
  self_read, self_write = IO.pipe
  sigs = %w[INT TERM TTIN TSTP]
  # USR1 and USR2 don't work on the JVM
  sigs << "USR2" if Sidekiq.pro? && !jruby?
  sigs.each do |sig|
    old_handler = Signal.trap(sig) do
      if old_handler.respond_to?(:call)
        begin
          old_handler.call
        rescue Exception => exc
          # signal handlers can't use Logger so puts only
          puts ["Error in #{sig} handler", exc].inspect
        end
      end
      self_write.puts(sig)
    end
  rescue ArgumentError
    puts "Signal #{sig} not supported"
  end
  logger.info "Running in #{RUBY_DESCRIPTION}"
  logger.info Sidekiq::LICENSE
  logger.info "Upgrade to Sidekiq Pro for more features and support: https://sidekiq.org" unless defined?(::Sidekiq::Pro)
  # touch the connection pool so it is created before we
  # fire startup and start multithreading.
  info = @config.redis_info
  ver = Gem::Version.new(info["redis_version"])
  raise "You are connected to Redis #{ver}, Sidekiq requires Redis 7.0.0 or greater" if ver < Gem::Version.new("7.0.0")
  maxmemory_policy = info["maxmemory_policy"]
  if maxmemory_policy != "noeviction" && maxmemory_policy != ""
    # Redis Enterprise Cloud returns "" for their policy 😳
    logger.warn <<~EOM
      WARNING: Your Redis instance will evict Sidekiq data under heavy load.
      The 'noeviction' maxmemory policy is recommended (current policy: '#{maxmemory_policy}').
      See: https://github.com/sidekiq/sidekiq/wiki/Using-Redis#memory
    EOM
  end
  # Since the user can pass us a connection pool explicitly in the initializer, we
  # need to verify the size is large enough or else Sidekiq's performance is dramatically slowed.
  @config.capsules.each_pair do |name, cap|
    raise ArgumentError, "Pool size too small for #{name}" if cap.redis_pool.size < cap.concurrency
  end
  # cache process identity
  @config[:identity] = identity
  # Touch middleware so it isn't lazy loaded by multiple threads, #3043
  @config.server_middleware
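  # Process.warmup (Ruby 3.3+) tells the VM that boot is finished so it can
  # run a major GC, compact the heap and promote surviving objects before
  # the worker threads start allocating.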
  ::Process.warmup if warmup && ::Process.respond_to?(:warmup) && ENV["RUBY_DISABLE_WARMUP"] != "1"
  # Before this point, the process is initializing with just the main thread.
  # Starting here the process will now have multiple threads running.
  fire_event(:startup, reverse: false, reraise: true)
  logger.debug { "Client Middleware: #{@config.default_capsule.client_middleware.map(&:klass).join(", ")}" }
  logger.debug { "Server Middleware: #{@config.default_capsule.server_middleware.map(&:klass).join(", ")}" }
  launch(self_read)
end
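
For context, this is roughly how the method is driven by Sidekiq's own bin/sidekiq executable. The sketch below trims the real error handling and systemd integration, so treat it as illustrative rather than a drop-in replacement.

#!/usr/bin/env ruby
require "sidekiq/cli"

begin
  cli = Sidekiq::CLI.instance
  cli.parse  # parse ARGV and the YAML config file
  cli.run    # the method above: boot, trap signals, verify Redis, launch capsules
rescue => e
  warn e.message
  exit 1
end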