class Async::HTTP::Protocol::HTTP1::Server
def each(task: Task.current)
def each(task: Task.current)
	task.annotate("Reading #{self.version} requests for #{self.class}.")
	
	while request = next_request
		response = yield(request, self)
		
		return if @stream.nil? or @stream.closed?
		
		if response
			trailers = response.headers.trailers!
			write_response(@version, response.status, response.headers)
			
			body = response.body
			
			if body and protocol = response.protocol
				stream = write_upgrade_body(protocol)
				
				# At this point, the request body is hijacked, so we don't want to call #finish below.
				request = nil
				
				# We also don't want to hold on to the response object:
				response = nil
				
				body.call(stream)
			elsif request.connect? and response.success?
				stream = write_tunnel_body(request.version)
				
				# Same as above:
				request = nil
				response = nil
				
				body.call(stream)
			else
				head = request.head?
				version = request.version
				
				request = nil unless body
				response = nil
				
				write_body(version, body, head, trailers)
			end
		else
			# If the request failed to generate a response, it was an internal server error:
			write_response(@version, 500, {})
			write_body(request.version, nil)
		end
		
		# Gracefully finish reading the request body if that has not already been done.
		request&.finish
		
		# This ensures we yield at least once per iteration of the loop, allowing other fibers to execute.
		task.yield
	end
end
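A minimal sketch of the block contract that each expects: it yields the parsed request and the connection itself, then writes whatever response object the block returns (returning nil triggers the 500 path above). The serve helper and the routing below are illustrative assumptions, not part of the library; run it inside an Async task, since each uses Task.current.

require 'protocol/http/response'

# `connection` is assumed to be an already-accepted
# Async::HTTP::Protocol::HTTP1::Server instance (construction omitted here):
def serve(connection)
	connection.each do |request, _connection|
		if request.path == '/'
			# Returning a response object makes #each write it to the stream:
			Protocol::HTTP::Response[200, {'content-type' => 'text/plain'}, ['Hello!']]
		else
			Protocol::HTTP::Response[404, {}, []]
		end
	end
end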
def fail_request(status)
def fail_request(status)
	# The connection is in an unrecoverable state, so mark it as non-persistent
	# and write an error response with no body:
	@persistent = false
	write_response(@version, status, {}, nil)
end
def next_request
def next_request
	# @persistent defaults to true; stop reading once the connection is no longer persistent:
	return unless @persistent
	
	# Read an incoming request:
	return unless request = Request.read(self)
	
	unless persistent?(request.version, request.method, request.headers)
		@persistent = false
	end
	
	return request
rescue Async::TimeoutError
	# For an interesting discussion about this behaviour, see https://trac.nginx.org/nginx/ticket/1005
	# If you enable this, you will see some spec failures...
	# fail_request(408)
	raise
rescue
	fail_request(400)
	raise
end
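The persistent? check used above is provided by the connection. The standalone sketch below only illustrates the usual HTTP/1.x keep-alive rules such a check encapsulates; it is not the library's actual implementation, and it treats headers as a plain Hash of lowercase keys for simplicity.

# Illustrative only: approximate keep-alive rules behind a
# persistent?(version, method, headers) style check. The real check also
# considers the request method (for example CONNECT), which is ignored here.
def roughly_persistent?(version, headers)
	connection = headers['connection']
	
	if version == 'HTTP/1.0'
		# HTTP/1.0 closes by default; keep-alive must be requested explicitly:
		connection&.include?('keep-alive') || false
	else
		# HTTP/1.1 keeps the connection open unless the peer asks to close it:
		!(connection&.include?('close'))
	end
end

roughly_persistent?('HTTP/1.1', {})                        # => true
roughly_persistent?('HTTP/1.1', {'connection' => 'close'}) # => false
roughly_persistent?('HTTP/1.0', {})                        # => false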