class Async::HTTP::Protocol::HTTP2

A client and server implementation of the HTTP/2 protocol. Requests and responses are multiplexed over a single connection as independent streams.
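
For example, a connection might be set up like this. This is a minimal sketch: it assumes an already-connected `socket` that has negotiated HTTP/2, a buffered stream such as Async::IO::Stream, and the usual require paths.

require 'async'
require 'async/io/stream'
require 'async/http/protocol/http2'

Async::Reactor.run do |task|
	# `socket` is assumed to be an established connection negotiated for HTTP/2:
	stream = Async::IO::Stream.new(socket)
	
	# Client side, wrapping ::HTTP2::Client; use #call to send requests:
	client = Async::HTTP::Protocol::HTTP2.client(stream)
	
	# Server side, wrapping ::HTTP2::Server; use #receive_requests to serve them:
	# server = Async::HTTP::Protocol::HTTP2.server(stream)
end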

def self.client(stream)

def self.client(stream)
	self.new(::HTTP2::Client.new, stream)
end

def self.server(stream)

def self.server(stream)
	self.new(::HTTP2::Server.new, stream)
end

def call(request)

Used by the client to send requests to the remote server.
def call(request)
	@count += 1
	
	stream = @controller.new_stream
	response = Response.new(self, stream)
	body = response.body
	
	exception = nil
	finished = Async::Notification.new
	waiting = true
	
	stream.on(:close) do |error|
		if waiting
			if error
				# If the stream was closed due to an error, we will raise it rather than returning normally.
				exception = EOFError.new(error)
			end
			
			waiting = false
			finished.signal
		else
			# At this point, we are now expecting two events: data and close.
			# If we receive close after this point, it's not a request error, but a failure we need to signal to the body.
			if error
				body.stop(EOFError.new(error))
			else
				body.finish
			end
		end
	end
	
	stream.on(:headers) do |headers|
		response.assign_headers(headers)
		
		# Once we receive the headers, we can return. The body will be read in the background.
		waiting = false
		finished.signal
	end
	
	# This is a little bit tricky due to the event handlers.
	# 1/ Caller invokes `response.stop` which causes `body.write` below to fail.
	# 2/ We invoke `stream.close(:internal_error)` which eventually triggers `on(:close)` above.
	# 3/ The error is set to :internal_error, which causes us to call `body.stop` a second time.
	# So we guard against that by ensuring that `Writable#stop` only stores the first exception assigned to it.
	stream.on(:data) do |chunk|
		begin
			# If the body is stopped, write will fail...
			body.write(chunk.to_s) unless chunk.empty?
		rescue
			# ... so, we close the stream:
			stream.close(:internal_error)
		end
	end
	
	write_request(request, stream)
	
	Async.logger.debug(self) {"Request sent, waiting for signal."}
	finished.wait
	
	if exception
		raise exception
	end
	
	Async.logger.debug(self) {"Stream finished: #{response.inspect}"}
	return response
end
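
For example, a client might use #call like this. This is a sketch: `client` is assumed to come from Async::HTTP::Protocol::HTTP2.client(stream), and `request` can be any object that responds to #method, #path, #authority, #headers and #body, which is all #write_request relies on.

response = client.call(request)

Async.logger.debug(response.inspect)

# The headers are available as soon as #call returns; the body arrives as data
# frames in the background and can be read chunk by chunk until the stream closes:
while chunk = response.body.read
	Async.logger.debug("Received #{chunk.bytesize} bytes")
end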

def close

def close
	Async.logger.debug(self) {"Closing connection"}
	
	@reader.stop if @reader
	@stream.close
end

def generate_response(request, stream, &block)

Generate a response to the request. If this fails, the stream is terminated and the error is reported.
def generate_response(request, stream, &block)
	# We need to close the stream if the user code blows up while generating a response:
	response = begin
		yield(request, stream)
	rescue
		stream.close(:internal_error)
	end
	
	if response
		headers = Headers::Merged.new({
			STATUS => response.status,
		}, response.headers)
		
		if response.body.nil? or response.body.empty?
			stream.headers(headers, end_stream: true)
			response.body.read if response.body
		else
			stream.headers(headers, end_stream: false)
			
			response.body.each do |chunk|
				stream.data(chunk, end_stream: false)
			end
			
			stream.data("", end_stream: true)
		end
	else
		stream.close(:internal_error) unless stream.state == :closed
	end
rescue
	Async.logger.error(request) {$!}
end
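
This method is normally invoked from #receive_requests; the sketch below shows it called directly to illustrate the block contract. `MyResponse` is a hypothetical application class; any object exposing #status, #headers and #body will do.

connection.generate_response(request, stream) do |request, stream|
	# Returning nil (or raising) closes the stream with :internal_error instead.
	MyResponse.new(status: "200", headers: {}, body: nil)
end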

def good?

Can we use this connection to make requests?
def good?
	@stream.connected?
end
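
A caller maintaining a pool of connections might check this before reuse. This is a sketch; `connection` is assumed to be an existing Protocol::HTTP2 instance.

if connection.good? and connection.reusable?
	response = connection.call(request)
else
	connection.close
	# ...establish a new connection before retrying...
end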

def initialize(controller, stream)

def initialize(controller, stream)
	@controller = controller
	@stream = stream
	
	@controller.on(:frame) do |data|
		@stream.write(data)
		@stream.flush
	end
	
	@controller.on(:frame_sent) do |frame|
		Async.logger.debug(self) {"Sent frame: #{frame.inspect}"}
	end
	
	@controller.on(:frame_received) do |frame|
		Async.logger.debug(self) {"Received frame: #{frame.inspect}"}
	end
	
	@goaway = false
	
	@controller.on(:goaway) do |payload|
		Async.logger.error(self) {"goaway: #{payload.inspect}"}
		
		@goaway = true
	end
	
	@count = 0
end

def multiplex

The maximum number of requests that can be multiplexed over this connection at the same time, as advertised by the remote peer's settings.
def multiplex
	@controller.remote_settings[:settings_max_concurrent_streams]
end
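
For example, a caller might use this value to cap the number of in-flight requests on one connection. This is a sketch: Async::Semaphore comes from the async gem, `requests` is a hypothetical collection, and the code is assumed to run inside a reactor task.

require 'async/semaphore'

limit = connection.multiplex || 1
semaphore = Async::Semaphore.new(limit)

requests.each do |request|
	semaphore.async do
		connection.call(request)
	end
end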

def peer

def peer
	@stream.io
end

def read_in_background(task: Task.current)

def read_in_background(task: Task.current)
	task.async do |nested_task|
		nested_task.annotate("#{version} reading data")
		
		while buffer = @stream.read_partial
			@controller << buffer
		end
		
		Async.logger.debug(self) {"Connection reset by peer!"}
	end
end

def receive_requests(task: Task.current, &block)

def receive_requests(task: Task.current, &block)
	# emits new streams opened by the client
	@controller.on(:stream) do |stream|
		@count += 1
		
		request = Request.new(self, stream)
		body = request.body
		
		stream.on(:headers) do |headers|
			begin
				request.assign_headers(headers)
			rescue
				Async.logger.error(self) {$!}
				
				stream.headers({
					STATUS => "400"
				}, end_stream: true)
			else
				task.async do
					generate_response(request, stream, &block)
				end
			end
		end
		
		stream.on(:data) do |chunk|
			body.write(chunk.to_s) unless chunk.empty?
		end
		
		stream.on(:half_close) do
			# We are no longer receiving any more data frames:
			body.finish
		end
		
		stream.on(:close) do |error|
			if error
				body.stop(EOFError.new(error))
			else
				# In theory, we should have received half_close, so there is no need to:
				# body.finish
			end
		end
	end
	
	start_connection
	@reader.wait
end
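
A server loop built on this might look like the following sketch. The accepted `peer` socket and the `MyResponse` class are assumptions; the block's return value follows the contract described under #generate_response.

Async::Reactor.run do |task|
	stream = Async::IO::Stream.new(peer)
	connection = Async::HTTP::Protocol::HTTP2.server(stream)
	
	connection.receive_requests(task: task) do |request, stream|
		Async.logger.debug(request.inspect)
		
		MyResponse.new(status: "200", headers: {}, body: nil)
	end
end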

def reusable?

def reusable?
	!@goaway || !@stream.closed?
end

def start_connection

def start_connection
	@reader ||= read_in_background
end

def version

def version
	VERSION
end

def write_request(request, stream)

def write_request(request, stream)
	headers = Headers::Merged.new({
		SCHEME => HTTPS,
		METHOD => request.method,
		PATH => request.path,
		AUTHORITY => request.authority,
	}, request.headers)
	
	if request.body.nil? or request.body.empty?
		stream.headers(headers, end_stream: true)
		request.body.read if request.body
	else
		begin
			stream.headers(headers)
		rescue
			raise RequestFailed.new
		end
		
		request.body.each do |chunk|
			stream.data(chunk, end_stream: false)
		end
		
		stream.data("")
	end
	
	start_connection
	@stream.flush
end