module RubyLLM::Providers::OpenAI::Chat

# Relative API path for OpenAI chat completions, joined onto the
# provider's base URL by the connection layer.
#
# @return [String] the chat completions endpoint path
def completion_url
  'chat/completions'
end

# Converts internal chat messages into the OpenAI messages payload.
#
# Each entry carries role, content, tool calls, and tool_call_id;
# `compact` drops any nil fields so plain text messages stay minimal.
#
# @param messages [Array<Message>] conversation history
# @return [Array<Hash>] OpenAI-formatted message hashes
def format_messages(messages)
  messages.map do |msg|
    {
      role: format_role(msg.role),
      content: Media.format_content(msg.content),
      tool_calls: format_tool_calls(msg.tool_calls),
      tool_call_id: msg.tool_call_id
    }.compact
  end
end

# Maps an internal role symbol to OpenAI's wire-format role string.
#
# OpenAI renamed the :system role to 'developer' for newer models;
# all other roles pass through as their string form.
#
# @param role [Symbol] internal role (:system, :user, :assistant, :tool)
# @return [String] OpenAI role name
def format_role(role)
  case role
  when :system
    'developer'
  else
    role.to_s
  end
end

# Builds an assistant Message from a chat completion HTTP response.
#
# Returns nil for empty bodies or responses without a message in the
# first choice (e.g. error payloads handled upstream). Token counts
# are read with `dig` so a missing 'usage' object does not raise.
#
# @param response [Faraday::Response] raw provider response
# @return [Message, nil] parsed assistant message, or nil if absent
def parse_completion_response(response) # rubocop:disable Metrics/MethodLength
  data = response.body
  return if data.empty?

  message_data = data.dig('choices', 0, 'message')
  return unless message_data

  Message.new(
    role: :assistant,
    content: message_data['content'],
    tool_calls: parse_tool_calls(message_data['tool_calls']),
    input_tokens: data.dig('usage', 'prompt_tokens'),
    output_tokens: data.dig('usage', 'completion_tokens'),
    model_id: data['model']
  )
end

# Assembles the JSON body for a chat completion request.
#
# Tool definitions and 'auto' tool choice are included only when tools
# are present; stream_options is added only for streaming requests so
# usage stats arrive in the final chunk.
#
# @param messages [Array<Message>] conversation history
# @param tools [Hash] tool name => Tool definitions
# @param temperature [Float] sampling temperature
# @param model [String] model identifier
# @param stream [Boolean] whether to request a streamed response
# @return [Hash] request payload
def render_payload(messages, tools:, temperature:, model:, stream: false) # rubocop:disable Metrics/MethodLength
  {
    model: model,
    messages: format_messages(messages),
    temperature: temperature,
    stream: stream
  }.tap do |payload|
    if tools.any?
      payload[:tools] = tools.map { |_, tool| tool_for(tool) }
      payload[:tool_choice] = 'auto'
    end
    payload[:stream_options] = { include_usage: true } if stream
  end
end