class Multiwoven::Integrations::Source::OpenAI::Client

# Streams model output from the OpenAI API for the given payload.
#
# Issues a streaming HTTP POST to OPEN_AI_URL via send_streaming_request,
# authenticating with the API key from connection_config. Each raw chunk
# received is passed through process_streaming_response, and every parsed
# message is yielded to the caller's block (when one was given).
#
# @param connection_config [Hash] expects :api_key and :config entries
#   (assumes symbol keys — matches the [:api_key]/[:config] access below)
# @param payload [Object] request body forwarded unchanged to the API call
#
# Errors are not raised to the caller: any StandardError is routed through
# handle_exception with a context tag instead.
#
# NOTE(review): the original source had this `def` line duplicated, which
# nested one definition inside the other and left the outer `def`
# unterminated — the duplicate has been removed.
def run_model_stream(connection_config, payload)
  send_streaming_request(
    url: OPEN_AI_URL,
    http_method: HTTP_POST,
    payload: payload,
    headers: auth_headers(connection_config[:api_key]),
    config: connection_config[:config]
  ) do |chunk|
    process_streaming_response(chunk) { |message| yield message if block_given? }
  end
rescue StandardError => e
  handle_exception(e, { context: "OPEN AI:RUN_STREAM_MODEL:EXCEPTION", type: "error" })
end