class Multiwoven::Integrations::Source::OpenAI::Client

# Reads data from the OpenAI source for the given sync configuration.
# Depending on connection_config[:is_stream], either streams model output
# (yielding each message to the caller's block) or returns the full result.
# Any StandardError is routed to handle_exception rather than raised.
#
# NOTE(fix): the original had a duplicated `def read(sync_config)` line,
# nesting one definition inside the other and leaving the outer method
# (and enclosing class) unterminated.
def read(sync_config)
  connection_config = prepare_config(sync_config.source.connection_specification)
  # Default is_stream to false when absent (||= also writes the default back).
  stream = connection_config[:is_stream] ||= false
  # The server checks the ConnectorQueryType.
  # If it's "ai_ml," the server calculates the payload and passes it as a query in the sync config model protocol.
  # This query is then sent to the AI/ML model.
  payload = parse_json(sync_config.model.query)
  if stream
    run_model_stream(connection_config, payload) { |message| yield message if block_given? }
  else
    run_model(connection_config, payload)
  end
rescue StandardError => e
  handle_exception(e, { context: "OPEN AI:READ:EXCEPTION", type: "error" })
end