class L::OpenAI

Use the OpenAI API’s official Ruby library

def array?(schema)

def array?(schema)
  schema.is_a?(Hash) && schema.dig('json_schema', 'schema', 'properties', 'items', 'type') == 'array'
end
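
For reference, here is a sketch of the kind of response_format hash this predicate matches. The exact shape is an assumption inferred from the dig path above and from the schema method below, not a required interface.

format = {
  'type' => 'json_schema',
  'json_schema' => {
    'name' => 'schema',
    'schema' => {
      'type' => 'object',
      'properties' => {
        # hypothetical wrapper: a top-level list is expressed as an object
        # with an array-typed 'items' property
        'items' => { 'type' => 'array', 'items' => { 'type' => 'string' } }
      }
    }
  }
}
array?(format) # => true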

def chat(user_message, system_message = nil)

Generate a response with support for structured output
def chat(user_message, system_message = nil)
  schema = schema(settings)
  response = client.chat(
    parameters: {
      model: settings[:model], response_format: schema, messages: [
        system_message ? { role: :system, content: system_message } : nil,
        { role: :user, content: user_message }
      ].compact
    }.compact
  ).dig('choices', 0, 'message', 'content')
  content = schema ? ::Hashie::Mash.new(JSON.parse(response)) : response
  array?(schema) ? content.items : content
end
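
A minimal usage sketch. The settings keys (:model, :schema) are the ones the code above reads; the model name and schema are only examples, not defaults of this class.

llm = L::OpenAI.new(model: 'gpt-4o-mini')
llm.chat('Say hello')
# => a plain String, because no schema is configured

structured = L::OpenAI.new(
  model: 'gpt-4o-mini',
  schema: {
    'type' => 'object',
    'properties' => { 'greeting' => { 'type' => 'string' } }
  }
)
structured.chat('Greet me in JSON').greeting
# => the parsed field, courtesy of Hashie::Mash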

def client

def client
  return settings[:client] if settings[:client]
  @client ||= ::OpenAI::Client.new(
    access_token: ENV.fetch('OPENAI_ACCESS_TOKEN')
  )
end
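
Because the guard clause prefers settings[:client], a preconfigured client can be injected, for example in tests. A sketch, assuming only that settings is the hash passed to initialize.

stub = ::OpenAI::Client.new(access_token: 'test-token')
llm = L::OpenAI.new(client: stub, model: 'gpt-4o-mini')
# llm now talks to the injected client instead of building one from
# ENV['OPENAI_ACCESS_TOKEN']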

def embeddings(chunks)

OpenAI’s text embeddings measure the relatedness of text strings. An embedding is a vector of floating point
numbers. The distance between two vectors measures their relatedness. Small distances suggest high relatedness
and large distances suggest low relatedness.
def embeddings(chunks)
  responses = chunks.map do |chunk|
    response = client.embeddings(
      parameters: { model: settings[:model], dimensions: settings[:dimensions], input: chunk }
    )
    response.dig('data', 0, 'embedding')
  end
  responses.one? ? responses.first : responses
end
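
A sketch of turning two embeddings into a relatedness score with cosine similarity. The model name and dimensions are assumptions, and the similarity calculation is ad hoc rather than part of this class.

llm = L::OpenAI.new(model: 'text-embedding-3-small', dimensions: 256)
a, b = llm.embeddings(['ruby gems', 'ruby libraries'])

dot = a.zip(b).sum { |x, y| x * y }
norm = ->(v) { Math.sqrt(v.sum { |x| x * x }) }
dot / (norm.call(a) * norm.call(b))
# => closer to 1.0 means more related, closer to 0.0 means less related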

def initialize(settings)

def initialize(settings)
  @settings = settings
end
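
The settings hash is stored as-is; the keys below are the ones read elsewhere in this class, shown here as a sketch rather than a required interface.

L::OpenAI.new(
  model: 'gpt-4o-mini',  # read by chat and embeddings
  dimensions: 256,       # read by embeddings
  schema: nil,           # read by schema(settings); enables structured output when set
  client: nil            # read by client; an optional preconfigured ::OpenAI::Client
)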

def schema(settings)

def schema(settings)
  return unless settings[:schema]
  {
    'type' => 'json_schema',
    'json_schema' => {
      'name' => 'schema',
      'schema' => settings[:schema]
    }
  }
end
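
A sketch of the wrapping, assuming a simple object schema in settings[:schema]; the envelope matches OpenAI’s json_schema response_format for structured outputs.

schema(schema: { 'type' => 'object', 'properties' => { 'name' => { 'type' => 'string' } } })
# => {
#      'type' => 'json_schema',
#      'json_schema' => {
#        'name' => 'schema',
#        'schema' => { 'type' => 'object', 'properties' => { 'name' => { 'type' => 'string' } } }
#      }
#    }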