# frozen_string_literal: true

# app/models/ruby_conversations/prompt.rb

require 'active_model'
require 'active_support/core_ext/object/instance_variables' # provides Object#instance_values used by #attributes

module RubyConversations
  # Represents a prompt template used to generate AI messages.
  class Prompt
    include ActiveModel::Model

    # Define attributes
    attr_accessor :id, :name, :role, :message, :valid_placeholders, :temperature, :metadata, :created_at, :updated_at,
                  :latest_version_id, :llm

    # Constants
    ROLES = %w[system user assistant].freeze

    # Class methods
    def self.roles
      ROLES
    end

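    # Fetches a prompt definition by name via RubyConversations.client and wraps it
    # in a Prompt. Usage sketch ('welcome_message' is an illustrative name; this
    # assumes the client's #fetch_prompt returns a Hash whose keys match the
    # attributes declared above):
    #
    #   prompt = RubyConversations::Prompt.find_by_name!('welcome_message')
    #   prompt.message # => the prompt template text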
    def self.find_by_name!(name)
      prompt_data = RubyConversations.client.fetch_prompt(name)
      raise "Prompt not found: #{name}" unless prompt_data

      new(prompt_data)
    end

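    # ActiveRecord-style convenience wrapper; only the :name condition is used.
    #
    #   RubyConversations::Prompt.find_by!(name: 'welcome_message') # same as find_by_name!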
    def self.find_by!(conditions)
      name = conditions[:name]
      raise ArgumentError, 'find_by! supports only a :name condition' unless name

      find_by_name!(name)
    end

    # Validations
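    # Illustrative check (attribute values are made up):
    #
    #   prompt = RubyConversations::Prompt.new(name: 'greeting', role: 'user', message: 'Hi %{name}')
    #   prompt.valid?                                           # => true
    #   RubyConversations::Prompt.new(role: 'narrator').valid?  # => false (missing name/message, unknown role)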
    validates :name, presence: true
    validates :role, presence: true, inclusion: { in: ROLES }
    validates :message, presence: true

    # Returns a Hash of instance variable names (without the leading '@') mapped to
    # their values, via ActiveSupport's Object#instance_values.
    def attributes
      instance_values
    end

    # Attribute hash for API serialization; nil values are dropped by #compact.
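    # Example (illustrative values):
    #
    #   RubyConversations::Prompt.new(name: 'greeting', role: 'user', message: 'Hi').attributes_for_api
    #   # => { name: 'greeting', role: 'user', message: 'Hi' }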
    def attributes_for_api
      {
        id: id,
        name: name,
        role: role,
        message: message,
        valid_placeholders: valid_placeholders,
        temperature: temperature,
        metadata: metadata,
        latest_version_id: latest_version_id
      }.compact
    end

    # Interpolates Kernel#format-style placeholders (e.g. %{name}) into the message.
    # Missing keys raise KeyError; when variables are supplied, literal percent signs
    # in the message must be escaped as %%.
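    # Example:
    #
    #   RubyConversations::Prompt.new(message: 'Hello, %{name}!').interpolate(name: 'Ada')
    #   # => "Hello, Ada!"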
    def interpolate(variables = {})
      return message if message.nil? || variables.empty?

      format(message, **variables)
    end
  end
end