class Anthropic::Resources::Beta::Messages

def create(params)

Other tags:
    See: Anthropic::Models::Beta::MessageCreateParams

Returns:
  • (Anthropic::Models::Beta::BetaMessage)

Parameters:
  • max_tokens (Integer) -- Body param: The maximum number of tokens to generate before stopping.
  • messages (Array) -- Body param: Input messages (a minimal call example follows this parameter list).
  • model (Symbol, String, Anthropic::Models::Model) -- Body param: The model that will complete your prompt. See [models](https://do
  • container (String, nil) -- Body param: Container identifier for reuse across requests.
  • mcp_servers (Array) -- Body param: MCP servers to be utilized in this request.
  • metadata (Anthropic::Models::Beta::BetaMetadata) -- Body param: An object describing metadata about the request.
  • service_tier (Symbol, Anthropic::Models::Beta::MessageCreateParams::ServiceTier) -- Body param: Determines whether to use priority capacity (if available) or standard capacity for this request.
  • stop_sequences (Array) -- Body param: Custom text sequences that will cause the model to stop generating.
  • system_ (String, Array) -- Body param: System prompt.
  • temperature (Float) -- Body param: Amount of randomness injected into the response.
  • thinking (Anthropic::Models::Beta::BetaThinkingConfigEnabled, Anthropic::Models::Beta::BetaThinkingConfigDisabled) -- Body param: Configuration for enabling Claude's extended thinking.
  • tool_choice (Anthropic::Models::Beta::BetaToolChoiceAuto, Anthropic::Models::Beta::BetaToolChoiceAny, Anthropic::Models::Beta::BetaToolChoiceTool, Anthropic::Models::Beta::BetaToolChoiceNone) -- Body param: How the model should use the provided tools. The model can use a specific tool, any available tool, decide by itself, or not use tools at all.
  • tools (Array) -- Body param: Definitions of tools that the model may use (see the tool-use sketch after the Overloads section).
  • top_k (Integer) -- Body param: Only sample from the top K options for each subsequent token.
  • top_p (Float) -- Body param: Use nucleus sampling.
  • betas (Array) -- Header param: Optional header to specify the beta version(s) you want to use.
  • request_options (Anthropic::RequestOptions, Hash{Symbol=>Object}, nil) --
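
Example: a minimal non-streaming call. This is a sketch, not taken from this page; the client constructor, the `client.beta.messages` accessor, and the model string are assumed from typical SDK usage.

  client = Anthropic::Client.new(api_key: ENV["ANTHROPIC_API_KEY"])

  # Only max_tokens, messages, and model are required; every other param above is optional.
  message = client.beta.messages.create(
    max_tokens: 1024,
    messages: [{role: "user", content: "Hello, Claude"}],
    model: "claude-sonnet-4-20250514"
  )

  puts message.content # the returned BetaMessage carries an array of content blocks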

Overloads:
  • create(max_tokens:, messages:, model:, container: nil, mcp_servers: nil, metadata: nil, service_tier: nil, stop_sequences: nil, system_: nil, temperature: nil, thinking: nil, tool_choice: nil, tools: nil, top_k: nil, top_p: nil, betas: nil, request_options: {})
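
The tools and tool_choice body params can usually be passed as plain hashes that the SDK coerces into the Beta models listed above. A rough sketch follows; the exact hash shapes are assumptions, not documented on this page.

  weather_tool = {
    name: "get_weather",
    description: "Get the current weather for a given city",
    input_schema: {
      type: "object",
      properties: {city: {type: "string"}},
      required: ["city"]
    }
  }

  message = client.beta.messages.create(
    max_tokens: 1024,
    messages: [{role: "user", content: "What's the weather in Paris?"}],
    model: "claude-sonnet-4-20250514",
    tools: [weather_tool],
    tool_choice: {type: "auto"} # or an Anthropic::Models::Beta::BetaToolChoiceAuto instance
  )
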
def create(params)
  parsed, options = Anthropic::Beta::MessageCreateParams.dump_request(params)

  # Streaming is handled by a dedicated method; reject `stream: true` here.
  if parsed[:stream]
    message = "Please use `#stream` for the streaming use case."
    raise ArgumentError.new(message)
  end

  if options.empty? && @client.timeout == Anthropic::Client::DEFAULT_TIMEOUT_IN_SECONDS
    # No per-request options and the client still uses the default timeout:
    # derive a model-specific non-streaming timeout from the requested max_tokens.
    model = parsed[:model].to_sym
    max_tokens = parsed[:max_tokens].to_i
    timeout = @client.calculate_nonstreaming_timeout(
      max_tokens,
      Anthropic::Client::MODEL_NONSTREAMING_TOKENS[model]
    )
    options = {timeout: timeout}
  else
    # Otherwise fall back to a 600-second timeout, which explicit options may override.
    options = {timeout: 600, **options}
  end

  # `betas` is sent as the `anthropic-beta` header; all other parsed params go in the body.
  header_params = {betas: "anthropic-beta"}
  @client.request(
    method: :post,
    path: "v1/messages?beta=true",
    headers: parsed.slice(*header_params.keys).transform_keys(header_params),
    body: parsed.except(*header_params.keys),
    model: Anthropic::Beta::BetaMessage,
    options: options
  )
end
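
Two behaviours worth noting in the source above: passing stream: true is rejected with an ArgumentError pointing at `#stream`, and when neither per-request options nor a custom client timeout are set, the request timeout is computed from max_tokens via calculate_nonstreaming_timeout instead of the flat 600-second fallback. A quick illustration of the guard (parameter values are placeholders):

  client.beta.messages.create(
    max_tokens: 1024,
    messages: [{role: "user", content: "Hi"}],
    model: "claude-sonnet-4-20250514",
    stream: true
  )
  # raises ArgumentError: Please use `#stream` for the streaming use case.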