# frozen_string_literal: true

module Anthropic
module Models
module Beta
# @see Anthropic::Resources::Beta::Messages#create
#
# @see Anthropic::Resources::Beta::Messages#stream_raw
class BetaMessage < Anthropic::Internal::Type::BaseModel
# @!attribute id
# Unique object identifier.
#
# The format and length of IDs may change over time.
#
# @return [String]
required :id, String

# @!attribute container
# Information about the container used in the request (for the code execution
# tool)
#
# @return [Anthropic::Models::Beta::BetaContainer, nil]
required :container, -> { Anthropic::Beta::BetaContainer }, nil?: true

# @!attribute content
# Content generated by the model.
#
# This is an array of content blocks, each of which has a `type` that determines
# its shape.
#
# Example:
#
# ```json
# [{ "type": "text", "text": "Hi, I'm Claude." }]
# ```
#
# If the request input `messages` ended with an `assistant` turn, then the
# response `content` will continue directly from that last turn. You can use this
# to constrain the model's output.
#
# For example, if the input `messages` were:
#
# ```json
# [
# {
# "role": "user",
# "content": "What's the Greek name for Sun? (A) Sol (B) Helios (C) Sun"
# },
# { "role": "assistant", "content": "The best answer is (" }
# ]
# ```
#
# Then the response `content` might be:
#
# ```json
# [{ "type": "text", "text": "B)" }]
# ```
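#
# As a rough sketch (not part of this class), driving that prefill technique
# through the Ruby client could look like the following; the client construction,
# model name, and `max_tokens` value are assumptions for illustration:
#
# ```ruby
# anthropic = Anthropic::Client.new(api_key: ENV["ANTHROPIC_API_KEY"])
#
# message = anthropic.beta.messages.create(
#   model: "claude-sonnet-4-20250514", # assumed model name
#   max_tokens: 16,
#   messages: [
#     {
#       role: "user",
#       content: "What's the Greek name for Sun? (A) Sol (B) Helios (C) Sun"
#     },
#     # Prefilled assistant turn; the response continues from this text.
#     { role: "assistant", content: "The best answer is (" }
#   ]
# )
#
# # Assumes the first returned block is a text block.
# message.content.first.text # => "B)" (for example)
# ```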
#
# @return [Array<Anthropic::Models::Beta::BetaTextBlock, Anthropic::Models::Beta::BetaThinkingBlock, Anthropic::Models::Beta::BetaRedactedThinkingBlock, Anthropic::Models::Beta::BetaToolUseBlock, Anthropic::Models::Beta::BetaServerToolUseBlock, Anthropic::Models::Beta::BetaWebSearchToolResultBlock, Anthropic::Models::Beta::BetaCodeExecutionToolResultBlock, Anthropic::Models::Beta::BetaMCPToolUseBlock, Anthropic::Models::Beta::BetaMCPToolResultBlock, Anthropic::Models::Beta::BetaContainerUploadBlock>]
required :content, -> { Anthropic::Internal::Type::ArrayOf[union: Anthropic::Beta::BetaContentBlock] }

# @!attribute model
# The model that will complete your prompt.
#
# See [models](https://docs.anthropic.com/en/docs/models-overview) for additional
# details and options.
#
# @return [Symbol, String, Anthropic::Models::Model]
required :model, union: -> { Anthropic::Model }

# @!attribute role
# Conversational role of the generated message.
#
# This will always be `"assistant"`.
#
# @return [Symbol, :assistant]
required :role, const: :assistant

# @!attribute stop_reason
# The reason that we stopped.
#
# This may be one of the following values:
#
# - `"end_turn"`: the model reached a natural stopping point
# - `"max_tokens"`: we exceeded the requested `max_tokens` or the model's maximum
# - `"stop_sequence"`: one of your provided custom `stop_sequences` was generated
# - `"tool_use"`: the model invoked one or more tools
# - `"pause_turn"`: we paused a long-running turn. You may provide the response
# back as-is in a subsequent request to let the model continue.
# - `"refusal"`: when streaming classifiers intervene to handle potential policy
# violations
#
# In non-streaming mode this value is always non-null. In streaming mode, it is
# null in the `message_start` event and non-null otherwise.
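#
# For `"pause_turn"` in particular, a minimal sketch of continuing the turn; the
# `conversation` array, client, and model name are assumptions for illustration,
# and it assumes the SDK serializes returned content blocks passed back in
# `messages`:
#
# ```ruby
# while message.stop_reason == :pause_turn
#   # Provide the paused response back as-is so the model can continue.
#   conversation << { role: message.role, content: message.content }
#
#   message = anthropic.beta.messages.create(
#     model: "claude-sonnet-4-20250514", # assumed model name
#     max_tokens: 1024,
#     messages: conversation
#   )
# end
# ```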
#
# @return [Symbol, Anthropic::Models::Beta::BetaStopReason, nil]
required :stop_reason, enum: -> { Anthropic::Beta::BetaStopReason }, nil?: true

# @!attribute stop_sequence
# Which custom stop sequence was generated, if any.
#
# This value will be a non-null string if one of your custom stop sequences was
# generated.
#
# @return [String, nil]
required :stop_sequence, String, nil?: true

# @!attribute type
# Object type.
#
# For Messages, this is always `"message"`.
#
# @return [Symbol, :message]
required :type, const: :message

# @!attribute usage
# Billing and rate-limit usage.
#
# Anthropic's API bills and rate-limits by token counts, as tokens represent the
# underlying cost to our systems.
#
# Under the hood, the API transforms requests into a format suitable for the
# model. The model's output then goes through a parsing stage before becoming an
# API response. As a result, the token counts in `usage` will not match one-to-one
# with the exact visible content of an API request or response.
#
# For example, `output_tokens` will be non-zero, even for an empty string response
# from Claude.
#
# The total number of input tokens in a request is the sum of `input_tokens`,
# `cache_creation_input_tokens`, and `cache_read_input_tokens`.
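#
# For example, a total billed-input figure could be computed roughly as sketched
# below; the nil-guarding assumes the cache fields may be absent, and this is an
# illustration rather than an SDK helper:
#
# ```ruby
# usage = message.usage
#
# total_input_tokens =
#   usage.input_tokens +
#   (usage.cache_creation_input_tokens || 0) +
#   (usage.cache_read_input_tokens || 0)
# ```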
#
# @return [Anthropic::Models::Beta::BetaUsage]
required :usage, -> { Anthropic::Beta::BetaUsage }

# @!method initialize(id:, container:, content:, model:, stop_reason:, stop_sequence:, usage:, role: :assistant, type: :message)
# Some parameter documentation has been truncated; see
# {Anthropic::Models::Beta::BetaMessage} for more details.
#
# @param id [String] Unique object identifier.
#
# @param container [Anthropic::Models::Beta::BetaContainer, nil] Information about the container used in the request (for the code execution tool
#
# @param content [Array<Anthropic::Models::Beta::BetaTextBlock, Anthropic::Models::Beta::BetaThinkingBlock, Anthropic::Models::Beta::BetaRedactedThinkingBlock, Anthropic::Models::Beta::BetaToolUseBlock, Anthropic::Models::Beta::BetaServerToolUseBlock, Anthropic::Models::Beta::BetaWebSearchToolResultBlock, Anthropic::Models::Beta::BetaCodeExecutionToolResultBlock, Anthropic::Models::Beta::BetaMCPToolUseBlock, Anthropic::Models::Beta::BetaMCPToolResultBlock, Anthropic::Models::Beta::BetaContainerUploadBlock>] Content generated by the model.
#
# @param model [Symbol, String, Anthropic::Models::Model] The model that will complete your prompt. See [models](https://docs.anthropic
#
# @param stop_reason [Symbol, Anthropic::Models::Beta::BetaStopReason, nil] The reason that we stopped.
#
# @param stop_sequence [String, nil] Which custom stop sequence was generated, if any.
#
# @param usage [Anthropic::Models::Beta::BetaUsage] Billing and rate-limit usage.
#
# @param role [Symbol, :assistant] Conversational role of the generated message.
#
# @param type [Symbol, :message] Object type.
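#
# As a hedged illustration of consuming a returned message (the client call and
# model name are assumptions; instances are normally constructed by the SDK rather
# than by hand):
#
# ```ruby
# message = anthropic.beta.messages.create(
#   model: "claude-sonnet-4-20250514", # assumed model name
#   max_tokens: 1024,
#   messages: [{ role: "user", content: "Hello, Claude" }]
# )
#
# # Each content block's class (and its `type`) determines its shape.
# message.content.each do |block|
#   case block
#   when Anthropic::Beta::BetaTextBlock
#     puts block.text
#   when Anthropic::Beta::BetaToolUseBlock
#     puts "tool requested: #{block.name}"
#   end
# end
# ```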
end
end
BetaMessage = Beta::BetaMessage
end
end