class SemanticLogger::Appender::CloudwatchLogs

# Shut down the background flush timer task.
# NOTE(review): does not flush remaining buffered logs first — call #flush
# before #close if pending events must be delivered.
def close
  task.shutdown
end

# Use JSON Formatter by default.
#
# Returns: [SemanticLogger::Formatters::Json] formatter applied to each log event.
def default_formatter
  SemanticLogger::Formatters::Json.new
end

# Synchronously drain the buffer, sending batches until no buffered logs remain.
def flush
  task.execute while buffered_logs.size.positive?
end

# Create CloudWatch Logs Appender
#
# Parameters:
#   group: [String]
#     Log group name
#
#   client_kwargs: [Hash]
#     A hash to be passed to Aws::CloudWatchLogs::Client.new
#     Default: {}
#
#   stream: [String]
#     Log stream name
#     Default: SemanticLogger.host
#
#   create_group: [Boolean]
#     If the missing log group should be automatically created.
#     Default: false
#
#   create_stream: [Boolean]
#     If the missing log stream should be automatically created.
#     Default: true
#
#   force_flush_interval_seconds: [Integer]
#     Flush buffered logs every X seconds, regardless of the current buffer size.
#     Default: 5
#
#   max_buffered_events: [Integer]
#     Flush buffered logs if they are above the currently set size.
#     Default: 4000
#     Note that currently CloudWatch Logs has 10000 hard limit.
def initialize(
  *args,
  group:,
  client_kwargs: {},
  stream: nil,
  create_group: false,
  create_stream: true,
  force_flush_interval_seconds: 5,
  max_buffered_events: 4_000,
  **kwargs,
  &block
)
  @group = group
  @client_kwargs = client_kwargs
  @stream = stream
  @create_group = create_group
  @create_stream = create_stream
  @force_flush_interval_seconds = force_flush_interval_seconds
  @max_buffered_events = max_buffered_events
  super(*args, **kwargs, &block)
  # Sets up the client, buffer, and periodic flush task.
  reopen
end

# Method called to log an event.
#
# Buffers the event; triggers an immediate flush once the buffer reaches
# max_buffered_events (otherwise delivery waits for the timer task).
def log(log)
  buffered_logs << log
  put_log_events if buffered_logs.size >= max_buffered_events
end

# Send up to max_buffered_events buffered logs to CloudWatch Logs as one
# PutLogEvents batch. No-op when the buffer is empty.
#
# On ResourceNotFoundException, lazily creates the missing log group/stream
# (when the corresponding create_* flag is enabled) and retries the put.
# NOTE(review): retry is unbounded — assumes create_log_group/create_log_stream
# succeeding makes the next put succeed; confirm against eventual-consistency
# behavior of the API.
def put_log_events
  logs = buffered_logs.shift(max_buffered_events)
  return if logs.none?
  begin
    client.put_log_events({
                            log_group_name:  group,
                            log_stream_name: stream,
                            log_events:      logs.map do |log|
                                               {
                                                 # CloudWatch expects epoch milliseconds.
                                                 timestamp: (log.time.to_f * 1000).floor,
                                                 message:   formatter.call(log, self)
                                               }
                                             end
                          })
  rescue Aws::CloudWatchLogs::Errors::ResourceNotFoundException => e
    if e.message.include?("log group does not exist.") && create_group
      client.create_log_group({
                                log_group_name: group
                              })
      retry
    elsif e.message.include?("log stream does not exist.") && create_stream
      client.create_log_stream({
                                 log_group_name:  group,
                                 log_stream_name: stream
                               })
      retry
    else
      # Auto-create is disabled for the missing resource: re-raise instead of
      # silently discarding the batch.
      raise
    end
  end
end

# (Re)initialize the appender: fresh thread-safe buffer, a new CloudWatch Logs
# client, and a fixed-rate timer task that flushes the buffer every
# force_flush_interval_seconds.
def reopen
  @buffered_logs = Concurrent::Array.new
  @client = Aws::CloudWatchLogs::Client.new(client_kwargs)
  @task = Concurrent::TimerTask.new(execution_interval: force_flush_interval_seconds,
                                    interval_type:      :fixed_rate) do
    put_log_events
  end
  @task.execute
end

# Log stream name; falls back to the appender host when none was given.
def stream
  @stream || host
end