class Regexp::Lexer

# Lexes +input+ by scanning it and post-processing the token stream:
# each scanned token is normalized and validated against +syntax+,
# annotated with its nesting depths, and linked to its neighbors.
#
# input          - regexp source to lex (whatever Regexp::Scanner.scan accepts)
# syntax         - optional syntax name/flavor; defaults to the syntax of the
#                  running Ruby (Regexp::Syntax::CURRENT)
# options        - regexp options forwarded to the scanner
# collect_tokens - when true, accumulate emitted tokens into an Array
# block          - optional block invoked by emit for each finished token
#
# Returns the Array of Regexp::Token when collect_tokens is true, else nil.
def lex(input, syntax = nil, options: nil, collect_tokens: true, &block)
  syntax = syntax ? Regexp::Syntax.for(syntax) : Regexp::Syntax::CURRENT

  self.block = block
  self.collect_tokens = collect_tokens
  self.tokens = []
  self.prev_token = nil
  self.preprev_token = nil
  self.nesting = 0
  self.set_nesting = 0
  self.conditional_nesting = 0
  # Cumulative offset added to token positions when earlier tokens are split
  # (see break_literal / break_codepoint_list).
  self.shift = 0

  Regexp::Scanner.scan(input, options: options, collect_tokens: false) do |type, token, text, ts, te|
    type, token = *syntax.normalize(type, token)
    syntax.check! type, token

    ascend(type, token)

    # A quantifier applies only to the last character/codepoint of the
    # preceding token, so split that token and emit the unaffected head now;
    # the tail becomes the quantifier's target.
    if (last = prev_token) &&
       type == :quantifier &&
       (
         (last.type == :literal         && (parts = break_literal(last))) ||
         (last.token == :codepoint_list && (parts = break_codepoint_list(last)))
       )
      emit(parts[0])
      last = parts[1]
    end

    current = Regexp::Token.new(type, token, text, ts + shift, te + shift,
                                nesting, set_nesting, conditional_nesting)

    if type == :conditional && CONDITION_TOKENS.include?(token)
      # Conditional condition tokens are folded into the preceding token
      # rather than emitted separately.
      current = merge_condition(current, last)
    elsif last
      # Link the doubly-linked token list and emit the now-complete
      # predecessor (tokens are emitted one step behind the scanner).
      last.next = current
      current.previous = last
      emit(last)
    end

    self.preprev_token = last
    self.prev_token = current

    descend(type, token)
  end

  # Flush the final pending token.
  emit(prev_token) if prev_token

  collect_tokens ? tokens : nil
end