class Nokogiri::CSS::GeneratedTokenizer

# Evaluates the token-construction block and returns its result
# (a two-element [TYPE, text] array in the rules below). Exists as
# a hook point so generated scanner rules share one call site.
#
# Fix: the original had the `def action &block` line duplicated,
# leaving an unclosed method definition (syntax error on load).
def action &block
  yield
end

# Reads +filename+ and tokenizes its entire contents via #scan_evaluate.
# Remembers the filename in @filename (presumably for error reporting
# elsewhere in the generated parser — not used in this chunk).
#
# Fixes: the original had the `def load_file` line duplicated (unclosed
# definition), and used Kernel#open, which treats a leading "|" in the
# filename as a shell command to execute — unsafe for caller-supplied
# paths. File.open has no such behavior.
def load_file( filename )
  @filename = filename
  File.open(filename, "r") do |f|
    scan_evaluate  f.read
  end
end

# Racc interface: returns the next [TYPE, text] pair queued by
# #scan_evaluate, or nil when the stream is exhausted (nil is Racc's
# end-of-input marker).
#
# Fix: the original had the `def next_token` line duplicated, leaving
# an unclosed method definition.
def next_token
  @rex_tokens.shift
end

# Tokenizes the CSS selector string +str+ into @rex_tokens, an array of
# [TYPE, text] pairs consumed by #next_token. Generated rexical-style
# scanner: rules are tried strictly in order at each position, so the
# ordering of the `when` branches is significant (e.g. FUNCTION before
# IDENT, comments swallowed silently). Raises ScanError when no rule
# matches or an unknown scanner state is reached.
#
# Fix: the original had the `def scan_evaluate` line duplicated,
# leaving an unclosed method definition. The scanning logic itself is
# unchanged.
#
# NOTE(review): @lineno is only bumped when the next *unscanned* char
# at the top of the loop is "\n"; newlines consumed inside multi-char
# tokens are not counted — inherited from the generator, left as-is.
def scan_evaluate( str )
  scan_setup
  @rex_tokens = []
  @lineno  =  1
  ss = StringScanner.new(str)
  state = nil
  until ss.eos?
    text = ss.peek(1)
    @lineno  +=  1  if text == "\n"
    case state
    when nil
      case
      when (text = ss.scan(/~=/i))
         @rex_tokens.push action { [:INCLUDES, text] }
      when (text = ss.scan(/\|=/i))
         @rex_tokens.push action { [:DASHMATCH, text] }
      when (text = ss.scan(/\^=/i))
         @rex_tokens.push action { [:PREFIXMATCH, text] }
      when (text = ss.scan(/\$=/i))
         @rex_tokens.push action { [:SUFFIXMATCH, text] }
      when (text = ss.scan(/\*=/i))
         @rex_tokens.push action { [:SUBSTRINGMATCH, text] }
      when (text = ss.scan(/!=/i))
         @rex_tokens.push action { [:NOT_EQUAL, text] }
      # identifier followed by "(" — must be tried before plain IDENT
      when (text = ss.scan(/[-]?([_a-z]|[^\0-\177]|\\[0-9a-f]{1,6}(\r\n|[\s\n\r\t\f])?|\\[^\n\r\f0-9a-f])([_a-z0-9-]|[^\0-\177]|\\[0-9a-f]{1,6}(\r\n|[\s\n\r\t\f])?|\\[^\n\r\f0-9a-f])*\(\s*/i))
         @rex_tokens.push action { [:FUNCTION, text] }
      when (text = ss.scan(/@[-]?([_a-z]|[^\0-\177]|\\[0-9a-f]{1,6}(\r\n|[\s\n\r\t\f])?|\\[^\n\r\f0-9a-f])([_a-z0-9-]|[^\0-\177]|\\[0-9a-f]{1,6}(\r\n|[\s\n\r\t\f])?|\\[^\n\r\f0-9a-f])*/i))
         @rex_tokens.push action { [:IDENT, text] }
      when (text = ss.scan(/[-]?([_a-z]|[^\0-\177]|\\[0-9a-f]{1,6}(\r\n|[\s\n\r\t\f])?|\\[^\n\r\f0-9a-f])([_a-z0-9-]|[^\0-\177]|\\[0-9a-f]{1,6}(\r\n|[\s\n\r\t\f])?|\\[^\n\r\f0-9a-f])*/i))
         @rex_tokens.push action { [:IDENT, text] }
      when (text = ss.scan(/-?([0-9]+|[0-9]*\.[0-9]+)/i))
         @rex_tokens.push action { [:NUMBER, text] }
      when (text = ss.scan(/\#([_a-z0-9-]|[^\0-\177]|\\[0-9a-f]{1,6}(\r\n|[\s\n\r\t\f])?|\\[^\n\r\f0-9a-f])+/i))
         @rex_tokens.push action { [:HASH, text] }
      # combinators absorb any preceding whitespace into the token text
      when (text = ss.scan(/[\s\r\n\f]*\+/i))
         @rex_tokens.push action { [:PLUS, text] }
      when (text = ss.scan(/[\s\r\n\f]*>/i))
         @rex_tokens.push action { [:GREATER, text] }
      when (text = ss.scan(/[\s\r\n\f]*,/i))
         @rex_tokens.push action { [:COMMA, text] }
      when (text = ss.scan(/[\s\r\n\f]*~/i))
         @rex_tokens.push action { [:TILDE, text] }
      when (text = ss.scan(/\:not\(/i))
         @rex_tokens.push action { [:NOT, text] }
      # NOTE(review): unreachable — the @-prefixed IDENT rule above
      # matches the same input first; inherited from the generator.
      when (text = ss.scan(/@[-]?([_a-z]|[^\0-\177]|\\[0-9a-f]{1,6}(\r\n|[\s\n\r\t\f])?|\\[^\n\r\f0-9a-f])([_a-z0-9-]|[^\0-\177]|\\[0-9a-f]{1,6}(\r\n|[\s\n\r\t\f])?|\\[^\n\r\f0-9a-f])*/i))
         @rex_tokens.push action { [:ATKEYWORD, text] }
      when (text = ss.scan(/-?([0-9]+|[0-9]*\.[0-9]+)%/i))
         @rex_tokens.push action { [:PERCENTAGE, text] }
      when (text = ss.scan(/-?([0-9]+|[0-9]*\.[0-9]+)[-]?([_a-z]|[^\0-\177]|\\[0-9a-f]{1,6}(\r\n|[\s\n\r\t\f])?|\\[^\n\r\f0-9a-f])([_a-z0-9-]|[^\0-\177]|\\[0-9a-f]{1,6}(\r\n|[\s\n\r\t\f])?|\\[^\n\r\f0-9a-f])*/i))
         @rex_tokens.push action { [:DIMENSION, text] }
      when (text = ss.scan(/<!--/i))
         @rex_tokens.push action { [:CDO, text] }
      when (text = ss.scan(/-->/i))
         @rex_tokens.push action { [:CDC, text] }
      when (text = ss.scan(/[\s\r\n\f]*\/\//i))
         @rex_tokens.push action { [:DOUBLESLASH, text] }
      when (text = ss.scan(/[\s\r\n\f]*\//i))
         @rex_tokens.push action { [:SLASH, text] }
      when (text = ss.scan(/U\+[0-9a-f?]{1,6}(-[0-9a-f]{1,6})?/i))
         @rex_tokens.push action {[:UNICODE_RANGE, text] }
      # comments produce no token
      when (text = ss.scan(/\/\*(.|[\r\n])*?\*\//i))
        ;
      when (text = ss.scan(/[\s\t\r\n\f]+/i))
         @rex_tokens.push action { [:S, text] }
      # single-char punctuation tokens use the character itself as TYPE
      when (text = ss.scan(/[\.*:\[\]=\)]/i))
         @rex_tokens.push action { [text, text] }
      when (text = ss.scan(/"([^\n\r\f"]|\\n|\r\n|\r|\f|[^\0-\177]|\\[0-9a-f]{1,6}(\r\n|[\s\n\r\t\f])?|\\[^\n\r\f0-9a-f])*"|'([^\n\r\f']|\\n|\r\n|\r|\f|[^\0-\177]|\\[0-9a-f]{1,6}(\r\n|[\s\n\r\t\f])?|\\[^\n\r\f0-9a-f])*'/i))
         @rex_tokens.push action { [:STRING, text] }
      # unterminated string
      when (text = ss.scan(/\"([^\n\r\f\"]|\\n|\r\n|\r|\f|[^\0-\177]|\\[0-9a-f]{1,6}(\r\n|[\s\n\r\t\f])?|\\[^\n\r\f0-9a-f])*|([^\n\r\f\']|\\n|\r\n|\r|\f|[^\0-\177]|\\[0-9a-f]{1,6}(\r\n|[\s\n\r\t\f])?|\\[^\n\r\f0-9a-f])*/i))
         @rex_tokens.push action { [:INVALID, text] }
      # catch-all: any remaining single character, as its own TYPE
      when (text = ss.scan(/./i))
         @rex_tokens.push action { [text, text] }
      else
        text = ss.string[ss.pos .. -1]
        raise  ScanError, "can not match: '" + text + "'"
      end  # if
    else
      raise  ScanError, "undefined state: '" + state.to_s + "'"
    end  # case state
  end  # until ss
end  # def scan_evaluate

# Tokenizes the contents of +filename+ (via #load_file) and then runs
# the Racc parser (#do_parse, provided by the generated parser base).
#
# Fix: the original had the `def scan_file` line duplicated, leaving
# an unclosed method definition.
def scan_file( filename )
  load_file  filename
  do_parse
end

# Hook invoked by #scan_evaluate before scanning begins; this scanner
# keeps no extra per-scan state, so it is a no-op.
#
# Fix: the original defined this method twice on consecutive lines
# (line-duplication corruption); a single definition suffices.
def scan_setup ; end

# Tokenizes the string +str+ (via #scan_evaluate) and then runs the
# Racc parser (#do_parse, provided by the generated parser base).
#
# Fix: the original had the `def scan_str` line duplicated, leaving
# an unclosed method definition.
def scan_str( str )
  scan_evaluate  str
  do_parse
end