module GraphQL::Language::Lexer
# Append a token of +token_name+ to the running token list.
#
# @param token_name [Symbol] the grammar token kind (e.g. :INT, :LCURLY)
# @param ts [Integer] start offset (inclusive) into meta[:data]
# @param te [Integer] end offset (exclusive) into meta[:data]
# @param meta [Hash] lexer state: :data (Array<Integer> of bytes from
#   String#unpack("c*")), :tokens, :line, :col
# @return [void]
def self.emit(token_name, ts, te, meta)
  meta[:tokens] << GraphQL::Language::Token.new(
    name: token_name,
    # meta[:data] holds signed bytes; pack("c*") rebuilds the raw string
    value: meta[:data][ts...te].pack("c*"),
    line: meta[:line],
    col: meta[:col],
  )
  # Bump the column counter for the next token
  meta[:col] += te - ts
end
# Emit a :STRING token for the span ts...te, unescaping its contents
# in place — unless it contains a malformed escape sequence, in which
# case a :BAD_UNICODE_ESCAPE token is emitted so the parser can report it.
#
# @param ts [Integer] start offset (inclusive) into meta[:data]
# @param te [Integer] end offset (exclusive) into meta[:data]
# @param meta [Hash] lexer state: :data, :tokens, :line, :col
# @return [void]
def self.emit_string(ts, te, meta)
  value = meta[:data][ts...te].pack("c*").force_encoding("UTF-8")
  # Has an escape-looking sequence but doesn't match the valid ESCAPES
  # pattern => flag it instead of silently mangling the value.
  if value =~ /\\u|\\./ && value !~ ESCAPES
    meta[:tokens] << GraphQL::Language::Token.new(
      name: :BAD_UNICODE_ESCAPE,
      value: value,
      line: meta[:line],
      col: meta[:col],
    )
  else
    replace_escaped_characters_in_place(value)
    meta[:tokens] << GraphQL::Language::Token.new(
      name: :STRING,
      value: value,
      line: meta[:line],
      col: meta[:col],
    )
  end
  meta[:col] += te - ts
end
# Replace any escaped unicode or whitespace with the _actual_ characters.
# Mutates +raw_string+ via gsub! (ESCAPES / UTF_8 patterns and their
# replacements are module-level constants defined elsewhere in this file).
#
# @param raw_string [String] string to unescape; modified in place
# @return [nil] always nil, so callers can't accidentally use the
#   (possibly nil) gsub! return value
def self.replace_escaped_characters_in_place(raw_string)
  raw_string.gsub!(ESCAPES, ESCAPES_REPLACE)
  raw_string.gsub!(UTF_8, &UTF_8_REPLACE)
  nil
end
# NOTE(review): This is the Ragel-generated scanner driver (source grammar:
# lib/graphql/language/lexer.rl — see the embedded #line pragmas). The
# generated code in this copy appears corrupted/truncated: identifiers and
# keywords are cut off (e.g. "hile false", "oto_level") and statement
# fragments are missing. Do NOT hand-edit this section; regenerate it from
# the .rl grammar with Ragel and replace it wholesale.
# Contract (from the surviving fragments): takes a query string, unpacks it
# to a byte array, drives the _graphql_lexer_* state tables, emitting tokens
# via emit/emit_string, and returns meta[:tokens].
def self.run_lexer(query_string)
def self.run_lexer(query_string) data = query_string.unpack("c*") eof = data.length meta = { line: 1, col: 1, data: data, tokens: [] } 481 "lib/graphql/language/lexer.rb" 0 = data.length graphql_lexer_start nil nil 0 130 "lib/graphql/language/lexer.rl" emit_token = -> (name) { emit(name, ts, te, meta) } 498 "lib/graphql/language/lexer.rb" , _trans, _keys, _acts, _nacts = nil _level = 0 me = 10 trans = 15 n = 20 _eof = 30 = 40 true ger_goto = false oto_level <= 0 == pe o_level = _test_eof oto_level <= _resume = _graphql_lexer_from_state_actions[cs] s = _graphql_lexer_actions[_acts] += 1 _nacts > 0 ts -= 1 s += 1 _graphql_lexer_actions[_acts - 1] n 1 then 1 "NONE" n 528 "lib/graphql/language/lexer.rb" # from state action switch rigger_goto = _graphql_lexer_key_offsets[cs] s = _graphql_lexer_index_offsets[cs] = _graphql_lexer_single_lengths[cs] k_match = false _klen > 0 _lower = _keys _upper = _keys + _klen - 1 loop do break if _upper < _lower _mid = _lower + ( (_upper - _lower) >> 1 ) if data[p].ord < _graphql_lexer_trans_keys[_mid] _upper = _mid - 1 elsif data[p].ord > _graphql_lexer_trans_keys[_mid] _lower = _mid + 1 else _trans += (_mid - _keys) _break_match = true break end end # loop break if _break_match _keys += _klen _trans += _klen en = _graphql_lexer_range_lengths[cs] _klen > 0 _lower = _keys _upper = _keys + (_klen << 1) - 2 loop do break if _upper < _lower _mid = _lower + (((_upper-_lower) >> 1) & ~1) if data[p].ord < _graphql_lexer_trans_keys[_mid] _upper = _mid - 2 elsif data[p].ord > _graphql_lexer_trans_keys[_mid+1] _lower = _mid + 2 else _trans += ((_mid - _keys) >> 1) _break_match = true break end end # loop break if _break_match _trans += _klen hile false oto_level <= _eof_trans _graphql_lexer_trans_targs[_trans] raphql_lexer_trans_actions[_trans] != 0 s = _graphql_lexer_trans_actions[_trans] ts = _graphql_lexer_actions[_acts] s += 1 e _nacts > 0 cts -= 1 ts += 1 e _graphql_lexer_actions[_acts - 1] then 1 "NONE" n +1 then 51 
"lib/graphql/language/lexer.rl" n 1; end then 52 "lib/graphql/language/lexer.rl" n 2; end then 53 "lib/graphql/language/lexer.rl" n 3; end then 54 "lib/graphql/language/lexer.rl" n 4; end then 55 "lib/graphql/language/lexer.rl" n 5; end then 56 "lib/graphql/language/lexer.rl" n 6; end then 57 "lib/graphql/language/lexer.rl" n 7; end 0 then 58 "lib/graphql/language/lexer.rl" n 8; end 1 then 59 "lib/graphql/language/lexer.rl" n 9; end 2 then 60 "lib/graphql/language/lexer.rl" n 10; end 3 then 61 "lib/graphql/language/lexer.rl" n 11; end 4 then 62 "lib/graphql/language/lexer.rl" n 12; end 5 then 63 "lib/graphql/language/lexer.rl" n 13; end 6 then 64 "lib/graphql/language/lexer.rl" n 14; end 7 then 65 "lib/graphql/language/lexer.rl" n 15; end 8 then 66 "lib/graphql/language/lexer.rl" n 16; end 9 then 67 "lib/graphql/language/lexer.rl" n 17; end 0 then 68 "lib/graphql/language/lexer.rl" n 18; end 1 then 76 "lib/graphql/language/lexer.rl" n 26; end 2 then 83 "lib/graphql/language/lexer.rl" n 33; end 3 then 93 "lib/graphql/language/lexer.rl" n 37; end 4 then 69 "lib/graphql/language/lexer.rl" n +1 emit_token.call(:RCURLY) end 5 then 70 "lib/graphql/language/lexer.rl" n +1 emit_token.call(:LCURLY) end 6 then 71 "lib/graphql/language/lexer.rl" n +1 emit_token.call(:RPAREN) end 7 then 72 "lib/graphql/language/lexer.rl" n +1 emit_token.call(:LPAREN) end 8 then 73 "lib/graphql/language/lexer.rl" n +1 emit_token.call(:RBRACKET) end 9 then 74 "lib/graphql/language/lexer.rl" n +1 emit_token.call(:LBRACKET) end 0 then 75 "lib/graphql/language/lexer.rl" n +1 emit_token.call(:COLON) end 1 then 76 "lib/graphql/language/lexer.rl" n +1 emit_string(ts + 1, te - 1, meta) end 2 then 77 "lib/graphql/language/lexer.rl" n +1 emit_token.call(:VAR_SIGN) end 3 then 78 "lib/graphql/language/lexer.rl" n +1 emit_token.call(:DIR_SIGN) end 4 then 79 "lib/graphql/language/lexer.rl" n +1 emit_token.call(:ELLIPSIS) end 5 then 80 "lib/graphql/language/lexer.rl" n +1 emit_token.call(:EQUALS) end 6 then 
81 "lib/graphql/language/lexer.rl" n +1 emit_token.call(:BANG) end 7 then 82 "lib/graphql/language/lexer.rl" n +1 emit_token.call(:PIPE) end 8 then 85 "lib/graphql/language/lexer.rl" n +1 meta[:line] += 1 meta[:col] = 1 nd 9 then 93 "lib/graphql/language/lexer.rl" n +1 emit_token.call(:UNKNOWN_CHAR) end 0 then 51 "lib/graphql/language/lexer.rl" n - 1; begin emit_token.call(:INT) end 1 then 52 "lib/graphql/language/lexer.rl" n - 1; begin emit_token.call(:FLOAT) end 2 then 83 "lib/graphql/language/lexer.rl" n - 1; begin emit_token.call(:IDENTIFIER) end 3 then 90 "lib/graphql/language/lexer.rl" n - 1; begin meta[:col] += te - ts end 4 then 91 "lib/graphql/language/lexer.rl" n - 1; begin meta[:col] += te - ts end 5 then 93 "lib/graphql/language/lexer.rl" n - 1; begin emit_token.call(:UNKNOWN_CHAR) end 6 then 51 "lib/graphql/language/lexer.rl" n p = ((te))-1; end emit_token.call(:INT) end 7 then 93 "lib/graphql/language/lexer.rl" n p = ((te))-1; end emit_token.call(:UNKNOWN_CHAR) end 8 then 1 "NONE" n act 1 then begin p = ((te))-1; end token.call(:INT) end 2 then begin p = ((te))-1; end token.call(:FLOAT) end 3 then begin p = ((te))-1; end token.call(:ON) end 4 then begin p = ((te))-1; end token.call(:FRAGMENT) end 5 then begin p = ((te))-1; end token.call(:TRUE) end 6 then begin p = ((te))-1; end token.call(:FALSE) end 7 then begin p = ((te))-1; end token.call(:NULL) end 8 then begin p = ((te))-1; end token.call(:QUERY) end 9 then begin p = ((te))-1; end token.call(:MUTATION) end 10 then begin p = ((te))-1; end token.call(:SUBSCRIPTION) end 11 then begin p = ((te))-1; end token.call(:SCHEMA) end 12 then begin p = ((te))-1; end token.call(:SCALAR) end 13 then begin p = ((te))-1; end token.call(:TYPE) end 14 then begin p = ((te))-1; end token.call(:IMPLEMENTS) end 15 then begin p = ((te))-1; end token.call(:INTERFACE) end 16 then begin p = ((te))-1; end token.call(:UNION) end 17 then begin p = ((te))-1; end token.call(:ENUM) end 18 then begin p = ((te))-1; end 
token.call(:INPUT) end 26 then begin p = ((te))-1; end string(ts + 1, te - 1, meta) end 33 then begin p = ((te))-1; end token.call(:IDENTIFIER) end 37 then begin p = ((te))-1; end token.call(:UNKNOWN_CHAR) end 899 "lib/graphql/language/lexer.rb" # action switch rigger_goto oto_level <= _again = _graphql_lexer_to_state_actions[cs] s = _graphql_lexer_actions[_acts] += 1 _nacts > 0 ts -= 1 s += 1 _graphql_lexer_actions[_acts - 1] then 1 "NONE" n il; end 919 "lib/graphql/language/lexer.rb" # to state action switch rigger_goto 1 != pe o_level = _resume oto_level <= _test_eof == eof raphql_lexer_eof_trans[cs] > 0 ns = _graphql_lexer_eof_trans[cs] - 1; o_level = _eof_trans ; oto_level <= _out k 136 "lib/graphql/language/lexer.rl" meta[:tokens] end
# Public entry point: lex a GraphQL document.
#
# @param query_string [String] the GraphQL source text
# @return [Array<GraphQL::Language::Token>] the tokens produced by run_lexer
def self.tokenize(query_string)
  run_lexer(query_string)
end