# frozen_string_literal: true

require 'strscan'
require 'rubygems/request_set/lockfile/parser'

class Gem::RequestSet::Lockfile::Tokenizer
  Token = Struct.new :type, :value, :column, :line
  EOF   = Token.new :EOF

  ##
  # Creates a Tokenizer for the lockfile at +file+.

  def self.from_file file
    new File.read(file), file
  end

  ##
  # Tokenizes +input+ immediately.  +filename+ is used in error messages,
  # while +line+ and +pos+ give the starting line number and byte offset.

  def initialize input, filename = nil, line = 0, pos = 0
    @line     = line
    @line_pos = pos
    @tokens   = []
    @filename = filename
    tokenize input
  end

  ##
  # Builds a Parser that consumes this tokenizer's tokens into +set+ for the
  # given +platforms+.

  def make_parser set, platforms
    Gem::RequestSet::Lockfile::Parser.new self, set, platforms, @filename
  end

  ##
  # Returns the tokens as <tt>[type, value, column, line]</tt> arrays.

  def to_a
    @tokens.map { |token| [token.type, token.value, token.column, token.line] }
  end

  ##
  # Discards consecutive leading tokens of +type+.

  def skip type
    @tokens.shift while not @tokens.empty? and peek.type == type
  end

  ##
  # Calculates the column (by byte) and the line of the current token based on
  # +byte_offset+.

  def token_pos byte_offset # :nodoc:
    [byte_offset - @line_pos, @line]
  end

  def empty?
    @tokens.empty?
  end

  def unshift token
    @tokens.unshift token
  end

  ##
  # Consumes and returns the next token, or +nil+ when no tokens remain.

  def next_token
    @tokens.shift
  end
  alias :shift :next_token

  ##
  # Returns the next token without consuming it, or EOF when empty.

  def peek
    @tokens.first || EOF
  end

  private

  def tokenize input
    s = StringScanner.new input

    until s.eos? do
      pos = s.pos

      # Record the position past any leading whitespace; its presence also
      # distinguishes indented text from an all-caps section header below.
      pos = s.pos if leading_whitespace = s.scan(/ +/)

      if s.scan(/[<|=>]{7}/) then
        message = "your #{@filename} contains merge conflict markers"
        column, line = token_pos pos

        raise Gem::RequestSet::Lockfile::ParseError.new message, column, line, @filename
      end

      @tokens <<
        case
        when s.scan(/\r?\n/) then
          token = Token.new(:newline, nil, *token_pos(pos))
          @line_pos = s.pos
          @line += 1
          token
        when s.scan(/[A-Z]+/) then
          if leading_whitespace then
            text = s.matched
            text += s.scan(/[^\s)]*/).to_s # in case of no match
            Token.new(:text, text, *token_pos(pos))
          else
            Token.new(:section, s.matched, *token_pos(pos))
          end
        when s.scan(/([a-z]+):\s/) then
          s.pos -= 1 # rewind for possible newline
          Token.new(:entry, s[1], *token_pos(pos))
        when s.scan(/\(/) then
          Token.new(:l_paren, nil, *token_pos(pos))
        when s.scan(/\)/) then
          Token.new(:r_paren, nil, *token_pos(pos))
        when s.scan(/<=|>=|=|~>|<|>|!=/) then
          Token.new(:requirement, s.matched, *token_pos(pos))
        when s.scan(/,/) then
          Token.new(:comma, nil, *token_pos(pos))
        when s.scan(/!/) then
          Token.new(:bang, nil, *token_pos(pos))
        when s.scan(/[^\s),!]*/) then
          Token.new(:text, s.matched, *token_pos(pos))
        else
          raise "BUG: can't create token for: #{s.string[s.pos..-1].inspect}"
        end
    end

    @tokens
  end
end
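
# A minimal usage sketch, not part of the upstream file.  The lockfile
# fragment below is a hypothetical example; real lockfiles are written by
# RubyGems itself.  Each token carries its type, value, byte column, and
# zero-based line number:
#
#   tokenizer = Gem::RequestSet::Lockfile::Tokenizer.new <<~LOCKFILE
#     GEM
#       remote: https://rubygems.org/
#   LOCKFILE
#
#   tokenizer.to_a
#   # => [[:section, "GEM", 0, 0],
#   #     [:newline, nil, 3, 0],
#   #     [:entry, "remote", 2, 1],
#   #     [:text, "https://rubygems.org/", 10, 1],
#   #     [:newline, nil, 31, 1]]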