ruby--ruby/lib/rubygems/request_set/lockfile/tokenizer.rb

# frozen_string_literal: true
require 'rubygems/request_set/lockfile/parser'
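
##
# Tokenizer for RubyGems lockfiles (Gemfile.lock-style files).  It breaks the
# input into Token structs which Gem::RequestSet::Lockfile::Parser consumes.
#
# A minimal usage sketch; the lockfile path, the expected first token and the
# parser arguments below are illustrative assumptions, not upstream examples:
#
#   tokenizer = Gem::RequestSet::Lockfile::Tokenizer.from_file 'Gemfile.lock'
#   tokenizer.peek.type # likely :section, e.g. for a leading "GEM" heading
#   parser    = tokenizer.make_parser Gem::RequestSet.new, []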
class Gem::RequestSet::Lockfile::Tokenizer
  Token = Struct.new :type, :value, :column, :line
  EOF = Token.new :EOF

  def self.from_file file
    new File.read(file), file
  end

  def initialize input, filename = nil, line = 0, pos = 0
    @line     = line
    @line_pos = pos
    @tokens   = []
    @filename = filename

    tokenize input
  end

  def make_parser set, platforms
    Gem::RequestSet::Lockfile::Parser.new self, set, platforms, @filename
  end

  def to_a
    @tokens.map { |token| [token.type, token.value, token.column, token.line] }
  end

  def skip type
    @tokens.shift while not @tokens.empty? and peek.type == type
  end

  ##
  # Calculates the column (by byte) and the line of the current token based on
  # +byte_offset+.
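  #
  # For illustration (the numbers are assumed, not from upstream docs): with
  # @line_pos == 10 and @line == 2, token_pos(14) would return [4, 2].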
  def token_pos byte_offset # :nodoc:
    [byte_offset - @line_pos, @line]
  end

  def empty?
    @tokens.empty?
  end

  def unshift token
    @tokens.unshift token
  end

  def next_token
    @tokens.shift
  end

  alias :shift :next_token

  def peek
    @tokens.first || EOF
  end

  private

  def tokenize input
    require 'strscan'

    s = StringScanner.new input

    until s.eos? do
      pos = s.pos

      # Remember any leading spaces: an indented run of capitals is plain text,
      # while a run of capitals at column zero starts a new section.
      pos = s.pos if leading_whitespace = s.scan(/ +/)

      if s.scan(/[<|=>]{7}/) then
        message = "your #{@filename} contains merge conflict markers"
        column, line = token_pos pos

        raise Gem::RequestSet::Lockfile::ParseError.new message, column, line, @filename
      end

      @tokens <<
        case
        when s.scan(/\r?\n/) then
          token = Token.new(:newline, nil, *token_pos(pos))
          @line_pos = s.pos
          @line += 1
          token
        when s.scan(/[A-Z]+/) then
          if leading_whitespace then
            text = s.matched
            text += s.scan(/[^\s)]*/).to_s # in case of no match
            Token.new(:text, text, *token_pos(pos))
          else
            Token.new(:section, s.matched, *token_pos(pos))
          end
        when s.scan(/([a-z]+):\s/) then
          s.pos -= 1 # rewind for possible newline
          Token.new(:entry, s[1], *token_pos(pos))
        when s.scan(/\(/) then
          Token.new(:l_paren, nil, *token_pos(pos))
        when s.scan(/\)/) then
          Token.new(:r_paren, nil, *token_pos(pos))
        when s.scan(/<=|>=|=|~>|<|>|!=/) then
          Token.new(:requirement, s.matched, *token_pos(pos))
        when s.scan(/,/) then
          Token.new(:comma, nil, *token_pos(pos))
        when s.scan(/!/) then
          Token.new(:bang, nil, *token_pos(pos))
        when s.scan(/[^\s),!]*/) then
          Token.new(:text, s.matched, *token_pos(pos))
        else
          raise "BUG: can't create token for: #{s.string[s.pos..-1].inspect}"
        end
    end

    @tokens
  end
end