Make max tokens in expressions lexer a class-level attribute

Grzegorz Bizon 2018-03-01 14:42:11 +01:00
parent c6ea7a2a13
commit 511046f9eb
2 changed files with 8 additions and 7 deletions

@@ -16,12 +16,13 @@ module Gitlab
           MAX_TOKENS = 100
 
-          def initialize(statement)
+          def initialize(statement, max_tokens: MAX_TOKENS)
             @scanner = StringScanner.new(statement)
+            @max_tokens = max_tokens
           end
 
-          def tokens(max: MAX_TOKENS)
-            strong_memoize(:tokens) { tokenize(max) }
+          def tokens
+            strong_memoize(:tokens) { tokenize }
           end
 
           def lexemes
@@ -30,10 +31,10 @@ module Gitlab
           private
 
-          def tokenize(max_tokens)
+          def tokenize
             tokens = []
 
-            max_tokens.times do
+            @max_tokens.times do
               @scanner.skip(/\s+/) # ignore whitespace
 
               return tokens if @scanner.eos?
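From a caller's point of view, the token cap is now fixed when the lexer is constructed instead of being chosen per call to #tokens. A minimal before/after sketch, assuming the fully qualified class name taken from the spec below; the statement string and the cap of 5 are illustrative only:

    # Before this commit: the cap was a per-call keyword on #tokens,
    # defaulting to MAX_TOKENS (100).
    lexer = Gitlab::Ci::Pipeline::Expression::Lexer.new('$VARIABLE == "value"')
    lexer.tokens(max: 5)

    # After this commit: the cap is bound once in the constructor and
    # #tokens takes no arguments; exceeding it raises Lexer::SyntaxError.
    lexer = Gitlab::Ci::Pipeline::Expression::Lexer.new('$VARIABLE == "value"', max_tokens: 5)
    lexer.tokens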

@@ -46,9 +46,9 @@ describe Gitlab::Ci::Pipeline::Expression::Lexer do
     end
 
     it 'limits statement to specified amount of tokens' do
-      lexer = described_class.new("$V1 $V2 $V3 $V4 $V5 $V6")
+      lexer = described_class.new("$V1 $V2 $V3 $V4", max_tokens: 3)
 
-      expect { lexer.tokens(max: 5) }
+      expect { lexer.tokens }
         .to raise_error described_class::SyntaxError
     end
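The spec is adjusted to match the new interface: the limit is supplied through the constructor (max_tokens: 3) while the statement contains four variable tokens, so tokenization still overruns the cap and raises described_class::SyntaxError, exercising the constructor option instead of the removed max: keyword on #tokens.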