##
# Namespace stub so constants under RDoc (such as RDoc::TokenStream below)
# can be defined without loading the full RDoc library first.

module RDoc; end
module RDoc

  ##
  # A TokenStream is a list of tokens, gathered during the parse of some entity
  # (say a method). Entities populate these streams by being registered with the
  # lexer. Any class can collect tokens by including TokenStream. From the
  # outside, you use such an object by calling the start_collecting_tokens
  # method, followed by calls to add_token and pop_token.

  module TokenStream

    ##
    # Adds +tokens+ to the collected tokens.  Accepts individual tokens or
    # (possibly nested) arrays of tokens; arguments are flattened before
    # being appended to the stream.

    def add_tokens(*tokens)
      tokens.flatten.each { |token| @token_stream << token }
    end

    alias add_token add_tokens

    ##
    # Starts collecting tokens by (re)initializing the stream to an empty
    # Array.  Must be called before add_token / add_tokens.

    def collect_tokens
      @token_stream = []
    end

    alias start_collecting_tokens collect_tokens

    ##
    # Removes and returns the last token from the collected tokens, or nil
    # when the stream is empty.

    def pop_token
      @token_stream.pop
    end

    ##
    # Current token stream (the Array created by collect_tokens), or nil if
    # collection has not been started.

    def token_stream
      @token_stream
    end

    ##
    # Returns a string representation of the token stream, built by
    # concatenating the +text+ of every collected token.

    def tokens_to_s
      token_stream.map { |token| token.text }.join ''
    end

  end

end