##
# A TokenStream is a list of tokens, gathered during the parse of some entity
# (say a method). Entities populate these streams by being registered with the
# lexer. Any class can collect tokens by including TokenStream. From the
# outside, you use such an object by calling the start_collecting_tokens
# method, followed by calls to add_token and pop_token.
module RDoc::TokenStream

  ##
  # Converts +token_stream+ to HTML wrapping various tokens with
  # <tt><span></tt> elements.  The following tokens types are wrapped in spans
  # with the given class names:
  #
  # TkCONSTANT :: 'ruby-constant'
  # TkKW       :: 'ruby-keyword'
  # TkIVAR     :: 'ruby-ivar'
  # TkOp       :: 'ruby-operator'
  # TkId       :: 'ruby-identifier'
  # TkNode     :: 'ruby-node'
  # TkCOMMENT  :: 'ruby-comment'
  # TkREGEXP   :: 'ruby-regexp'
  # TkSTRING   :: 'ruby-string'
  # TkVal      :: 'ruby-value'
  #
  # Other token types are not wrapped in spans.

  def self.to_html token_stream
    token_stream.map do |token|
      next unless token

      # case/when uses ===, so subclasses of each token class match too;
      # the first matching entry wins, hence the specific-before-generic order.
      span_class =
        case token
        when RDoc::RubyToken::TkCONSTANT then 'ruby-constant'
        when RDoc::RubyToken::TkKW       then 'ruby-keyword'
        when RDoc::RubyToken::TkIVAR     then 'ruby-ivar'
        when RDoc::RubyToken::TkOp       then 'ruby-operator'
        when RDoc::RubyToken::TkId       then 'ruby-identifier'
        when RDoc::RubyToken::TkNode     then 'ruby-node'
        when RDoc::RubyToken::TkCOMMENT  then 'ruby-comment'
        when RDoc::RubyToken::TkREGEXP   then 'ruby-regexp'
        when RDoc::RubyToken::TkSTRING   then 'ruby-string'
        when RDoc::RubyToken::TkVal      then 'ruby-value'
        end

      escaped = CGI.escapeHTML token.text

      if span_class then
        %(<span class="#{span_class}">#{escaped}</span>)
      else
        escaped
      end
    end.join
  end

  ##
  # Adds +tokens+ to the collected tokens

  def add_tokens(*tokens)
    tokens.flatten.each do |token|
      @token_stream << token
    end
  end

  alias add_token add_tokens

  ##
  # Starts collecting tokens

  def collect_tokens
    @token_stream = []
  end

  alias start_collecting_tokens collect_tokens

  ##
  # Remove the last token from the collected tokens

  def pop_token
    @token_stream.pop
  end

  ##
  # Current token stream

  attr_reader :token_stream

  ##
  # Returns a string representation of the token stream

  def tokens_to_s
    # nil entries can appear in the stream (see to_html's guard), so drop
    # them before joining the token texts.
    token_stream.compact.map do |token| token.text end.join
  end

end
|
2008-01-07 02:11:15 -05:00
|
|
|
|