# frozen_string_literal: true
##
# A TokenStream is a list of tokens, gathered during the parse of some entity
# (say a method). Entities populate these streams by being registered with the
# lexer. Any class can collect tokens by including TokenStream. From the
# outside, you use such an object by calling the start_collecting_tokens
# method, followed by calls to add_token and pop_token.
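#
# A minimal usage sketch. The class name +Recorder+ and the hand-built token
# hashes are illustrative only; real tokens are produced by RDoc's parser and
# carry at least the +:kind+ and +:text+ keys used throughout this module:
#
#   class Recorder
#     include RDoc::TokenStream
#   end
#
#   r = Recorder.new
#   r.start_collecting_tokens
#   r.add_token({ kind: :on_kw, text: 'def' })
#   r.add_tokens([{ kind: :on_sp, text: ' ' }, { kind: :on_ident, text: 'foo' }])
#   r.pop_token       # removes the :on_ident token again
#   r.tokens_to_s     # => "def "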
module RDoc::TokenStream

  ##
  # Converts +token_stream+ to HTML wrapping various tokens with
  # <tt><span></tt> elements. Some token types are wrapped in spans
  # with the given class names. Other token types are not wrapped in spans.
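  #
  # For example (an illustrative, hand-built token; real streams come from
  # the parser):
  #
  #   RDoc::TokenStream.to_html([{ kind: :on_kw, text: 'def' }])
  #   # => "<span class=\"ruby-keyword\">def</span>"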

  def self.to_html token_stream
    starting_title = false

    token_stream.map do |t|
      next unless t

      style = case t[:kind]
              when :on_const   then 'ruby-constant'
              when :on_kw      then 'ruby-keyword'
              when :on_ivar    then 'ruby-ivar'
              when :on_cvar    then 'ruby-identifier'
              when :on_gvar    then 'ruby-identifier'
              # a bare '=' makes this expression false, so it never matches
              # :on_op and the token is emitted without a span; every other
              # operator gets ruby-operator
              when '=' != t[:text] && :on_op
                               then 'ruby-operator'
              when :on_tlambda then 'ruby-operator'
              when :on_ident   then 'ruby-identifier'
              when :on_label   then 'ruby-value'
              when :on_backref, :on_dstring
                               then 'ruby-node'
              when :on_comment then 'ruby-comment'
              when :on_embdoc  then 'ruby-comment'
              when :on_regexp  then 'ruby-regexp'
              when :on_tstring then 'ruby-string'
              when :on_int, :on_float,
                   :on_rational, :on_imaginary,
                   :on_heredoc,
                   :on_symbol, :on_CHAR then 'ruby-value'
              when :on_heredoc_beg, :on_heredoc_end
                               then 'ruby-identifier'
              end

      # trailing whitespace is stripped from comments, embdocs and heredoc
      # terminators; a stripped newline is re-appended after the closing span
      comment_with_nl = false
      if :on_comment == t[:kind] or :on_embdoc == t[:kind] or :on_heredoc_end == t[:kind]
        comment_with_nl = true if "\n" == t[:text][-1]
        text = t[:text].rstrip
      else
        text = t[:text]
      end

      # the first identifier after a `def` keyword is the method name and
      # gets the extra ruby-title class
      if :on_ident == t[:kind] && starting_title
        starting_title = false
        style = 'ruby-identifier ruby-title'
      end

      if :on_kw == t[:kind] and 'def' == t[:text]
        starting_title = true
      end

      text = CGI.escapeHTML text

      if style then
        "<span class=\"#{style}\">#{text}</span>#{"\n" if comment_with_nl}"
      else
        text
      end
    end.join
  end

  ##
  # Adds +tokens+ to the collected tokens

  def add_tokens(tokens)
    @token_stream.concat(tokens)
  end

  ##
  # Adds one +token+ to the collected tokens

  def add_token(token)
    @token_stream.push(token)
  end

  ##
  # Starts collecting tokens

  def collect_tokens
    @token_stream = []
  end

  alias start_collecting_tokens collect_tokens

  ##
  # Removes the last token from the collected tokens

  def pop_token
    @token_stream.pop
  end

  ##
  # Current token stream

  def token_stream
    @token_stream
  end

  ##
  # Returns a string representation of the token stream
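  #
  # For example, on an including object +r+ holding illustrative tokens:
  #
  #   r.collect_tokens
  #   r.add_tokens([{ kind: :on_kw, text: 'def' }, { kind: :on_sp, text: ' ' },
  #                 { kind: :on_ident, text: 'foo' }])
  #   r.tokens_to_s # => "def foo"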

  def tokens_to_s
    token_stream.compact.map { |token| token[:text] }.join ''
  end

end