|
|
|
#!/usr/local/bin/ruby
|
|
|
|
|
|
|
|
# Parse a Ruby source file, building a set of objects
|
|
|
|
# representing the modules, classes, methods,
|
|
|
|
# requires, and includes we find (these classes
|
|
|
|
# are defined in code_objects.rb).
|
|
|
|
|
|
|
|
# This file contains stuff stolen outright from:
|
|
|
|
#
|
|
|
|
# rtags.rb -
|
|
|
|
# ruby-lex.rb - ruby lexical analyzer
|
|
|
|
# ruby-token.rb - ruby tokens
|
|
|
|
# by Keiju ISHITSUKA (Nippon Rational Inc.)
|
|
|
|
#
|
|
|
|
|
|
|
|
require "e2mmap"
|
|
|
|
require "irb/slex"
|
|
|
|
|
|
|
|
require "rdoc/code_objects"
|
|
|
|
require "rdoc/tokenstream"
|
|
|
|
|
|
|
|
require "rdoc/markup/simple_markup/preprocess"
|
|
|
|
|
|
|
|
require "rdoc/parsers/parserfactory"
|
|
|
|
|
|
|
|
$TOKEN_DEBUG = $DEBUG
|
|
|
|
|
|
|
|
# Definitions of all tokens involved in the lexical analysis
|
|
|
|
|
|
|
|
module RubyToken
|
|
|
|
EXPR_BEG = :EXPR_BEG
|
|
|
|
EXPR_MID = :EXPR_MID
|
|
|
|
EXPR_END = :EXPR_END
|
|
|
|
EXPR_ARG = :EXPR_ARG
|
|
|
|
EXPR_FNAME = :EXPR_FNAME
|
|
|
|
EXPR_DOT = :EXPR_DOT
|
|
|
|
EXPR_CLASS = :EXPR_CLASS
|
|
|
|
|
|
|
|
class Token
|
|
|
|
NO_TEXT = "??".freeze
|
|
|
|
attr :text
|
|
|
|
|
|
|
|
def initialize(line_no, char_no)
|
|
|
|
@line_no = line_no
|
|
|
|
@char_no = char_no
|
|
|
|
@text = NO_TEXT
|
|
|
|
end
|
|
|
|
|
|
|
|
# Because we're used in contexts that expect to return a token,
|
|
|
|
# we set the text string and then return ourselves
|
|
|
|
def set_text(text)
|
|
|
|
@text = text
|
|
|
|
self
|
|
|
|
end
|
|
|
|
|
|
|
|
attr_reader :line_no, :char_no, :text
|
|
|
|
end
|
|
|
|
|
|
|
|
class TkNode < Token
|
|
|
|
attr :node
|
|
|
|
end
|
|
|
|
|
|
|
|
class TkId < Token
|
|
|
|
def initialize(line_no, char_no, name)
|
|
|
|
super(line_no, char_no)
|
|
|
|
@name = name
|
|
|
|
end
|
|
|
|
attr :name
|
|
|
|
end
|
|
|
|
|
|
|
|
class TkKW < TkId
|
|
|
|
end
|
|
|
|
|
|
|
|
class TkVal < Token
|
|
|
|
def initialize(line_no, char_no, value = nil)
|
|
|
|
super(line_no, char_no)
|
|
|
|
set_text(value)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
class TkOp < Token
|
|
|
|
def name
|
|
|
|
self.class.op_name
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
class TkOPASGN < TkOp
|
|
|
|
def initialize(line_no, char_no, op)
|
|
|
|
super(line_no, char_no)
|
|
|
|
op = TkReading2Token[op] unless op.kind_of?(Symbol)
|
|
|
|
@op = op
|
|
|
|
end
|
|
|
|
attr :op
|
|
|
|
end
|
|
|
|
|
|
|
|
class TkUnknownChar < Token
|
|
|
|
def initialize(line_no, char_no, id)
|
|
|
|
super(line_no, char_no)
|
|
|
|
@name = char_no.chr
|
|
|
|
end
|
|
|
|
attr :name
|
|
|
|
end
|
|
|
|
|
|
|
|
class TkError < Token
|
|
|
|
end
|
|
|
|
|
|
|
|
def set_token_position(line, char)
|
|
|
|
@prev_line_no = line
|
|
|
|
@prev_char_no = char
|
|
|
|
end
|
|
|
|
|
|
|
|
def Token(token, value = nil)
|
|
|
|
tk = nil
|
|
|
|
case token
|
|
|
|
when String, Symbol
|
|
|
|
source = token.kind_of?(String) ? TkReading2Token : TkSymbol2Token
|
|
|
|
if (tk = source[token]).nil?
|
|
|
|
IRB.fail TkReading2TokenNoKey, token
|
|
|
|
end
|
|
|
|
tk = Token(tk[0], value)
|
|
|
|
else
|
|
|
|
tk = if (token.ancestors & [TkId, TkVal, TkOPASGN, TkUnknownChar]).empty?
|
|
|
|
token.new(@prev_line_no, @prev_char_no)
|
|
|
|
else
|
|
|
|
token.new(@prev_line_no, @prev_char_no, value)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
tk
|
|
|
|
end
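# A rough sketch of how this factory is used by the lexer rules later in the
# file (illustrative values only):
#
#   set_token_position(1, 0)
#   Token(TkNL).set_text("\n")     # build a token from its class
#   Token("if").set_text("if")     # ...or look it up by its source reading
#   Token(TkINTEGER, "42")         # value-carrying tokens also take a value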
|
|
|
|
|
|
|
|
TokenDefinitions = [
|
|
|
|
[:TkCLASS, TkKW, "class", EXPR_CLASS],
|
|
|
|
[:TkMODULE, TkKW, "module", EXPR_BEG],
|
|
|
|
[:TkDEF, TkKW, "def", EXPR_FNAME],
|
|
|
|
[:TkUNDEF, TkKW, "undef", EXPR_FNAME],
|
|
|
|
[:TkBEGIN, TkKW, "begin", EXPR_BEG],
|
|
|
|
[:TkRESCUE, TkKW, "rescue", EXPR_MID],
|
|
|
|
[:TkENSURE, TkKW, "ensure", EXPR_BEG],
|
|
|
|
[:TkEND, TkKW, "end", EXPR_END],
|
|
|
|
[:TkIF, TkKW, "if", EXPR_BEG, :TkIF_MOD],
|
|
|
|
[:TkUNLESS, TkKW, "unless", EXPR_BEG, :TkUNLESS_MOD],
|
|
|
|
[:TkTHEN, TkKW, "then", EXPR_BEG],
|
|
|
|
[:TkELSIF, TkKW, "elsif", EXPR_BEG],
|
|
|
|
[:TkELSE, TkKW, "else", EXPR_BEG],
|
|
|
|
[:TkCASE, TkKW, "case", EXPR_BEG],
|
|
|
|
[:TkWHEN, TkKW, "when", EXPR_BEG],
|
|
|
|
[:TkWHILE, TkKW, "while", EXPR_BEG, :TkWHILE_MOD],
|
|
|
|
[:TkUNTIL, TkKW, "until", EXPR_BEG, :TkUNTIL_MOD],
|
|
|
|
[:TkFOR, TkKW, "for", EXPR_BEG],
|
|
|
|
[:TkBREAK, TkKW, "break", EXPR_END],
|
|
|
|
[:TkNEXT, TkKW, "next", EXPR_END],
|
|
|
|
[:TkREDO, TkKW, "redo", EXPR_END],
|
|
|
|
[:TkRETRY, TkKW, "retry", EXPR_END],
|
|
|
|
[:TkIN, TkKW, "in", EXPR_BEG],
|
|
|
|
[:TkDO, TkKW, "do", EXPR_BEG],
|
|
|
|
[:TkRETURN, TkKW, "return", EXPR_MID],
|
|
|
|
[:TkYIELD, TkKW, "yield", EXPR_END],
|
|
|
|
[:TkSUPER, TkKW, "super", EXPR_END],
|
|
|
|
[:TkSELF, TkKW, "self", EXPR_END],
|
|
|
|
[:TkNIL, TkKW, "nil", EXPR_END],
|
|
|
|
[:TkTRUE, TkKW, "true", EXPR_END],
|
|
|
|
[:TkFALSE, TkKW, "false", EXPR_END],
|
|
|
|
[:TkAND, TkKW, "and", EXPR_BEG],
|
|
|
|
[:TkOR, TkKW, "or", EXPR_BEG],
|
|
|
|
[:TkNOT, TkKW, "not", EXPR_BEG],
|
|
|
|
[:TkIF_MOD, TkKW],
|
|
|
|
[:TkUNLESS_MOD, TkKW],
|
|
|
|
[:TkWHILE_MOD, TkKW],
|
|
|
|
[:TkUNTIL_MOD, TkKW],
|
|
|
|
[:TkALIAS, TkKW, "alias", EXPR_FNAME],
|
|
|
|
[:TkDEFINED, TkKW, "defined?", EXPR_END],
|
|
|
|
[:TklBEGIN, TkKW, "BEGIN", EXPR_END],
|
|
|
|
[:TklEND, TkKW, "END", EXPR_END],
|
|
|
|
[:Tk__LINE__, TkKW, "__LINE__", EXPR_END],
|
|
|
|
[:Tk__FILE__, TkKW, "__FILE__", EXPR_END],
|
|
|
|
|
|
|
|
[:TkIDENTIFIER, TkId],
|
|
|
|
[:TkFID, TkId],
|
|
|
|
[:TkGVAR, TkId],
|
|
|
|
[:TkIVAR, TkId],
|
|
|
|
[:TkCONSTANT, TkId],
|
|
|
|
|
|
|
|
[:TkINTEGER, TkVal],
|
|
|
|
[:TkFLOAT, TkVal],
|
|
|
|
[:TkSTRING, TkVal],
|
|
|
|
[:TkXSTRING, TkVal],
|
|
|
|
[:TkREGEXP, TkVal],
|
|
|
|
[:TkCOMMENT, TkVal],
|
|
|
|
|
|
|
|
[:TkDSTRING, TkNode],
|
|
|
|
[:TkDXSTRING, TkNode],
|
|
|
|
[:TkDREGEXP, TkNode],
|
|
|
|
[:TkNTH_REF, TkId],
|
|
|
|
[:TkBACK_REF, TkId],
|
|
|
|
|
|
|
|
[:TkUPLUS, TkOp, "+@"],
|
|
|
|
[:TkUMINUS, TkOp, "-@"],
|
|
|
|
[:TkPOW, TkOp, "**"],
|
|
|
|
[:TkCMP, TkOp, "<=>"],
|
|
|
|
[:TkEQ, TkOp, "=="],
|
|
|
|
[:TkEQQ, TkOp, "==="],
|
|
|
|
[:TkNEQ, TkOp, "!="],
|
|
|
|
[:TkGEQ, TkOp, ">="],
|
|
|
|
[:TkLEQ, TkOp, "<="],
|
|
|
|
[:TkANDOP, TkOp, "&&"],
|
|
|
|
[:TkOROP, TkOp, "||"],
|
|
|
|
[:TkMATCH, TkOp, "=~"],
|
|
|
|
[:TkNMATCH, TkOp, "!~"],
|
|
|
|
[:TkDOT2, TkOp, ".."],
|
|
|
|
[:TkDOT3, TkOp, "..."],
|
|
|
|
[:TkAREF, TkOp, "[]"],
|
|
|
|
[:TkASET, TkOp, "[]="],
|
|
|
|
[:TkLSHFT, TkOp, "<<"],
|
|
|
|
[:TkRSHFT, TkOp, ">>"],
|
|
|
|
[:TkCOLON2, TkOp],
|
|
|
|
[:TkCOLON3, TkOp],
|
|
|
|
# [:OPASGN, TkOp], # +=, -= etc. #
|
|
|
|
[:TkASSOC, TkOp, "=>"],
|
|
|
|
[:TkQUESTION, TkOp, "?"], #?
|
|
|
|
[:TkCOLON, TkOp, ":"], #:
|
|
|
|
|
|
|
|
[:TkfLPAREN], # func( #
|
|
|
|
[:TkfLBRACK], # func[ #
|
|
|
|
[:TkfLBRACE], # func{ #
|
|
|
|
[:TkSTAR], # *arg
|
|
|
|
[:TkAMPER], # &arg #
|
|
|
|
[:TkSYMBOL, TkId], # :SYMBOL
|
|
|
|
[:TkSYMBEG, TkId],
|
|
|
|
[:TkGT, TkOp, ">"],
|
|
|
|
[:TkLT, TkOp, "<"],
|
|
|
|
[:TkPLUS, TkOp, "+"],
|
|
|
|
[:TkMINUS, TkOp, "-"],
|
|
|
|
[:TkMULT, TkOp, "*"],
|
|
|
|
[:TkDIV, TkOp, "/"],
|
|
|
|
[:TkMOD, TkOp, "%"],
|
|
|
|
[:TkBITOR, TkOp, "|"],
|
|
|
|
[:TkBITXOR, TkOp, "^"],
|
|
|
|
[:TkBITAND, TkOp, "&"],
|
|
|
|
[:TkBITNOT, TkOp, "~"],
|
|
|
|
[:TkNOTOP, TkOp, "!"],
|
|
|
|
|
|
|
|
[:TkBACKQUOTE, TkOp, "`"],
|
|
|
|
|
|
|
|
[:TkASSIGN, Token, "="],
|
|
|
|
[:TkDOT, Token, "."],
|
|
|
|
[:TkLPAREN, Token, "("], #(exp)
|
|
|
|
[:TkLBRACK, Token, "["], #[arry]
|
|
|
|
[:TkLBRACE, Token, "{"], #{hash}
|
|
|
|
[:TkRPAREN, Token, ")"],
|
|
|
|
[:TkRBRACK, Token, "]"],
|
|
|
|
[:TkRBRACE, Token, "}"],
|
|
|
|
[:TkCOMMA, Token, ","],
|
|
|
|
[:TkSEMICOLON, Token, ";"],
|
|
|
|
|
|
|
|
[:TkRD_COMMENT],
|
|
|
|
[:TkSPACE],
|
|
|
|
[:TkNL],
|
|
|
|
[:TkEND_OF_SCRIPT],
|
|
|
|
|
|
|
|
[:TkBACKSLASH, TkUnknownChar, "\\"],
|
|
|
|
[:TkAT, TkUnknownChar, "@"],
|
|
|
|
[:TkDOLLAR, TkUnknownChar, "\$"], #"
|
|
|
|
]
|
|
|
|
|
|
|
|
# {reading => token_class}
|
|
|
|
# {reading => [token_class, *opt]}
|
|
|
|
TkReading2Token = {}
|
|
|
|
TkSymbol2Token = {}
|
|
|
|
|
|
|
|
def RubyToken.def_token(token_n, super_token = Token, reading = nil, *opts)
|
|
|
|
token_n = token_n.id2name unless token_n.kind_of?(String)
|
|
|
|
if RubyToken.const_defined?(token_n)
|
|
|
|
IRB.fail AlreadyDefinedToken, token_n
|
|
|
|
end
|
|
|
|
|
|
|
|
token_c = Class.new super_token
|
|
|
|
RubyToken.const_set token_n, token_c
|
|
|
|
# token_c.inspect
|
|
|
|
|
|
|
|
if reading
|
|
|
|
if TkReading2Token[reading]
|
|
|
|
IRB.fail TkReading2TokenDuplicateError, token_n, reading
|
|
|
|
end
|
|
|
|
if opts.empty?
|
|
|
|
TkReading2Token[reading] = [token_c]
|
|
|
|
else
|
|
|
|
TkReading2Token[reading] = [token_c].concat(opts)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
TkSymbol2Token[token_n.intern] = token_c
|
|
|
|
|
|
|
|
if token_c <= TkOp
|
|
|
|
token_c.class_eval %{
|
|
|
|
def self.op_name; "#{reading}"; end
|
|
|
|
}
|
|
|
|
end
|
|
|
|
end
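# As a concrete illustration (hypothetical lookups): the entry
# [:TkIF, TkKW, "if", EXPR_BEG, :TkIF_MOD] in TokenDefinitions above makes the
# loop below create a TkIF subclass of TkKW and register it in both tables:
#
#   TkReading2Token["if"]   # => [RubyToken::TkIF, EXPR_BEG, :TkIF_MOD]
#   TkSymbol2Token[:TkIF]   # => RubyToken::TkIF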
|
|
|
|
|
|
|
|
for defs in TokenDefinitions
|
|
|
|
def_token(*defs)
|
|
|
|
end
|
|
|
|
|
|
|
|
NEWLINE_TOKEN = TkNL.new(0,0)
|
|
|
|
NEWLINE_TOKEN.set_text("\n")
|
|
|
|
|
|
|
|
end
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Lexical analyzer for Ruby source
|
|
|
|
|
|
|
|
class RubyLex
|
|
|
|
|
|
|
|
######################################################################
|
|
|
|
#
|
|
|
|
# Read an input stream character by character. We allow for unlimited
|
|
|
|
# ungetting of characters just read.
|
|
|
|
#
|
|
|
|
# We simplify the implementation greatly by reading the entire input
|
|
|
|
# into a buffer initially, and then simply traversing it using
|
|
|
|
# pointers.
|
|
|
|
#
|
|
|
|
# We also have to allow for the <i>here document diversion</i>. This
|
|
|
|
# little gem comes about when the lexer encounters a here
|
|
|
|
# document. At this point we effectively need to split the input
|
|
|
|
# stream into two parts: one to read the body of the here document,
|
|
|
|
# the other to read the rest of the input line where the here
|
|
|
|
# document was initially encountered. For example, we might have
|
|
|
|
#
|
|
|
|
# do_something(<<-A, <<-B)
|
|
|
|
# stuff
|
|
|
|
# for
|
|
|
|
# A
|
|
|
|
# stuff
|
|
|
|
# for
|
|
|
|
# B
|
|
|
|
#
|
|
|
|
# When the lexer encounters the <<-A, it reads until the end of the
|
|
|
|
# line, and keeps it around for later. It then reads the body of the
|
|
|
|
# here document. Once complete, it needs to read the rest of the
|
|
|
|
# original line, but then skip the here document body.
|
|
|
|
#
|
|
|
|
|
|
|
|
class BufferedReader
|
|
|
|
|
|
|
|
attr_reader :line_num
|
|
|
|
|
|
|
|
def initialize(content)
|
|
|
|
if /\t/ =~ content
|
|
|
|
tab_width = Options.instance.tab_width
|
|
|
|
content = content.split(/\n/).map do |line|
|
|
|
|
1 while line.gsub!(/\t+/) { ' ' * (tab_width*$&.length - $`.length % tab_width)} && $~ #`
|
|
|
|
line
|
|
|
|
end .join("\n")
|
|
|
|
end
|
|
|
|
@content = content
|
|
|
|
@content << "\n" unless @content[-1,1] == "\n"
|
|
|
|
@size = @content.size
|
|
|
|
@offset = 0
|
|
|
|
@hwm = 0
|
|
|
|
@line_num = 1
|
|
|
|
@read_back_offset = 0
|
|
|
|
@last_newline = 0
|
|
|
|
@newline_pending = false
|
|
|
|
end
|
|
|
|
|
|
|
|
def column
|
|
|
|
@offset - @last_newline
|
|
|
|
end
|
|
|
|
|
|
|
|
def getc
|
|
|
|
return nil if @offset >= @size
|
|
|
|
ch = @content[@offset, 1]
|
|
|
|
|
|
|
|
@offset += 1
|
|
|
|
@hwm = @offset if @hwm < @offset
|
|
|
|
|
|
|
|
if @newline_pending
|
|
|
|
@line_num += 1
|
|
|
|
@last_newline = @offset - 1
|
|
|
|
@newline_pending = false
|
|
|
|
end
|
|
|
|
|
|
|
|
if ch == "\n"
|
|
|
|
@newline_pending = true
|
|
|
|
end
|
|
|
|
ch
|
|
|
|
end
|
|
|
|
|
|
|
|
def getc_already_read
|
|
|
|
getc
|
|
|
|
end
|
|
|
|
|
|
|
|
def ungetc(ch)
|
|
|
|
raise "unget past beginning of file" if @offset <= 0
|
|
|
|
@offset -= 1
|
|
|
|
if @content[@offset] == ?\n
|
|
|
|
@newline_pending = false
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
def get_read
|
|
|
|
res = @content[@read_back_offset...@offset]
|
|
|
|
@read_back_offset = @offset
|
|
|
|
res
|
|
|
|
end
|
|
|
|
|
|
|
|
def peek(at)
|
|
|
|
pos = @offset + at
|
|
|
|
if pos >= @size
|
|
|
|
nil
|
|
|
|
else
|
|
|
|
@content[pos, 1]
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
def peek_equal(str)
|
|
|
|
@content[@offset, str.length] == str
|
|
|
|
end
|
|
|
|
|
|
|
|
def divert_read_from(reserve)
|
|
|
|
@content[@offset, 0] = reserve
|
|
|
|
@size = @content.size
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
# end of nested class BufferedReader
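# A minimal sketch of the reader's contract (the input string is a made-up
# example):
#
#   r = BufferedReader.new("a = 1\n")
#   r.getc         # => "a"
#   r.ungetc("a")  # the argument is ignored; the read position just moves back
#   r.getc         # => "a" again
#   r.get_read     # => everything consumed since the previous get_read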
|
|
|
|
|
|
|
|
extend Exception2MessageMapper
|
|
|
|
def_exception(:AlreadyDefinedToken, "Already defined token(%s)")
|
|
|
|
def_exception(:TkReading2TokenNoKey, "key nothing(key='%s')")
|
|
|
|
def_exception(:TkSymbol2TokenNoKey, "key nothing(key='%s')")
|
|
|
|
def_exception(:TkReading2TokenDuplicateError,
|
|
|
|
"key duplicate(token_n='%s', key='%s')")
|
|
|
|
def_exception(:SyntaxError, "%s")
|
|
|
|
|
|
|
|
include RubyToken
|
|
|
|
include IRB
|
|
|
|
|
|
|
|
attr_reader :continue
|
|
|
|
attr_reader :lex_state
|
|
|
|
|
|
|
|
def RubyLex.debug?
|
|
|
|
false
|
|
|
|
end
|
|
|
|
|
|
|
|
def initialize(content)
|
|
|
|
lex_init
|
|
|
|
|
|
|
|
@reader = BufferedReader.new(content)
|
|
|
|
|
|
|
|
@exp_line_no = @line_no = 1
|
|
|
|
@base_char_no = 0
|
|
|
|
@indent = 0
|
|
|
|
|
|
|
|
@ltype = nil
|
|
|
|
@quoted = nil
|
|
|
|
@lex_state = EXPR_BEG
|
|
|
|
@space_seen = false
|
|
|
|
|
|
|
|
@continue = false
|
|
|
|
@line = ""
|
|
|
|
|
|
|
|
@skip_space = false
|
|
|
|
@read_auto_clean_up = false
|
|
|
|
@exception_on_syntax_error = true
|
|
|
|
end
|
|
|
|
|
|
|
|
attr_accessor :skip_space
|
|
|
|
attr_accessor :read_auto_clean_up
|
|
|
|
attr_accessor :exception_on_syntax_error
|
|
|
|
attr_reader :indent
|
|
|
|
|
|
|
|
# io functions
|
|
|
|
def line_no
|
|
|
|
@reader.line_num
|
|
|
|
end
|
|
|
|
|
|
|
|
def char_no
|
|
|
|
@reader.column
|
|
|
|
end
|
|
|
|
|
|
|
|
def get_read
|
|
|
|
@reader.get_read
|
|
|
|
end
|
|
|
|
|
|
|
|
def getc
|
|
|
|
@reader.getc
|
|
|
|
end
|
|
|
|
|
|
|
|
def getc_of_rests
|
|
|
|
@reader.getc_already_read
|
|
|
|
end
|
|
|
|
|
|
|
|
def gets
|
|
|
|
c = getc or return
|
|
|
|
l = ""
|
|
|
|
begin
|
|
|
|
l.concat c unless c == "\r"
|
|
|
|
break if c == "\n"
|
|
|
|
end while c = getc
|
|
|
|
l
|
|
|
|
end
|
|
|
|
|
|
|
|
|
|
|
|
def ungetc(c = nil)
|
|
|
|
@reader.ungetc(c)
|
|
|
|
end
|
|
|
|
|
|
|
|
def peek_equal?(str)
|
|
|
|
@reader.peek_equal(str)
|
|
|
|
end
|
|
|
|
|
|
|
|
def peek(i = 0)
|
|
|
|
@reader.peek(i)
|
|
|
|
end
|
|
|
|
|
|
|
|
def lex
|
|
|
|
until (((tk = token).kind_of?(TkNL) || tk.kind_of?(TkEND_OF_SCRIPT)) &&
|
|
|
|
!@continue or
|
|
|
|
tk.nil?)
|
|
|
|
end
|
|
|
|
line = get_read
|
|
|
|
|
|
|
|
if line == "" and tk.kind_of?(TkEND_OF_SCRIPT) || tk.nil?
|
|
|
|
nil
|
|
|
|
else
|
|
|
|
line
|
|
|
|
end
|
|
|
|
end
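# Driving the lexer from outside looks roughly like this (sketch; the source
# string is a made-up example):
#
#   lexer = RubyLex.new("a = 1\nb = 2\n")
#   while line = lexer.lex
#     p line              # each logical line of source, as it was read
#   end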
|
|
|
|
|
|
|
|
def token
|
|
|
|
set_token_position(line_no, char_no)
|
|
|
|
begin
|
|
|
|
begin
|
|
|
|
tk = @OP.match(self)
|
|
|
|
@space_seen = tk.kind_of?(TkSPACE)
|
|
|
|
rescue SyntaxError
|
|
|
|
abort if @exception_on_syntax_error
|
|
|
|
tk = TkError.new(line_no, char_no)
|
|
|
|
end
|
|
|
|
end while @skip_space and tk.kind_of?(TkSPACE)
|
|
|
|
if @read_auto_clean_up
|
|
|
|
get_read
|
|
|
|
end
|
|
|
|
# throw :eof unless tk
|
|
|
|
p tk if $DEBUG
|
|
|
|
tk
|
|
|
|
end
|
|
|
|
|
|
|
|
ENINDENT_CLAUSE = [
|
|
|
|
"case", "class", "def", "do", "for", "if",
|
|
|
|
"module", "unless", "until", "while", "begin" #, "when"
|
|
|
|
]
|
|
|
|
DEINDENT_CLAUSE = ["end" #, "when"
|
|
|
|
]
|
|
|
|
|
|
|
|
PERCENT_LTYPE = {
|
|
|
|
"q" => "\'",
|
|
|
|
"Q" => "\"",
|
|
|
|
"x" => "\`",
|
|
|
|
"r" => "/",
|
|
|
|
"w" => "]"
|
|
|
|
}
|
|
|
|
|
|
|
|
PERCENT_PAREN = {
|
|
|
|
"{" => "}",
|
|
|
|
"[" => "]",
|
|
|
|
"<" => ">",
|
|
|
|
"(" => ")"
|
|
|
|
}
|
|
|
|
|
|
|
|
Ltype2Token = {
|
|
|
|
"\'" => TkSTRING,
|
|
|
|
"\"" => TkSTRING,
|
|
|
|
"\`" => TkXSTRING,
|
|
|
|
"/" => TkREGEXP,
|
|
|
|
"]" => TkDSTRING
|
|
|
|
}
|
|
|
|
Ltype2Token.default = TkSTRING
|
|
|
|
|
|
|
|
DLtype2Token = {
|
|
|
|
"\"" => TkDSTRING,
|
|
|
|
"\`" => TkDXSTRING,
|
|
|
|
"/" => TkDREGEXP,
|
|
|
|
}
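# Taken together, these tables say (for example) that:
#
#   PERCENT_LTYPE["q"]   # => "'"       (%q(..) lexes as a plain TkSTRING)
#   Ltype2Token["/"]     # => TkREGEXP
#   DLtype2Token["/"]    # => TkDREGEXP (used once the literal interpolates)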
|
|
|
|
|
|
|
|
def lex_init()
|
|
|
|
@OP = IRB::SLex.new
|
|
|
|
@OP.def_rules("\0", "\004", "\032") do |chars, io|
|
|
|
|
Token(TkEND_OF_SCRIPT).set_text(chars)
|
|
|
|
end
|
|
|
|
|
|
|
|
@OP.def_rules(" ", "\t", "\f", "\r", "\13") do |chars, io|
|
|
|
|
@space_seen = TRUE
|
|
|
|
while (ch = getc) =~ /[ \t\f\r\13]/
|
|
|
|
chars << ch
|
|
|
|
end
|
|
|
|
ungetc
|
|
|
|
Token(TkSPACE).set_text(chars)
|
|
|
|
end
|
|
|
|
|
|
|
|
@OP.def_rule("#") do
|
|
|
|
|op, io|
|
|
|
|
identify_comment
|
|
|
|
end
|
|
|
|
|
|
|
|
@OP.def_rule("=begin", proc{@prev_char_no == 0 && peek(0) =~ /\s/}) do
|
|
|
|
|op, io|
|
|
|
|
str = op
|
|
|
|
@ltype = "="
|
|
|
|
|
|
|
|
|
|
|
|
begin
|
|
|
|
line = ""
|
|
|
|
begin
|
|
|
|
ch = getc
|
|
|
|
line << ch
|
|
|
|
end until ch == "\n"
|
|
|
|
str << line
|
|
|
|
end until line =~ /^=end/
|
|
|
|
|
|
|
|
ungetc
|
|
|
|
|
|
|
|
@ltype = nil
|
|
|
|
|
|
|
|
if str =~ /\A=begin\s+rdoc/i
|
|
|
|
str.sub!(/\A=begin.*\n/, '')
|
|
|
|
str.sub!(/^=end.*/m, '')
|
|
|
|
Token(TkCOMMENT).set_text(str)
|
|
|
|
else
|
|
|
|
Token(TkRD_COMMENT)#.set_text(str)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
@OP.def_rule("\n") do
|
|
|
|
print "\\n\n" if RubyLex.debug?
|
|
|
|
case @lex_state
|
|
|
|
when EXPR_BEG, EXPR_FNAME, EXPR_DOT
|
|
|
|
@continue = TRUE
|
|
|
|
else
|
|
|
|
@continue = FALSE
|
|
|
|
@lex_state = EXPR_BEG
|
|
|
|
end
|
|
|
|
Token(TkNL).set_text("\n")
|
|
|
|
end
|
|
|
|
|
|
|
|
@OP.def_rules("*", "**",
|
|
|
|
"!", "!=", "!~",
|
|
|
|
"=", "==", "===",
|
|
|
|
"=~", "<=>",
|
|
|
|
"<", "<=",
|
|
|
|
">", ">=", ">>") do
|
|
|
|
|op, io|
|
|
|
|
@lex_state = EXPR_BEG
|
|
|
|
Token(op).set_text(op)
|
|
|
|
end
|
|
|
|
|
|
|
|
@OP.def_rules("<<") do
|
|
|
|
|op, io|
|
|
|
|
tk = nil
|
|
|
|
if @lex_state != EXPR_END && @lex_state != EXPR_CLASS &&
|
|
|
|
(@lex_state != EXPR_ARG || @space_seen)
|
|
|
|
c = peek(0)
|
|
|
|
if /[-\w_\"\'\`]/ =~ c
|
|
|
|
tk = identify_here_document
|
|
|
|
end
|
|
|
|
end
|
|
|
|
if !tk
|
|
|
|
@lex_state = EXPR_BEG
|
|
|
|
tk = Token(op).set_text(op)
|
|
|
|
end
|
|
|
|
tk
|
|
|
|
end
|
|
|
|
|
|
|
|
@OP.def_rules("'", '"') do
|
|
|
|
|op, io|
|
|
|
|
identify_string(op)
|
|
|
|
end
|
|
|
|
|
|
|
|
@OP.def_rules("`") do
|
|
|
|
|op, io|
|
|
|
|
if @lex_state == EXPR_FNAME
|
|
|
|
Token(op).set_text(op)
|
|
|
|
else
|
|
|
|
identify_string(op)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
@OP.def_rules('?') do
|
|
|
|
|op, io|
|
|
|
|
if @lex_state == EXPR_END
|
|
|
|
@lex_state = EXPR_BEG
|
|
|
|
Token(TkQUESTION).set_text(op)
|
|
|
|
else
|
|
|
|
ch = getc
|
|
|
|
if @lex_state == EXPR_ARG && ch !~ /\s/
|
|
|
|
ungetc
|
|
|
|
@lex_state = EXPR_BEG;
|
|
|
|
Token(TkQUESTION).set_text(op)
|
|
|
|
else
|
|
|
|
str = op
|
|
|
|
str << ch
|
|
|
|
if (ch == '\\') #'
|
|
|
|
str << read_escape
|
|
|
|
end
|
|
|
|
@lex_state = EXPR_END
|
|
|
|
Token(TkINTEGER).set_text(str)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
@OP.def_rules("&", "&&", "|", "||") do
|
|
|
|
|op, io|
|
|
|
|
@lex_state = EXPR_BEG
|
|
|
|
Token(op).set_text(op)
|
|
|
|
end
|
|
|
|
|
|
|
|
@OP.def_rules("+=", "-=", "*=", "**=",
|
|
|
|
"&=", "|=", "^=", "<<=", ">>=", "||=", "&&=") do
|
|
|
|
|op, io|
|
|
|
|
@lex_state = EXPR_BEG
|
|
|
|
op =~ /^(.*)=$/
|
|
|
|
Token(TkOPASGN, $1).set_text(op)
|
|
|
|
end
|
|
|
|
|
|
|
|
@OP.def_rule("+@", proc{@lex_state == EXPR_FNAME}) do |op, io|
|
|
|
|
Token(TkUPLUS).set_text(op)
|
|
|
|
end
|
|
|
|
|
|
|
|
@OP.def_rule("-@", proc{@lex_state == EXPR_FNAME}) do |op, io|
|
|
|
|
Token(TkUMINUS).set_text(op)
|
|
|
|
end
|
|
|
|
|
|
|
|
@OP.def_rules("+", "-") do
|
|
|
|
|op, io|
|
|
|
|
catch(:RET) do
|
|
|
|
if @lex_state == EXPR_ARG
|
|
|
|
if @space_seen and peek(0) =~ /[0-9]/
|
|
|
|
throw :RET, identify_number(op)
|
|
|
|
else
|
|
|
|
@lex_state = EXPR_BEG
|
|
|
|
end
|
|
|
|
elsif @lex_state != EXPR_END and peek(0) =~ /[0-9]/
|
|
|
|
throw :RET, identify_number(op)
|
|
|
|
else
|
|
|
|
@lex_state = EXPR_BEG
|
|
|
|
end
|
|
|
|
Token(op).set_text(op)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
@OP.def_rule(".") do
|
|
|
|
@lex_state = EXPR_BEG
|
|
|
|
if peek(0) =~ /[0-9]/
|
|
|
|
ungetc
|
|
|
|
identify_number("")
|
|
|
|
else
|
|
|
|
# for obj.if
|
|
|
|
@lex_state = EXPR_DOT
|
|
|
|
Token(TkDOT).set_text(".")
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
@OP.def_rules("..", "...") do
|
|
|
|
|op, io|
|
|
|
|
@lex_state = EXPR_BEG
|
|
|
|
Token(op).set_text(op)
|
|
|
|
end
|
|
|
|
|
|
|
|
lex_int2
|
|
|
|
end
|
|
|
|
|
|
|
|
def lex_int2
|
|
|
|
@OP.def_rules("]", "}", ")") do
|
|
|
|
|op, io|
|
|
|
|
@lex_state = EXPR_END
|
|
|
|
@indent -= 1
|
|
|
|
Token(op).set_text(op)
|
|
|
|
end
|
|
|
|
|
|
|
|
@OP.def_rule(":") do
|
|
|
|
if @lex_state == EXPR_END || peek(0) =~ /\s/
|
|
|
|
@lex_state = EXPR_BEG
|
|
|
|
tk = Token(TkCOLON)
|
|
|
|
else
|
|
|
|
@lex_state = EXPR_FNAME;
|
|
|
|
tk = Token(TkSYMBEG)
|
|
|
|
end
|
|
|
|
tk.set_text(":")
|
|
|
|
end
|
|
|
|
|
|
|
|
@OP.def_rule("::") do
|
|
|
|
# p @lex_state.id2name, @space_seen
|
|
|
|
if @lex_state == EXPR_BEG or @lex_state == EXPR_ARG && @space_seen
|
|
|
|
@lex_state = EXPR_BEG
|
|
|
|
tk = Token(TkCOLON3)
|
|
|
|
else
|
|
|
|
@lex_state = EXPR_DOT
|
|
|
|
tk = Token(TkCOLON2)
|
|
|
|
end
|
|
|
|
tk.set_text("::")
|
|
|
|
end
|
|
|
|
|
|
|
|
@OP.def_rule("/") do
|
|
|
|
|op, io|
|
|
|
|
if @lex_state == EXPR_BEG || @lex_state == EXPR_MID
|
|
|
|
identify_string(op)
|
|
|
|
elsif peek(0) == '='
|
|
|
|
getc
|
|
|
|
@lex_state = EXPR_BEG
|
|
|
|
Token(TkOPASGN, :/).set_text("/=") #")
|
|
|
|
elsif @lex_state == EXPR_ARG and @space_seen and peek(0) !~ /\s/
|
|
|
|
identify_string(op)
|
|
|
|
else
|
|
|
|
@lex_state = EXPR_BEG
|
|
|
|
Token("/").set_text(op)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
@OP.def_rules("^") do
|
|
|
|
@lex_state = EXPR_BEG
|
|
|
|
Token("^").set_text("^")
|
|
|
|
end
|
|
|
|
|
|
|
|
# @OP.def_rules("^=") do
|
|
|
|
# @lex_state = EXPR_BEG
|
|
|
|
# Token(TkOPASGN, :^)
|
|
|
|
# end
|
|
|
|
|
|
|
|
@OP.def_rules(",", ";") do
|
|
|
|
|op, io|
|
|
|
|
@lex_state = EXPR_BEG
|
|
|
|
Token(op).set_text(op)
|
|
|
|
end
|
|
|
|
|
|
|
|
@OP.def_rule("~") do
|
|
|
|
@lex_state = EXPR_BEG
|
|
|
|
Token("~").set_text("~")
|
|
|
|
end
|
|
|
|
|
|
|
|
@OP.def_rule("~@", proc{@lex_state = EXPR_FNAME}) do
|
|
|
|
@lex_state = EXPR_BEG
|
|
|
|
Token("~").set_text("~@")
|
|
|
|
end
|
|
|
|
|
|
|
|
@OP.def_rule("(") do
|
|
|
|
@indent += 1
|
|
|
|
if @lex_state == EXPR_BEG || @lex_state == EXPR_MID
|
|
|
|
@lex_state = EXPR_BEG
|
|
|
|
tk = Token(TkfLPAREN)
|
|
|
|
else
|
|
|
|
@lex_state = EXPR_BEG
|
|
|
|
tk = Token(TkLPAREN)
|
|
|
|
end
|
|
|
|
tk.set_text("(")
|
|
|
|
end
|
|
|
|
|
|
|
|
@OP.def_rule("[]", proc{@lex_state == EXPR_FNAME}) do
|
|
|
|
Token("[]").set_text("[]")
|
|
|
|
end
|
|
|
|
|
|
|
|
@OP.def_rule("[]=", proc{@lex_state == EXPR_FNAME}) do
|
|
|
|
Token("[]=").set_text("[]=")
|
|
|
|
end
|
|
|
|
|
|
|
|
@OP.def_rule("[") do
|
|
|
|
@indent += 1
|
|
|
|
if @lex_state == EXPR_FNAME
|
|
|
|
t = Token(TkfLBRACK)
|
|
|
|
else
|
|
|
|
if @lex_state == EXPR_BEG || @lex_state == EXPR_MID
|
|
|
|
t = Token(TkLBRACK)
|
|
|
|
elsif @lex_state == EXPR_ARG && @space_seen
|
|
|
|
t = Token(TkLBRACK)
|
|
|
|
else
|
|
|
|
t = Token(TkfLBRACK)
|
|
|
|
end
|
|
|
|
@lex_state = EXPR_BEG
|
|
|
|
end
|
|
|
|
t.set_text("[")
|
|
|
|
end
|
|
|
|
|
|
|
|
@OP.def_rule("{") do
|
|
|
|
@indent += 1
|
|
|
|
if @lex_state != EXPR_END && @lex_state != EXPR_ARG
|
|
|
|
t = Token(TkLBRACE)
|
|
|
|
else
|
|
|
|
t = Token(TkfLBRACE)
|
|
|
|
end
|
|
|
|
@lex_state = EXPR_BEG
|
|
|
|
t.set_text("{")
|
|
|
|
end
|
|
|
|
|
|
|
|
@OP.def_rule('\\') do #'
|
|
|
|
if getc == "\n"
|
|
|
|
@space_seen = true
|
|
|
|
@continue = true
|
|
|
|
Token(TkSPACE).set_text("\\\n")
|
|
|
|
else
|
|
|
|
ungetc
|
|
|
|
Token("\\").set_text("\\") #"
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
@OP.def_rule('%') do
|
|
|
|
|op, io|
|
|
|
|
if @lex_state == EXPR_BEG || @lex_state == EXPR_MID
|
|
|
|
identify_quotation('%')
|
|
|
|
elsif peek(0) == '='
|
|
|
|
getc
|
|
|
|
Token(TkOPASGN, "%").set_text("%=")
|
|
|
|
elsif @lex_state == EXPR_ARG and @space_seen and peek(0) !~ /\s/
|
|
|
|
identify_quotation('%')
|
|
|
|
else
|
|
|
|
@lex_state = EXPR_BEG
|
|
|
|
Token("%").set_text("%")
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
@OP.def_rule('$') do #'
|
|
|
|
identify_gvar
|
|
|
|
end
|
|
|
|
|
|
|
|
@OP.def_rule('@') do
|
|
|
|
if peek(0) =~ /[@\w_]/
|
|
|
|
ungetc
|
|
|
|
identify_identifier
|
|
|
|
else
|
|
|
|
Token("@").set_text("@")
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
# @OP.def_rule("def", proc{|op, io| /\s/ =~ io.peek(0)}) do
|
|
|
|
# |op, io|
|
|
|
|
# @indent += 1
|
|
|
|
# @lex_state = EXPR_FNAME
|
|
|
|
# # @lex_state = EXPR_END
|
|
|
|
# # until @rests[0] == "\n" or @rests[0] == ";"
|
|
|
|
# # rests.shift
|
|
|
|
# # end
|
|
|
|
# end
|
|
|
|
|
|
|
|
@OP.def_rule("__END__", proc{@prev_char_no == 0 && peek(0) =~ /[\r\n]/}) do
|
|
|
|
throw :eof
|
|
|
|
end
|
|
|
|
|
|
|
|
@OP.def_rule("") do
|
|
|
|
|op, io|
|
|
|
|
printf "MATCH: start %s: %s\n", op, io.inspect if RubyLex.debug?
|
|
|
|
if peek(0) =~ /[0-9]/
|
|
|
|
t = identify_number("")
|
|
|
|
elsif peek(0) =~ /[\w_]/
|
|
|
|
t = identify_identifier
|
|
|
|
end
|
|
|
|
printf "MATCH: end %s: %s\n", op, io.inspect if RubyLex.debug?
|
|
|
|
t
|
|
|
|
end
|
|
|
|
|
|
|
|
p @OP if RubyLex.debug?
|
|
|
|
end
|
|
|
|
|
|
|
|
def identify_gvar
|
|
|
|
@lex_state = EXPR_END
|
|
|
|
str = "$"
|
|
|
|
|
|
|
|
tk = case ch = getc
|
|
|
|
when /[~_*$?!@\/\\;,=:<>".]/ #"
|
|
|
|
str << ch
|
|
|
|
Token(TkGVAR, str)
|
|
|
|
|
|
|
|
when "-"
|
|
|
|
str << "-" << getc
|
|
|
|
Token(TkGVAR, str)
|
|
|
|
|
|
|
|
when "&", "`", "'", "+"
|
|
|
|
str << ch
|
|
|
|
Token(TkBACK_REF, str)
|
|
|
|
|
|
|
|
when /[1-9]/
|
|
|
|
str << ch
|
|
|
|
while (ch = getc) =~ /[0-9]/
|
|
|
|
str << ch
|
|
|
|
end
|
|
|
|
ungetc
|
|
|
|
Token(TkNTH_REF)
|
|
|
|
when /\w/
|
|
|
|
ungetc
|
|
|
|
ungetc
|
|
|
|
return identify_identifier
|
|
|
|
else
|
|
|
|
ungetc
|
|
|
|
Token("$")
|
|
|
|
end
|
|
|
|
tk.set_text(str)
|
|
|
|
end
|
|
|
|
|
|
|
|
def identify_identifier
|
|
|
|
token = ""
|
|
|
|
token.concat getc if peek(0) =~ /[$@]/
|
|
|
|
token.concat getc if peek(0) == "@"
|
|
|
|
|
|
|
|
while (ch = getc) =~ /\w|_/
|
|
|
|
print ":", ch, ":" if RubyLex.debug?
|
|
|
|
token.concat ch
|
|
|
|
end
|
|
|
|
ungetc
|
|
|
|
|
|
|
|
if ch == "!" or ch == "?"
|
|
|
|
token.concat getc
|
|
|
|
end
|
|
|
|
# fix token
|
|
|
|
|
|
|
|
# $stderr.puts "identifier - #{token}, state = #@lex_state"
|
|
|
|
|
|
|
|
case token
|
|
|
|
when /^\$/
|
|
|
|
return Token(TkGVAR, token).set_text(token)
|
|
|
|
when /^\@/
|
|
|
|
@lex_state = EXPR_END
|
|
|
|
return Token(TkIVAR, token).set_text(token)
|
|
|
|
end
|
|
|
|
|
|
|
|
if @lex_state != EXPR_DOT
|
|
|
|
print token, "\n" if RubyLex.debug?
|
|
|
|
|
|
|
|
token_c, *trans = TkReading2Token[token]
|
|
|
|
if token_c
|
|
|
|
# reserved word?
|
|
|
|
|
|
|
|
if (@lex_state != EXPR_BEG &&
|
|
|
|
@lex_state != EXPR_FNAME &&
|
|
|
|
trans[1])
|
|
|
|
# modifiers
|
|
|
|
token_c = TkSymbol2Token[trans[1]]
|
|
|
|
@lex_state = trans[0]
|
|
|
|
else
|
|
|
|
if @lex_state != EXPR_FNAME
|
|
|
|
if ENINDENT_CLAUSE.include?(token)
|
|
|
|
@indent += 1
|
|
|
|
elsif DEINDENT_CLAUSE.include?(token)
|
|
|
|
@indent -= 1
|
|
|
|
end
|
|
|
|
@lex_state = trans[0]
|
|
|
|
else
|
|
|
|
@lex_state = EXPR_END
|
|
|
|
end
|
|
|
|
end
|
|
|
|
return Token(token_c, token).set_text(token)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
if @lex_state == EXPR_FNAME
|
|
|
|
@lex_state = EXPR_END
|
|
|
|
if peek(0) == '='
|
|
|
|
token.concat getc
|
|
|
|
end
|
|
|
|
elsif @lex_state == EXPR_BEG || @lex_state == EXPR_DOT
|
|
|
|
@lex_state = EXPR_ARG
|
|
|
|
else
|
|
|
|
@lex_state = EXPR_END
|
|
|
|
end
|
|
|
|
|
|
|
|
if token[0, 1] =~ /[A-Z]/
|
|
|
|
return Token(TkCONSTANT, token).set_text(token)
|
|
|
|
elsif token[token.size - 1, 1] =~ /[!?]/
|
|
|
|
return Token(TkFID, token).set_text(token)
|
|
|
|
else
|
|
|
|
return Token(TkIDENTIFIER, token).set_text(token)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
def identify_here_document
|
|
|
|
ch = getc
|
|
|
|
if ch == "-"
|
|
|
|
ch = getc
|
|
|
|
indent = true
|
|
|
|
end
|
|
|
|
if /['"`]/ =~ ch # '
|
|
|
|
lt = ch
|
|
|
|
quoted = ""
|
|
|
|
while (c = getc) && c != lt
|
|
|
|
quoted.concat c
|
|
|
|
end
|
|
|
|
else
|
|
|
|
lt = '"'
|
|
|
|
quoted = ch.dup
|
|
|
|
while (c = getc) && c =~ /\w/
|
|
|
|
quoted.concat c
|
|
|
|
end
|
|
|
|
ungetc
|
|
|
|
end
|
|
|
|
|
|
|
|
ltback, @ltype = @ltype, lt
|
|
|
|
reserve = ""
|
|
|
|
|
|
|
|
while ch = getc
|
|
|
|
reserve << ch
|
|
|
|
if ch == "\\" #"
|
|
|
|
ch = getc
|
|
|
|
reserve << ch
|
|
|
|
elsif ch == "\n"
|
|
|
|
break
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
str = ""
|
|
|
|
while (l = gets)
|
|
|
|
l.chomp!
|
|
|
|
l.strip! if indent
|
|
|
|
break if l == quoted
|
|
|
|
str << l.chomp << "\n"
|
|
|
|
end
|
|
|
|
|
|
|
|
@reader.divert_read_from(reserve)
|
|
|
|
|
|
|
|
@ltype = ltback
|
|
|
|
@lex_state = EXPR_END
|
|
|
|
Token(Ltype2Token[lt], str).set_text(str.dump)
|
|
|
|
end
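# For example (made-up input), lexing
#
#   cmd = <<-TEXT
#     hello
#   TEXT
#
# produces a single TkSTRING token whose value is the body ("hello\n", with
# leading space stripped because of the "-" form), while the rest of the
# original line is pushed back via divert_read_from so it is lexed after the
# terminator.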
|
|
|
|
|
|
|
|
def identify_quotation(initial_char)
|
|
|
|
ch = getc
|
|
|
|
if lt = PERCENT_LTYPE[ch]
|
|
|
|
initial_char += ch
|
|
|
|
ch = getc
|
|
|
|
elsif ch =~ /\W/
|
|
|
|
lt = "\""
|
|
|
|
else
|
|
|
|
RubyLex.fail SyntaxError, "unknown type of %string ('#{ch}')"
|
|
|
|
end
|
|
|
|
# if ch !~ /\W/
|
|
|
|
# ungetc
|
|
|
|
# next
|
|
|
|
# end
|
|
|
|
#@ltype = lt
|
|
|
|
@quoted = ch unless @quoted = PERCENT_PAREN[ch]
|
|
|
|
identify_string(lt, @quoted, ch, initial_char)
|
|
|
|
end
|
|
|
|
|
|
|
|
def identify_number(start)
|
|
|
|
str = start.dup
|
|
|
|
|
|
|
|
if start == "+" or start == "-" or start == ""
|
|
|
|
start = getc
|
|
|
|
str << start
|
|
|
|
end
|
|
|
|
|
|
|
|
@lex_state = EXPR_END
|
|
|
|
|
|
|
|
if start == "0"
|
|
|
|
if peek(0) == "x"
|
|
|
|
ch = getc
|
|
|
|
str << ch
|
|
|
|
match = /[0-9a-f_]/
|
|
|
|
else
|
|
|
|
match = /[0-7_]/
|
|
|
|
end
|
|
|
|
while ch = getc
|
|
|
|
if ch !~ match
|
|
|
|
ungetc
|
|
|
|
break
|
|
|
|
else
|
|
|
|
str << ch
|
|
|
|
end
|
|
|
|
end
|
|
|
|
return Token(TkINTEGER).set_text(str)
|
|
|
|
end
|
|
|
|
|
|
|
|
type = TkINTEGER
|
|
|
|
allow_point = TRUE
|
|
|
|
allow_e = TRUE
|
|
|
|
while ch = getc
|
|
|
|
case ch
|
|
|
|
when /[0-9_]/
|
|
|
|
str << ch
|
|
|
|
|
|
|
|
when allow_point && "."
|
|
|
|
type = TkFLOAT
|
|
|
|
if peek(0) !~ /[0-9]/
|
|
|
|
ungetc
|
|
|
|
break
|
|
|
|
end
|
|
|
|
str << ch
|
|
|
|
allow_point = false
|
|
|
|
|
|
|
|
when allow_e && "e", allow_e && "E"
|
|
|
|
str << ch
|
|
|
|
type = TkFLOAT
|
|
|
|
if peek(0) =~ /[+-]/
|
|
|
|
str << getc
|
|
|
|
end
|
|
|
|
allow_e = false
|
|
|
|
allow_point = false
|
|
|
|
else
|
|
|
|
ungetc
|
|
|
|
break
|
|
|
|
end
|
|
|
|
end
|
|
|
|
Token(type).set_text(str)
|
|
|
|
end
|
|
|
|
|
|
|
|
def identify_string(ltype, quoted = ltype, opener=nil, initial_char = nil)
|
|
|
|
@ltype = ltype
|
|
|
|
@quoted = quoted
|
|
|
|
subtype = nil
|
|
|
|
|
|
|
|
str = ""
|
|
|
|
str << initial_char if initial_char
|
|
|
|
str << (opener||quoted)
|
|
|
|
|
|
|
|
nest = 0
|
|
|
|
begin
|
|
|
|
while ch = getc
|
|
|
|
str << ch
|
|
|
|
if @quoted == ch
|
|
|
|
if nest == 0
|
|
|
|
break
|
|
|
|
else
|
|
|
|
nest -= 1
|
|
|
|
end
|
|
|
|
elsif opener == ch
|
|
|
|
nest += 1
|
|
|
|
elsif @ltype != "'" && @ltype != "]" and ch == "#"
|
|
|
|
ch = getc
|
|
|
|
if ch == "{"
|
|
|
|
subtype = true
|
|
|
|
str << ch << skip_inner_expression
|
|
|
|
else
|
|
|
|
ungetc(ch)
|
|
|
|
end
|
|
|
|
elsif ch == '\\' #'
|
|
|
|
str << read_escape
|
|
|
|
end
|
|
|
|
end
|
|
|
|
if @ltype == "/"
|
|
|
|
if peek(0) =~ /i|o|n|e|s/
|
|
|
|
str << getc
|
|
|
|
end
|
|
|
|
end
|
|
|
|
if subtype
|
|
|
|
Token(DLtype2Token[ltype], str)
|
|
|
|
else
|
|
|
|
Token(Ltype2Token[ltype], str)
|
|
|
|
end.set_text(str)
|
|
|
|
ensure
|
|
|
|
@ltype = nil
|
|
|
|
@quoted = nil
|
|
|
|
@lex_state = EXPR_END
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
def skip_inner_expression
|
|
|
|
res = ""
|
|
|
|
nest = 0
|
|
|
|
while (ch = getc)
|
|
|
|
res << ch
|
|
|
|
if ch == '}'
|
|
|
|
break if nest.zero?
|
|
|
|
nest -= 1
|
|
|
|
elsif ch == '{'
|
|
|
|
nest += 1
|
|
|
|
end
|
|
|
|
end
|
|
|
|
res
|
|
|
|
end
|
|
|
|
|
|
|
|
def identify_comment
|
|
|
|
@ltype = "#"
|
|
|
|
comment = "#"
|
|
|
|
while ch = getc
|
|
|
|
if ch == "\\"
|
|
|
|
ch = getc
|
|
|
|
if ch == "\n"
|
|
|
|
ch = " "
|
|
|
|
else
|
|
|
|
comment << "\\"
|
|
|
|
end
|
|
|
|
else
|
|
|
|
if ch == "\n"
|
|
|
|
@ltype = nil
|
|
|
|
ungetc
|
|
|
|
break
|
|
|
|
end
|
|
|
|
end
|
|
|
|
comment << ch
|
|
|
|
end
|
|
|
|
return Token(TkCOMMENT).set_text(comment)
|
|
|
|
end
|
|
|
|
|
|
|
|
def read_escape
|
|
|
|
res = ""
|
|
|
|
case ch = getc
|
|
|
|
when /[0-7]/
|
|
|
|
ungetc ch
|
|
|
|
3.times do
|
|
|
|
case ch = getc
|
|
|
|
when /[0-7]/
|
|
|
|
when nil
|
|
|
|
break
|
|
|
|
else
|
|
|
|
ungetc
|
|
|
|
break
|
|
|
|
end
|
|
|
|
res << ch
|
|
|
|
end
|
|
|
|
|
|
|
|
when "x"
|
|
|
|
res << ch
|
|
|
|
2.times do
|
|
|
|
case ch = getc
|
|
|
|
when /[0-9a-fA-F]/
|
|
|
|
when nil
|
|
|
|
break
|
|
|
|
else
|
|
|
|
ungetc
|
|
|
|
break
|
|
|
|
end
|
|
|
|
res << ch
|
|
|
|
end
|
|
|
|
|
|
|
|
when "M"
|
|
|
|
res << ch
|
|
|
|
if (ch = getc) != '-'
|
|
|
|
ungetc
|
|
|
|
else
|
|
|
|
res << ch
|
|
|
|
if (ch = getc) == "\\" #"
|
|
|
|
res << ch
|
|
|
|
res << read_escape
|
|
|
|
else
|
|
|
|
res << ch
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
when "C", "c" #, "^"
|
|
|
|
res << ch
|
|
|
|
if ch == "C" and (ch = getc) != "-"
|
|
|
|
ungetc
|
|
|
|
else
|
|
|
|
res << ch
|
|
|
|
if (ch = getc) == "\\" #"
|
|
|
|
res << ch
|
|
|
|
res << read_escape
|
|
|
|
else
|
|
|
|
res << ch
|
|
|
|
end
|
|
|
|
end
|
|
|
|
else
|
|
|
|
res << ch
|
|
|
|
end
|
|
|
|
res
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Extract code elements from a source file, returning a TopLevel
|
|
|
|
# object containing the constituent file elements.
|
|
|
|
#
|
|
|
|
# This file is based on rtags
|
|
|
|
|
|
|
|
module RDoc
|
|
|
|
|
|
|
|
GENERAL_MODIFIERS = [ 'nodoc' ].freeze
|
|
|
|
|
|
|
|
CLASS_MODIFIERS = GENERAL_MODIFIERS
|
|
|
|
|
|
|
|
ATTR_MODIFIERS = GENERAL_MODIFIERS
|
|
|
|
|
|
|
|
CONSTANT_MODIFIERS = GENERAL_MODIFIERS
|
|
|
|
|
|
|
|
METHOD_MODIFIERS = GENERAL_MODIFIERS +
|
|
|
|
[ 'arg', 'args', 'yield', 'yields', 'notnew', 'not-new', 'not_new', 'doc' ]
|
|
|
|
|
|
|
|
|
|
|
|
class RubyParser
|
|
|
|
include RubyToken
|
|
|
|
include TokenStream
|
|
|
|
|
|
|
|
extend ParserFactory
|
|
|
|
|
|
|
|
parse_files_matching(/\.rbw?$/)
|
|
|
|
|
|
|
|
|
|
|
|
def initialize(top_level, file_name, content, options, stats)
|
|
|
|
@options = options
|
|
|
|
@stats = stats
|
|
|
|
@size = 0
|
|
|
|
@token_listeners = nil
|
|
|
|
@input_file_name = file_name
|
|
|
|
@scanner = RubyLex.new(content)
|
|
|
|
@scanner.exception_on_syntax_error = false
|
|
|
|
@top_level = top_level
|
|
|
|
@progress = $stderr unless options.quiet
|
|
|
|
end
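# RDoc does not normally construct this class by hand; ParserFactory picks it
# for files matching /\.rbw?$/ and drives it roughly like this (sketch; the
# options and stats objects come from the surrounding rdoc driver):
#
#   top_level = TopLevel.new(file_name)
#   parser    = RubyParser.new(top_level, file_name, File.read(file_name),
#                              options, stats)
#   parser.scan   # returns top_level, populated with classes and methods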
|
|
|
|
|
|
|
|
def scan
|
|
|
|
@tokens = []
|
|
|
|
@unget_read = []
|
|
|
|
@read = []
|
|
|
|
catch(:eof) do
|
|
|
|
catch(:enddoc) do
|
|
|
|
begin
|
|
|
|
parse_toplevel_statements(@top_level)
|
|
|
|
rescue Exception => e
|
|
|
|
$stderr.puts "\n\n"
|
|
|
|
$stderr.puts "RDoc failure in #@input_file_name at or around " +
|
|
|
|
"line #{@scanner.line_no} column #{@scanner.char_no}"
|
|
|
|
$stderr.puts
|
|
|
|
$stderr.puts "Before reporting this, could you check that the file"
|
|
|
|
$stderr.puts "you're documenting compiles cleanly--RDoc is not a"
|
|
|
|
$stderr.puts "full Ruby parser, and gets confused easily if fed"
|
|
|
|
$stderr.puts "invalid programs."
|
|
|
|
$stderr.puts
|
|
|
|
$stderr.puts "The internal error was:\n\n"
|
|
|
|
|
|
|
|
e.set_backtrace(e.backtrace[0,4])
|
|
|
|
raise
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
@top_level
|
|
|
|
end
|
|
|
|
|
|
|
|
private
|
|
|
|
|
|
|
|
def make_message(msg)
|
|
|
|
prefix = "\n" + @input_file_name + ":"
|
|
|
|
if @scanner
|
|
|
|
prefix << "#{@scanner.line_no}:#{@scanner.char_no}: "
|
|
|
|
end
|
|
|
|
return prefix + msg
|
|
|
|
end
|
|
|
|
|
|
|
|
def warn(msg)
|
|
|
|
return if @options.quiet
|
|
|
|
msg = make_message msg
|
|
|
|
$stderr.puts msg
|
|
|
|
end
|
|
|
|
|
|
|
|
def error(msg)
|
|
|
|
msg = make_message msg
|
|
|
|
$stderr.puts msg
|
|
|
|
exit(1)
|
|
|
|
end
|
|
|
|
|
|
|
|
def progress(char)
|
|
|
|
unless @options.quiet
|
|
|
|
@progress.print(char)
|
|
|
|
@progress.flush
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
def add_token_listener(obj)
|
|
|
|
@token_listeners ||= []
|
|
|
|
@token_listeners << obj
|
|
|
|
end
|
|
|
|
|
|
|
|
def remove_token_listener(obj)
|
|
|
|
@token_listeners.delete(obj)
|
|
|
|
end
|
|
|
|
|
|
|
|
def get_tk
|
|
|
|
tk = nil
|
|
|
|
if @tokens.empty?
|
|
|
|
tk = @scanner.token
|
|
|
|
@read.push @scanner.get_read
|
|
|
|
puts "get_tk1 => #{tk.inspect}" if $TOKEN_DEBUG
|
|
|
|
else
|
|
|
|
@read.push @unget_read.shift
|
|
|
|
tk = @tokens.shift
|
|
|
|
puts "get_tk2 => #{tk.inspect}" if $TOKEN_DEBUG
|
|
|
|
end
|
|
|
|
|
|
|
|
if tk.kind_of?(TkSYMBEG)
|
|
|
|
set_token_position(tk.line_no, tk.char_no)
|
|
|
|
tk1 = get_tk
|
|
|
|
if tk1.kind_of?(TkId) || tk1.kind_of?(TkOp) || tk1.kind_of?(TkSTRING)
|
|
|
|
if tk1.respond_to?(:name)
|
|
|
|
tk = Token(TkSYMBOL).set_text(":" + tk1.name)
|
|
|
|
else
|
|
|
|
tk = Token(TkSYMBOL).set_text(":" + tk1.text)
|
|
|
|
end
|
|
|
|
# remove the identifier we just read (we're about to
|
|
|
|
# replace it with a symbol)
|
|
|
|
@token_listeners.each do |obj|
|
|
|
|
obj.pop_token
|
|
|
|
end if @token_listeners
|
|
|
|
else
|
|
|
|
warn("':' not followed by identifier or operator")
|
|
|
|
tk = tk1
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
# inform any listeners of our shiny new token
|
|
|
|
@token_listeners.each do |obj|
|
|
|
|
obj.add_token(tk)
|
|
|
|
end if @token_listeners
|
|
|
|
|
|
|
|
tk
|
|
|
|
end
|
|
|
|
|
|
|
|
def peek_tk
|
|
|
|
unget_tk(tk = get_tk)
|
|
|
|
tk
|
|
|
|
end
|
|
|
|
|
|
|
|
def unget_tk(tk)
|
|
|
|
@tokens.unshift tk
|
|
|
|
@unget_read.unshift @read.pop
|
|
|
|
|
|
|
|
# Remove this token from any listeners
|
|
|
|
@token_listeners.each do |obj|
|
|
|
|
obj.pop_token
|
|
|
|
end if @token_listeners
|
|
|
|
end
|
|
|
|
|
|
|
|
def skip_tkspace(skip_nl = true)
|
|
|
|
tokens = []
|
|
|
|
while ((tk = get_tk).kind_of?(TkSPACE) ||
|
|
|
|
(skip_nl && tk.kind_of?(TkNL)))
|
|
|
|
tokens.push tk
|
|
|
|
end
|
|
|
|
unget_tk(tk)
|
|
|
|
tokens
|
|
|
|
end
|
|
|
|
|
|
|
|
def get_tkread
|
|
|
|
read = @read.join("")
|
|
|
|
@read = []
|
|
|
|
read
|
|
|
|
end
|
|
|
|
|
|
|
|
def peek_read
|
|
|
|
@read.join('')
|
|
|
|
end
|
|
|
|
|
|
|
|
NORMAL = "::"
|
|
|
|
SINGLE = "<<"
|
|
|
|
|
|
|
|
# Look for the first comment in a file that isn't
|
|
|
|
# a shebang line.
|
|
|
|
|
|
|
|
def collect_first_comment
|
|
|
|
skip_tkspace
|
|
|
|
res = ''
|
|
|
|
first_line = true
|
|
|
|
|
|
|
|
tk = get_tk
|
|
|
|
while tk.kind_of?(TkCOMMENT)
|
|
|
|
if first_line && tk.text[0,2] == "#!"
|
|
|
|
skip_tkspace
|
|
|
|
tk = get_tk
|
|
|
|
else
|
|
|
|
res << tk.text << "\n"
|
|
|
|
tk = get_tk
|
|
|
|
if tk.kind_of? TkNL
|
|
|
|
skip_tkspace(false)
|
|
|
|
tk = get_tk
|
|
|
|
end
|
|
|
|
end
|
|
|
|
first_line = false
|
|
|
|
end
|
|
|
|
unget_tk(tk)
|
|
|
|
res
|
|
|
|
end
|
|
|
|
|
|
|
|
def parse_toplevel_statements(container)
|
|
|
|
comment = collect_first_comment
|
|
|
|
look_for_directives_in(container, comment)
|
|
|
|
container.comment = comment unless comment.empty?
|
|
|
|
parse_statements(container, NORMAL, nil, comment)
|
|
|
|
end
|
|
|
|
|
|
|
|
def parse_statements(container, single=NORMAL, current_method=nil, comment='')
|
|
|
|
nest = 1
|
|
|
|
save_visibility = container.visibility
|
|
|
|
|
|
|
|
# if container.kind_of?(TopLevel)
|
|
|
|
# else
|
|
|
|
# comment = ''
|
|
|
|
# end
|
|
|
|
|
|
|
|
non_comment_seen = true
|
|
|
|
|
|
|
|
while tk = get_tk
|
|
|
|
|
|
|
|
keep_comment = false
|
|
|
|
|
|
|
|
non_comment_seen = true unless tk.kind_of?(TkCOMMENT)
|
|
|
|
|
|
|
|
case tk
|
|
|
|
|
|
|
|
when TkNL
|
|
|
|
skip_tkspace(true) # Skip blanks and newlines
|
|
|
|
tk = get_tk
|
|
|
|
if tk.kind_of?(TkCOMMENT)
|
|
|
|
if non_comment_seen
|
|
|
|
comment = ''
|
|
|
|
non_comment_seen = false
|
|
|
|
end
|
|
|
|
while tk.kind_of?(TkCOMMENT)
|
|
|
|
comment << tk.text << "\n"
|
|
|
|
tk = get_tk # this is the newline
|
|
|
|
skip_tkspace(false) # leading spaces
|
|
|
|
tk = get_tk
|
|
|
|
end
|
|
|
|
unless comment.empty?
|
|
|
|
look_for_directives_in(container, comment)
|
|
|
|
if container.done_documenting
|
|
|
|
container.ongoing_visibility = save_visibility
|
|
|
|
# return
|
|
|
|
end
|
|
|
|
end
|
|
|
|
keep_comment = true
|
|
|
|
else
|
|
|
|
non_comment_seen = true
|
|
|
|
end
|
|
|
|
unget_tk(tk)
|
|
|
|
keep_comment = true
|
|
|
|
|
|
|
|
|
|
|
|
when TkCLASS
|
|
|
|
if container.document_children
|
|
|
|
parse_class(container, single, tk, comment)
|
|
|
|
else
|
|
|
|
nest += 1
|
|
|
|
end
|
|
|
|
|
|
|
|
when TkMODULE
|
|
|
|
if container.document_children
|
|
|
|
parse_module(container, single, tk, comment)
|
|
|
|
else
|
|
|
|
nest += 1
|
|
|
|
end
|
|
|
|
|
|
|
|
when TkDEF
|
|
|
|
if container.document_self
|
|
|
|
parse_method(container, single, tk, comment)
|
|
|
|
else
|
|
|
|
nest += 1
|
|
|
|
end
|
|
|
|
|
|
|
|
when TkCONSTANT
|
|
|
|
if container.document_self
|
|
|
|
parse_constant(container, single, tk, comment)
|
|
|
|
end
|
|
|
|
|
|
|
|
when TkALIAS
|
|
|
|
if container.document_self
|
|
|
|
parse_alias(container, single, tk, comment)
|
|
|
|
end
|
|
|
|
|
|
|
|
when TkYIELD
|
|
|
|
if current_method.nil?
|
|
|
|
warn("Warning: yield outside of method") if container.document_self
|
|
|
|
else
|
|
|
|
parse_yield(container, single, tk, current_method)
|
|
|
|
end
|
|
|
|
|
|
|
|
# Until and While can have a 'do', which shouldn't increase
|
|
|
|
# the nesting. We can't solve the general case, but we can
|
|
|
|
# handle most occurrences by ignoring a do at the end of a line
|
|
|
|
|
|
|
|
when TkUNTIL, TkWHILE
|
|
|
|
nest += 1
|
|
|
|
puts "FOUND #{tk.class} in #{container.name}, nest = #{nest}, " +
|
|
|
|
"line #{tk.line_no}" if $DEBUG
|
|
|
|
skip_optional_do_after_expression
|
|
|
|
|
|
|
|
# 'for' is trickier
|
|
|
|
when TkFOR
|
|
|
|
nest += 1
|
|
|
|
puts "FOUND #{tk.class} in #{container.name}, nest = #{nest}, " +
|
|
|
|
"line #{tk.line_no}" if $DEBUG
|
|
|
|
skip_for_variable
|
|
|
|
skip_optional_do_after_expression
|
|
|
|
|
|
|
|
when TkCASE, TkDO, TkIF, TkUNLESS, TkBEGIN
|
|
|
|
nest += 1
|
|
|
|
puts "Found #{tk.class} in #{container.name}, nest = #{nest}, " +
|
|
|
|
"line #{tk.line_no}" if $DEBUG
|
|
|
|
|
|
|
|
when TkIDENTIFIER
|
|
|
|
if nest == 1 and current_method.nil?
|
|
|
|
case tk.name
|
|
|
|
when "private", "protected", "public",
|
|
|
|
"private_class_method", "public_class_method"
|
|
|
|
parse_visibility(container, single, tk)
|
|
|
|
keep_comment = true
|
|
|
|
when "attr"
|
|
|
|
parse_attr(container, single, tk, comment)
|
|
|
|
when /^attr_(reader|writer|accessor)$/, @options.extra_accessors
|
|
|
|
parse_attr_accessor(container, single, tk, comment)
|
|
|
|
when "alias_method"
|
|
|
|
if container.document_self
|
|
|
|
parse_alias(container, single, tk, comment)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
case tk.name
|
|
|
|
when "require"
|
|
|
|
parse_require(container, comment)
|
|
|
|
when "include"
|
|
|
|
parse_include(container, comment)
|
|
|
|
end
|
|
|
|
|
|
|
|
|
|
|
|
when TkEND
|
|
|
|
nest -= 1
|
|
|
|
puts "Found 'end' in #{container.name}, nest = #{nest}, line #{tk.line_no}" if $DEBUG
|
|
|
|
puts "Method = #{current_method.name}" if $DEBUG and current_method
|
|
|
|
if nest == 0
|
|
|
|
read_documentation_modifiers(container, CLASS_MODIFIERS)
|
|
|
|
container.ongoing_visibility = save_visibility
|
|
|
|
return
|
|
|
|
end
|
|
|
|
|
|
|
|
end
|
|
|
|
|
|
|
|
comment = '' unless keep_comment
|
|
|
|
begin
|
|
|
|
get_tkread
|
|
|
|
skip_tkspace(false)
|
|
|
|
end while peek_tk == TkNL
|
|
|
|
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
def parse_class(container, single, tk, comment, &block)
|
|
|
|
progress("c")
|
|
|
|
|
|
|
|
@stats.num_classes += 1
|
|
|
|
|
|
|
|
container, name_t = get_class_or_module(container)
|
|
|
|
|
|
|
|
case name_t
|
|
|
|
when TkCONSTANT
|
|
|
|
name = name_t.name
|
|
|
|
superclass = "Object"
|
|
|
|
|
|
|
|
if peek_tk.kind_of?(TkLT)
|
|
|
|
get_tk
|
|
|
|
skip_tkspace(true)
|
|
|
|
superclass = get_class_specification
|
|
|
|
superclass = "<unknown>" if superclass.empty?
|
|
|
|
end
|
|
|
|
|
|
|
|
if single == SINGLE
|
|
|
|
cls_type = SingleClass
|
|
|
|
else
|
|
|
|
cls_type = NormalClass
|
|
|
|
end
|
|
|
|
|
|
|
|
cls = container.add_class(cls_type, name, superclass)
|
|
|
|
read_documentation_modifiers(cls, CLASS_MODIFIERS)
|
|
|
|
cls.record_location(@top_level)
|
|
|
|
parse_statements(cls)
|
|
|
|
cls.comment = comment
|
|
|
|
|
|
|
|
when TkLSHFT
|
|
|
|
case name = get_class_specification
|
|
|
|
when "self", container.name
|
|
|
|
parse_statements(container, SINGLE, &block)
|
|
|
|
else
|
|
|
|
other = TopLevel.find_class_named(name)
|
|
|
|
unless other
|
|
|
|
# other = @top_level.add_class(NormalClass, name, nil)
|
|
|
|
# other.record_location(@top_level)
|
|
|
|
# other.comment = comment
|
|
|
|
other = NormalClass.new("Dummy", nil)
|
|
|
|
end
|
|
|
|
read_documentation_modifiers(other, CLASS_MODIFIERS)
|
|
|
|
parse_statements(other, SINGLE, &block)
|
|
|
|
end
|
|
|
|
|
|
|
|
else
|
|
|
|
warn("Expected class name or '<<'. Got #{name_t.class}: #{name_t.text.inspect}")
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
def parse_module(container, single, tk, comment)
|
|
|
|
progress("m")
|
|
|
|
@stats.num_modules += 1
|
|
|
|
container, name_t = get_class_or_module(container)
|
|
|
|
# skip_tkspace
|
|
|
|
name = name_t.name
|
|
|
|
mod = container.add_module(NormalModule, name)
|
|
|
|
mod.record_location(@top_level)
|
|
|
|
read_documentation_modifiers(mod, CLASS_MODIFIERS)
|
|
|
|
parse_statements(mod)
|
|
|
|
mod.comment = comment
|
|
|
|
end
|
|
|
|
|
|
|
|
# Look for the name of a class or module (optionally with a leading :: or
# with ::-separated names) and return the ultimate name and container
|
|
|
|
|
|
|
|
def get_class_or_module(container)
|
|
|
|
skip_tkspace
|
|
|
|
name_t = get_tk
|
|
|
|
|
|
|
|
# class ::A -> A is in the top level
|
|
|
|
if name_t.kind_of?(TkCOLON2)
|
|
|
|
name_t = get_tk
|
|
|
|
container = @top_level
|
|
|
|
end
|
|
|
|
|
|
|
|
skip_tkspace(false)
|
|
|
|
|
|
|
|
while peek_tk.kind_of?(TkCOLON2)
|
|
|
|
prev_container = container
|
|
|
|
container = container.find_module_named(name_t.name)
|
|
|
|
if !container
|
|
|
|
# warn("Couldn't find module #{name_t.name}")
|
|
|
|
container = prev_container.add_module(NormalModule, name_t.name)
|
|
|
|
end
|
|
|
|
get_tk
|
|
|
|
name_t = get_tk
|
|
|
|
end
|
|
|
|
skip_tkspace(false)
|
|
|
|
return [container, name_t]
|
|
|
|
end
|
|
|
|
|
|
|
|
def parse_constant(container, single, tk, comment)
|
|
|
|
name = tk.name
|
|
|
|
skip_tkspace(false)
|
|
|
|
eq_tk = get_tk
|
|
|
|
|
|
|
|
unless eq_tk.kind_of?(TkASSIGN)
|
|
|
|
unget_tk(eq_tk)
|
|
|
|
return
|
|
|
|
end
|
|
|
|
|
|
|
|
|
|
|
|
nest = 0
|
|
|
|
get_tkread
|
|
|
|
|
|
|
|
tk = get_tk
|
|
|
|
if tk.kind_of? TkGT
|
|
|
|
unget_tk(tk)
|
|
|
|
unget_tk(eq_tk)
|
|
|
|
return
|
|
|
|
end
|
|
|
|
|
|
|
|
loop do
|
|
|
|
puts("Param: #{tk}, #{@scanner.continue} " +
|
|
|
|
"#{@scanner.lex_state} #{nest}") if $DEBUG
|
|
|
|
|
|
|
|
case tk
|
|
|
|
when TkSEMICOLON
|
|
|
|
break
|
|
|
|
when TkLPAREN, TkfLPAREN
|
|
|
|
nest += 1
|
|
|
|
when TkRPAREN
|
|
|
|
nest -= 1
|
|
|
|
when TkCOMMENT
|
|
|
|
if nest <= 0 && @scanner.lex_state == EXPR_END
|
|
|
|
unget_tk(tk)
|
|
|
|
break
|
|
|
|
end
|
|
|
|
when TkNL
|
|
|
|
if (@scanner.lex_state == EXPR_END and nest <= 0) || !@scanner.continue
|
|
|
|
unget_tk(tk)
|
|
|
|
break
|
|
|
|
end
|
|
|
|
end
|
|
|
|
tk = get_tk
|
|
|
|
end
|
|
|
|
|
|
|
|
res = get_tkread.tr("\n", " ").strip
|
|
|
|
res = "" if res == ";"
|
|
|
|
con = Constant.new(name, res, comment)
|
|
|
|
read_documentation_modifiers(con, CONSTANT_MODIFIERS)
|
|
|
|
if con.document_self
|
|
|
|
container.add_constant(con)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
def parse_method(container, single, tk, comment)
|
|
|
|
progress(".")
|
|
|
|
@stats.num_methods += 1
|
|
|
|
line_no = tk.line_no
|
|
|
|
column = tk.char_no
|
|
|
|
|
|
|
|
start_collecting_tokens
|
|
|
|
add_token(tk)
|
|
|
|
add_token_listener(self)
|
|
|
|
|
|
|
|
@scanner.instance_eval{@lex_state = EXPR_FNAME}
|
|
|
|
skip_tkspace(false)
|
|
|
|
name_t = get_tk
|
|
|
|
back_tk = skip_tkspace
|
|
|
|
meth = nil
|
added_container = false
|
|
|
|
|
|
|
|
dot = get_tk
|
|
|
|
if dot.kind_of?(TkDOT) or dot.kind_of?(TkCOLON2)
|
|
|
|
@scanner.instance_eval{@lex_state = EXPR_FNAME}
|
|
|
|
skip_tkspace
|
|
|
|
name_t2 = get_tk
|
|
|
|
case name_t
|
|
|
|
when TkSELF
|
|
|
|
name = name_t2.name
|
|
|
|
when TkCONSTANT
|
|
|
|
name = name_t2.name
|
|
|
|
prev_container = container
|
|
|
|
container = container.find_module_named(name_t.name)
|
|
|
|
if !container
|
|
|
|
added_container = true
|
|
|
|
obj = name_t.name.split("::").inject(Object) do |state, item|
|
|
|
|
state.const_get(item)
|
|
|
|
end rescue nil
|
|
|
|
|
|
|
|
type = obj.class == Class ? NormalClass : NormalModule
|
|
|
|
if not [Class, Module].include?(obj.class)
|
|
|
|
warn("Couldn't find #{name_t.name}. Assuming it's a module")
|
|
|
|
end
|
|
|
|
|
|
|
|
if type == NormalClass then
|
|
|
|
container = prev_container.add_class(type, name_t.name, obj.superclass.name)
|
|
|
|
else
|
|
|
|
container = prev_container.add_module(type, name_t.name)
|
|
|
|
end
|
2003-12-01 02:12:49 -05:00
|
|
|
end
|
|
|
|
else
|
|
|
|
# warn("Unexpected token '#{name_t2.inspect}'")
|
|
|
|
# break
|
|
|
|
skip_method(container)
|
|
|
|
return
|
|
|
|
end
|
|
|
|
meth = AnyMethod.new(get_tkread, name)
|
|
|
|
meth.singleton = true
|
|
|
|
else
|
|
|
|
unget_tk dot
|
|
|
|
back_tk.reverse_each do
|
|
|
|
|tk|
|
|
|
|
unget_tk tk
|
|
|
|
end
|
|
|
|
name = name_t.name
|
|
|
|
|
|
|
|
meth = AnyMethod.new(get_tkread, name)
|
|
|
|
meth.singleton = (single == SINGLE)
|
|
|
|
end
|
|
|
|
|
|
|
|
remove_token_listener(self)
|
|
|
|
|
|
|
|
meth.start_collecting_tokens
|
|
|
|
indent = TkSPACE.new(1,1)
|
|
|
|
indent.set_text(" " * column)
|
|
|
|
|
|
|
|
meth.add_tokens([TkCOMMENT.new(line_no,
|
|
|
|
1,
|
|
|
|
"# File #{@top_level.file_absolute_name}, line #{line_no}"),
|
|
|
|
NEWLINE_TOKEN,
|
|
|
|
indent])
|
|
|
|
|
|
|
|
meth.add_tokens(@token_stream)
|
|
|
|
|
|
|
|
add_token_listener(meth)
|
|
|
|
|
|
|
|
@scanner.instance_eval{@continue = false}
|
|
|
|
parse_method_parameters(meth)
|
|
|
|
|
|
|
|
if meth.document_self
|
* array.c: replace rb_protect_inspect() and rb_inspecting_p() by
rb_exec_recursive() in eval.c.
* eval.c (rb_exec_recursive): new function.
* array.c (rb_ary_join): use rb_exec_recursive().
* array.c (rb_ary_inspect, rb_ary_hash): ditto.
* file.c (rb_file_join): ditto.
* hash.c (rb_hash_inspect, rb_hash_to_s, rb_hash_hash): ditto.
* io.c (rb_io_puts): ditto.
* object.c (rb_obj_inspect): ditto
* struct.c (rb_struct_inspect): ditto.
* lib/set.rb (SortedSet::setup): a hack to shut up warning.
[ruby-talk:132866]
* lib/time.rb (Time::strptime): add new function. inspired by
[ruby-talk:132815].
* lib/parsedate.rb (ParseDate::strptime): ditto.
* regparse.c: move st_*_strend() functions from st.c. fixed some
potential memory leaks.
* exception error messages updated. [ruby-core:04497]
* ext/socket/socket.c (Init_socket): add bunch of Socket
constants. Patch from Sam Roberts <sroberts@uniserve.com>.
[ruby-core:04409]
* array.c (rb_ary_s_create): no need for negative argc check.
[ruby-core:04463]
* array.c (rb_ary_unshift_m): ditto.
* lib/xmlrpc/parser.rb (XMLRPC::FaultException): make it subclass
of StandardError class, not Exception class. [ruby-core:04429]
* parse.y (fcall_gen): lvar(arg) will be evaluated as
lvar.call(arg) when lvar is a defined local variable. [new]
* object.c (rb_class_initialize): call inherited method before
calling initializing block.
* eval.c (rb_thread_start_1): initialize newly pushed frame.
* lib/open3.rb (Open3::popen3): $? should not be EXIT_FAILURE.
fixed: [ruby-core:04444]
* eval.c (is_defined): NODE_IASGN is an assignment.
* ext/readline/readline.c (Readline.readline): use rl_outstream
and rl_instream. [ruby-dev:25699]
* ext/etc/etc.c (Init_etc): sGroup needs HAVE_ST_GR_PASSWD check
[ruby-dev:25675]
* misc/ruby-mode.el: [ruby-core:04415]
* lib/rdoc/generators/html_generator.rb: [ruby-core:04412]
* lib/rdoc/generators/ri_generator.rb: ditto.
* struct.c (make_struct): fixed: [ruby-core:04402]
* ext/curses/curses.c (window_color_set): [ruby-core:04393]
* ext/socket/socket.c (Init_socket): SO_REUSEPORT added.
[ruby-talk:130092]
* object.c: [ruby-doc:818]
* parse.y (open_args): fix too verbose warnings for the space
before argument parentheses. [ruby-dev:25492]
* parse.y (parser_yylex): ditto.
* parse.y (parser_yylex): the first expression in the parentheses
should not be a command. [ruby-dev:25492]
* lib/irb/context.rb (IRB::Context::initialize): [ruby-core:04330]
* object.c (Init_Object): remove Object#type. [ruby-core:04335]
* st.c (st_foreach): report success/failure by return value.
[ruby-Bugs-1396]
* parse.y: forgot to initialize parser struct. [ruby-dev:25492]
* parse.y (parser_yylex): no tLABEL on EXPR_BEG.
[ruby-talk:127711]
* document updates - [ruby-core:04296], [ruby-core:04301],
[ruby-core:04302], [ruby-core:04307]
* dir.c (rb_push_glob): should work for NUL delimited patterns.
* dir.c (rb_glob2): should aware of offset in the pattern.
* string.c (rb_str_new4): should propagate taintedness.
* env.h: rename member names in struct FRAME; last_func -> callee,
orig_func -> this_func, last_class -> this_class.
* struct.c (rb_struct_set): use original method name, not callee
name, to retrieve member slot. [ruby-core:04268]
* time.c (time_strftime): protect from format modification from GC
finalizers.
* object.c (Init_Object): remove rb_obj_id_obsolete()
* eval.c (rb_mod_define_method): incomplete subclass check.
[ruby-dev:25464]
* gc.c (rb_data_object_alloc): klass may be NULL.
[ruby-list:40498]
* bignum.c (rb_big_rand): should return positive random number.
[ruby-dev:25401]
* bignum.c (rb_big_rand): do not use rb_big_modulo to generate
random bignums. [ruby-dev:25396]
* variable.c (rb_autoload): [ruby-dev:25373]
* eval.c (svalue_to_avalue): [ruby-dev:25366]
* string.c (rb_str_justify): [ruby-dev:25367]
* io.c (rb_f_select): [ruby-dev:25312]
* ext/socket/socket.c (sock_s_getservbyport): [ruby-talk:124072]
* struct.c (make_struct): [ruby-dev:25249]
* dir.c (dir_open_dir): new function. [ruby-dev:25242]
* io.c (rb_f_open): add type check for return value from to_open.
* lib/pstore.rb (PStore#transaction): Use the empty content when a
file is not found. [ruby-dev:24561]
git-svn-id: svn+ssh://ci.ruby-lang.org/ruby/trunk@8068 b2dd03c8-39d4-4d8f-98ff-823fe69b080e
2005-03-04 01:47:45 -05:00
|
|
|
container.add_method(meth)
|
|
|
|
elsif added_container
|
|
|
|
container.document_self = false
|
2003-12-01 02:12:49 -05:00
|
|
|
end
|
|
|
|
|
|
|
|
# Having now read the method parameters and documentation modifiers, we
|
|
|
|
# now know whether we have to rename #initialize to ::new
|
|
|
|
|
|
|
|
if name == "initialize" && !meth.singleton
|
|
|
|
if meth.dont_rename_initialize
|
|
|
|
meth.visibility = :protected
|
|
|
|
else
|
|
|
|
meth.singleton = true
|
|
|
|
meth.name = "new"
|
|
|
|
meth.visibility = :public
|
|
|
|
end
|
|
|
|
end
|
2004-04-01 20:20:58 -05:00
|
|
|
|
2003-12-01 02:12:49 -05:00
|
|
|
parse_statements(container, single, meth)
|
2004-04-01 20:20:58 -05:00
|
|
|
|
2003-12-01 02:12:49 -05:00
|
|
|
remove_token_listener(meth)
|
2004-07-12 11:52:35 -04:00
|
|
|
|
|
|
|
# Look for a 'call-seq' in the comment, and override the
|
|
|
|
# normal parameter stuff
|
|
|
|
|
2004-07-12 13:21:21 -04:00
|
|
|
if comment.sub!(/:?call-seq:(.*?)^\s*\#?\s*$/m, '')
|
2004-07-12 11:52:35 -04:00
|
|
|
seq = $1
|
|
|
|
seq.gsub!(/^\s*\#\s*/, '')
|
|
|
|
meth.call_seq = seq
|
|
|
|
end
|
2004-04-01 20:20:58 -05:00
|
|
|
|
2003-12-01 02:12:49 -05:00
|
|
|
meth.comment = comment
|
2004-05-05 13:02:47 -04:00
|
|
|
|
2003-12-01 02:12:49 -05:00
|
|
|
end
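
    # An illustrative example (not from the original source): the call-seq
    # handling above picks up C-extension style comments such as
    #
    #   # call-seq:
    #   #    ary.push(obj, ...)  -> ary
    #   #
    #   # Pushes the given object(s) onto the end of the array.
    #
    # Everything between "call-seq:" and the first blank comment line is
    # stored in call_seq and overrides the normal parameter display.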

    def skip_method(container)
      meth = AnyMethod.new("", "anon")
      parse_method_parameters(meth)
      parse_statements(container, false, meth)
    end

    # Capture the method's parameters. Along the way,
    # look for a comment containing
    #
    #    # yields: ....
    #
    # and add this as the block_params for the method

    def parse_method_parameters(method)
      res = parse_method_or_yield_parameters(method)
      res = "(" + res + ")" unless res[0] == ?(
      method.params = res unless method.params
      if method.block_params.nil?
        skip_tkspace(false)
        read_documentation_modifiers(method, METHOD_MODIFIERS)
      end
    end
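
    # An illustrative example (not from the original source): for a method
    # written as
    #
    #   def each_entry(filter)   # :yields: name, size
    #
    # params becomes "(filter)" and the trailing directive sets block_params
    # to "name, size".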

    def parse_method_or_yield_parameters(method=nil, modifiers=METHOD_MODIFIERS)
      skip_tkspace(false)
      tk = get_tk

      # Little hack going on here. In the statement
      #
      #   f = 2*(1+yield)
      #
      # we see the RPAREN as the next token, so we need
      # to exit early. This still won't catch all cases
      # (such as "a = yield + 1")
      end_token = case tk
                  when TkLPAREN, TkfLPAREN
                    TkRPAREN
                  when TkRPAREN
                    return ""
                  else
                    TkNL
                  end
      nest = 0

      loop do
        puts("Param: #{tk.inspect}, #{@scanner.continue} " +
             "#{@scanner.lex_state} #{nest}") if $DEBUG
        case tk
        when TkSEMICOLON
          break
        when TkLBRACE
          nest += 1
        when TkRBRACE
          # we might have a.each {|i| yield i }
          unget_tk(tk) if nest.zero?
          nest -= 1
          break if nest <= 0
        when TkLPAREN, TkfLPAREN
          nest += 1
        when end_token
          if end_token == TkRPAREN
            nest -= 1
            break if @scanner.lex_state == EXPR_END and nest <= 0
          else
            break unless @scanner.continue
          end
        when method && method.block_params.nil? && TkCOMMENT
          unget_tk(tk)
          read_documentation_modifiers(method, modifiers)
        end
        tk = get_tk
      end

      res = get_tkread.tr("\n", " ").strip
      res = "" if res == ";"
      res
    end

    # skip the var [in] part of a 'for' statement
    def skip_for_variable
      skip_tkspace(false)
      tk = get_tk
      skip_tkspace(false)
      tk = get_tk
      unget_tk(tk) unless tk.kind_of?(TkIN)
    end

    # while, until, and for have an optional 'do' after the expression;
    # skip the expression and consume that 'do' if it is present

    def skip_optional_do_after_expression
      skip_tkspace(false)
      tk = get_tk
      case tk
      when TkLPAREN, TkfLPAREN
        end_token = TkRPAREN
      else
        end_token = TkNL
      end

      nest = 0
      @scanner.instance_eval{@continue = false}

      loop do
        puts("\nWhile: #{tk}, #{@scanner.continue} " +
             "#{@scanner.lex_state} #{nest}") if $DEBUG
        case tk
        when TkSEMICOLON
          break
        when TkLPAREN, TkfLPAREN
          nest += 1
        when TkDO
          break if nest.zero?
        when end_token
          if end_token == TkRPAREN
            nest -= 1
            break if @scanner.lex_state == EXPR_END and nest.zero?
          else
            break unless @scanner.continue
          end
        end
        tk = get_tk
      end

      skip_tkspace(false)
      if peek_tk.kind_of? TkDO
        get_tk
      end
    end
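
    # An illustrative example (not from the original source): both loop forms
    # end up parsed the same way,
    #
    #   while line = gets do   # the optional 'do' is consumed here
    #     ...
    #   end
    #
    #   while line = gets      # no 'do', nothing extra to consume
    #     ...
    #   end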

    # Return a superclass, which can be either a constant
    # or an expression

    def get_class_specification
      tk = get_tk
      return "self" if tk.kind_of?(TkSELF)

      res = ""
      while tk.kind_of?(TkCOLON2) ||
            tk.kind_of?(TkCOLON3) ||
            tk.kind_of?(TkCONSTANT)

        res += tk.text
        tk = get_tk
      end

      unget_tk(tk)
      skip_tkspace(false)

      get_tkread # empty out read buffer

      tk = get_tk

      case tk
      when TkNL, TkCOMMENT, TkSEMICOLON
        unget_tk(tk)
        return res
      end

      res += parse_call_parameters(tk)
      res
    end
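
    # An illustrative example (not from the original source): both of these
    # superclass specifications are handled above,
    #
    #   class Point3D < Math::Point          # a (qualified) constant
    #   class Point   < Struct.new(:x, :y)   # a constant followed by call parameters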

    def parse_call_parameters(tk)

      end_token = case tk
                  when TkLPAREN, TkfLPAREN
                    TkRPAREN
                  when TkRPAREN
                    return ""
                  else
                    TkNL
                  end
      nest = 0

      loop do
        puts("Call param: #{tk}, #{@scanner.continue} " +
             "#{@scanner.lex_state} #{nest}") if $DEBUG
        case tk
        when TkSEMICOLON
          break
        when TkLPAREN, TkfLPAREN
          nest += 1
        when end_token
          if end_token == TkRPAREN
            nest -= 1
            break if @scanner.lex_state == EXPR_END and nest <= 0
          else
            break unless @scanner.continue
          end
        when TkCOMMENT
          unget_tk(tk)
          break
        end
        tk = get_tk
      end

      res = get_tkread.tr("\n", " ").strip
      res = "" if res == ";"
      res
    end

    # Parse a constant, which might be qualified by
    # one or more class or module names

    def get_constant
      res = ""
      skip_tkspace(false)
      tk = get_tk

      while tk.kind_of?(TkCOLON2) ||
            tk.kind_of?(TkCOLON3) ||
            tk.kind_of?(TkCONSTANT)

        res += tk.text
        tk = get_tk
      end

      # if res.empty?
      #   warn("Unexpected token #{tk} in constant")
      # end
      unget_tk(tk)
      res
    end

    # Get a constant that may be surrounded by parens

    def get_constant_with_optional_parens
      skip_tkspace(false)
      nest = 0
      while (tk = peek_tk).kind_of?(TkLPAREN) || tk.kind_of?(TkfLPAREN)
        get_tk
        skip_tkspace(true)
        nest += 1
      end

      name = get_constant

      while nest > 0
        skip_tkspace(true)
        tk = get_tk
        nest -= 1 if tk.kind_of?(TkRPAREN)
      end
      name
    end
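
    # An illustrative example (not from the original source): each of these
    # yields the same constant name,
    #
    #   include Enumerable
    #   include(Enumerable)
    #   include ((Enumerable))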

    # Directives are modifier comments that can appear after class, module,
    # or method names. For example
    #
    #   def fred    # :yields:  a, b
    #
    # or
    #
    #   class SM  # :nodoc:
    #
    # we return the directive name and any parameters as a two element array

    def read_directive(allowed)
      tk = get_tk
      puts "directive: #{tk.inspect}" if $DEBUG
      result = nil
      if tk.kind_of?(TkCOMMENT)
        if tk.text =~ /\s*:?(\w+):\s*(.*)/
          directive = $1.downcase
          if allowed.include?(directive)
            result = [directive, $2]
          end
        end
      else
        unget_tk(tk)
      end
      result
    end
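
    # An illustrative note (not from the original source): for the comment
    # "# :yields: a, b" with "yields" listed in +allowed+, the result is
    # ["yields", "a, b"]; a comment that carries no allowed directive
    # returns nil.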

    def read_documentation_modifiers(context, allow)
      dir = read_directive(allow)

      case dir[0]

      when "notnew", "not_new", "not-new"
        context.dont_rename_initialize = true

      when "nodoc"
        context.document_self = false
        if dir[1].downcase == "all"
          context.document_children = false
        end

      when "doc"
        context.document_self = true
        context.force_documentation = true

      when "yield", "yields"
        unless context.params.nil?
          context.params.sub!(/(,|)\s*&\w+/,'') # remove parameter &proc
        end
        context.block_params = dir[1]

      when "arg", "args"
        context.params = dir[1]
      end if dir
    end
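
    # An illustrative example (not from the original source): a directive like
    #
    #   class Internals # :nodoc: all
    #
    # turns off documentation for the class itself and, because of the "all"
    # parameter, for everything nested inside it.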

    # Look for directives in a normal comment block:
    #
    #   #--   - don't display comment from this point forward
    #   #++   - turn comment display back on
    #
    # This routine modifies its parameter

    def look_for_directives_in(context, comment)

      preprocess = SM::PreProcess.new(@input_file_name,
                                      @options.rdoc_include)

      preprocess.handle(comment) do |directive, param|
        case directive
        when "stopdoc"
          context.stop_doc
          ""
        when "startdoc"
          context.start_doc
          context.force_documentation = true
          ""

        when "enddoc"
          #context.done_documenting = true
          #""
          throw :enddoc

        when "main"
          options = Options.instance
          options.main_page = param
          ""

        when "title"
          options = Options.instance
          options.title = param
          ""

        when "section"
          context.set_current_section(param, comment)
          comment.clear
          break

        else
          warn "Unrecognized directive '#{directive}'"
          break
        end
      end

      remove_private_comments(comment)
    end

    def remove_private_comments(comment)
      comment.gsub!(/^#--.*?^#\+\+/m, '')
      comment.sub!(/^#--.*/m, '')
    end
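
    # An illustrative example (not from the original source): in a comment
    # block such as
    #
    #   # Public description.
    #   #--
    #   # Implementation notes that should stay out of the output.
    #   #++
    #   # More public description.
    #
    # the two marker lines and everything between them are stripped; a "#--"
    # with no matching "#++" removes the rest of the comment.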

    def get_symbol_or_name
      tk = get_tk
      case tk
      when TkSYMBOL
        tk.text.sub(/^:/, '')
      when TkId, TkOp
        tk.name
      when TkSTRING
        tk.text
      else
        raise "Name or symbol expected (got #{tk})"
      end
    end

    def parse_alias(context, single, tk, comment)
      skip_tkspace
      if (peek_tk.kind_of? TkLPAREN)
        get_tk
        skip_tkspace
      end
      new_name = get_symbol_or_name
      @scanner.instance_eval{@lex_state = EXPR_FNAME}
      skip_tkspace
      if (peek_tk.kind_of? TkCOMMA)
        get_tk
        skip_tkspace
      end
      old_name = get_symbol_or_name

      al = Alias.new(get_tkread, old_name, new_name, comment)
      read_documentation_modifiers(al, ATTR_MODIFIERS)
      if al.document_self
        context.add_alias(al)
      end
    end
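
    # An illustrative example (not from the original source): both spellings
    # of the alias keyword are accepted,
    #
    #   alias inspect to_s
    #   alias :inspect :to_s
    #
    # and each records an Alias from to_s to the new name inspect.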

    def parse_yield_parameters
      parse_method_or_yield_parameters
    end

    def parse_yield(context, single, tk, method)
      if method.block_params.nil?
        get_tkread
        @scanner.instance_eval{@continue = false}
        method.block_params = parse_yield_parameters
      end
    end

    def parse_require(context, comment)
      skip_tkspace_comment
      tk = get_tk
      if tk.kind_of? TkLPAREN
        skip_tkspace_comment
        tk = get_tk
      end

      name = nil
      case tk
      when TkSTRING
        name = tk.text
      # when TkCONSTANT, TkIDENTIFIER, TkIVAR, TkGVAR
      #   name = tk.name
      when TkDSTRING
        warn "Skipping require of dynamic string: #{tk.text}"
      # else
      #   warn "'require' used as variable"
      end

      if name
        context.add_require(Require.new(name, comment))
      else
        unget_tk(tk)
      end
    end
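
    # An illustrative example (not from the original source):
    #
    #   require 'rdoc/code_objects'    # recorded as a Require
    #   require "drivers/#{name}"      # dynamic string, skipped with a warning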

    def parse_include(context, comment)
      loop do
        skip_tkspace_comment
        name = get_constant_with_optional_parens
        unless name.empty?
          context.add_include(Include.new(name, comment))
        end
        return unless peek_tk.kind_of?(TkCOMMA)
        get_tk
      end
    end
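
    # An illustrative example (not from the original source): a multi-argument
    # include such as
    #
    #   include Comparable, Enumerable
    #
    # is walked one constant at a time, adding an Include for each name.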

    def get_bool
      skip_tkspace
      tk = get_tk
      case tk
      when TkTRUE
        true
      when TkFALSE, TkNIL
        false
      else
        unget_tk tk
        true
      end
    end

    def parse_attr(context, single, tk, comment)
      args = parse_symbol_arg(1)
      if args.size > 0
        name = args[0]
        rw = "R"
        skip_tkspace(false)
        tk = get_tk
        if tk.kind_of? TkCOMMA
          rw = "RW" if get_bool
        else
          unget_tk tk
        end
        att = Attr.new(get_tkread, name, rw, comment)
        read_documentation_modifiers(att, ATTR_MODIFIERS)
        if att.document_self
          context.add_attribute(att)
        end
      else
        warn("'attr' ignored - looks like a variable")
      end
    end
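
    # An illustrative example (not from the original source):
    #
    #   attr :name          # read-only attribute ("R")
    #   attr :name, true    # trailing true makes it read/write ("RW")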

    def parse_visibility(container, single, tk)
      singleton = (single == SINGLE)
      vis = case tk.name
            when "private"   then :private
            when "protected" then :protected
            when "public"    then :public
            when "private_class_method"
              singleton = true
              :private
            when "public_class_method"
              singleton = true
              :public
            else raise "Invalid visibility: #{tk.name}"
            end

      skip_tkspace_comment(false)
      case peek_tk
        # Ryan Davis suggested the extension to ignore modifiers, because he
        # often writes
        #
        #   protected unless $TESTING
        #
      when TkNL, TkUNLESS_MOD, TkIF_MOD
        # error("Missing argument") if singleton
        container.ongoing_visibility = vis
      else
        args = parse_symbol_arg
        container.set_visibility_for(args, vis, singleton)
      end
    end
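
    # An illustrative example (not from the original source):
    #
    #   private                       # changes the ongoing visibility of the context
    #   private :reset, :flush        # changes only the named methods
    #   protected unless $TESTING    # modifier is ignored, treated like bare 'protected'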

    def parse_attr_accessor(context, single, tk, comment)
      args = parse_symbol_arg
      read = get_tkread
      rw = "?"

      # If nodoc is given, don't document any of them

      tmp = CodeObject.new
      read_documentation_modifiers(tmp, ATTR_MODIFIERS)
      return unless tmp.document_self

      case tk.name
      when "attr_reader"   then rw = "R"
      when "attr_writer"   then rw = "W"
      when "attr_accessor" then rw = "RW"
      else
        rw = @options.extra_accessor_flags[tk.name]
      end

      for name in args
        att = Attr.new(get_tkread, name, rw, comment)
        context.add_attribute(att)
      end
    end
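
    # An illustrative example (not from the original source):
    #
    #   attr_accessor :width, :height         # two "RW" attributes
    #   attr_writer   :cache_key   # :nodoc:  # skipped entirely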

    def skip_tkspace_comment(skip_nl = true)
      loop do
        skip_tkspace(skip_nl)
        return unless peek_tk.kind_of? TkCOMMENT
        get_tk
      end
    end

    def parse_symbol_arg(no = nil)

      args = []
      skip_tkspace_comment
      case tk = get_tk
      when TkLPAREN
        loop do
          skip_tkspace_comment
          if tk1 = parse_symbol_in_arg
            args.push tk1
            break if no and args.size >= no
          end

          skip_tkspace_comment
          case tk2 = get_tk
          when TkRPAREN
            break
          when TkCOMMA
          else
            warn("unexpected token: '#{tk2.inspect}'") if $DEBUG
            break
          end
        end
      else
        unget_tk tk
        if tk = parse_symbol_in_arg
          args.push tk
          return args if no and args.size >= no
        end

        loop do
          # skip_tkspace_comment(false)
          skip_tkspace(false)

          tk1 = get_tk
          unless tk1.kind_of?(TkCOMMA)
            unget_tk tk1
            break
          end

          skip_tkspace_comment
          if tk = parse_symbol_in_arg
            args.push tk
            break if no and args.size >= no
          end
        end
      end
      args
    end

    def parse_symbol_in_arg
      case tk = get_tk
      when TkSYMBOL
        tk.text.sub(/^:/, '')
      when TkSTRING
        eval @read[-1]
      else
        warn("Expected symbol or string, got #{tk.inspect}") if $DEBUG
        nil
      end
    end

  end

end