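# Unit tests for the Lexer. Each case tokenizes a small source snippet and
# compares the raw token stream ([tag, value] pairs) against the expected output.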
require 'test_helper'

class LexerTest < Test::Unit::TestCase

  def setup
    @lex = Lexer.new
  end

  def test_lexing_an_empty_string
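    # An empty source should yield an empty token stream.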
    assert @lex.tokenize("") == []
  end

  def test_lexing_basic_assignment
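    # Expected shape: IDENTIFIER / ASSIGN / value for each line, with newline
    # tokens separating the lines and terminating the stream.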
    code = "a: 'one'\nb: [1, 2]"
    assert @lex.tokenize(code) == [[:IDENTIFIER, "a"], [:ASSIGN, ":"],
      [:STRING, "'one'"], ["\n", "\n"], [:IDENTIFIER, "b"], [:ASSIGN, ":"],
      ["[", "["], [:NUMBER, "1"], [",", ","], [:NUMBER, "2"], ["]", "]"],
      ["\n", "\n"]]
  end

  def test_lexing_object_literal
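    # Braces come through as literal punctuation tokens, and the key's colon
    # is tagged ASSIGN, per the expected stream below.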
    code = "{one : 1}"
    assert @lex.tokenize(code) == [["{", "{"], [:IDENTIFIER, "one"], [:ASSIGN, ":"],
      [:NUMBER, "1"], ["}", "}"], ["\n", "\n"]]
  end

  def test_lexing_function_definition
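    # A one-line function body is still bracketed by INDENT/OUTDENT tokens
    # (both carrying the value 2 here).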
    code = "(x, y) -> x * y"
    assert @lex.tokenize(code) == [[:PARAM_START, "("], [:PARAM, "x"],
      [",", ","], [:PARAM, "y"], [:PARAM_END, ")"],
      ["->", "->"], [:INDENT, 2], [:IDENTIFIER, "x"], ["*", "*"],
      [:IDENTIFIER, "y"], [:OUTDENT, 2], ["\n", "\n"]]
  end

  def test_lexing_if_statement
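    # Call parentheses are tagged CALL_START/CALL_END rather than plain
    # punctuation; the postfix `if` stays an IF token.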
    code = "clap_your_hands() if happy"
    assert @lex.tokenize(code) == [[:IDENTIFIER, "clap_your_hands"], [:CALL_START, "("],
      [:CALL_END, ")"], [:IF, "if"], [:IDENTIFIER, "happy"], ["\n", "\n"]]
  end

  def test_lexing_comment
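    # Consecutive comment lines collapse into a single COMMENT token whose
    # value is the array of comment texts.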
    code = "a: 1\n# comment\n# on two lines\nb: 2"
    assert @lex.tokenize(code) == [[:IDENTIFIER, "a"], [:ASSIGN, ":"], [:NUMBER, "1"],
      ["\n", "\n"], [:COMMENT, [" comment", " on two lines"]], ["\n", "\n"],
      [:IDENTIFIER, "b"], [:ASSIGN, ":"], [:NUMBER, "2"], ["\n", "\n"]]
  end

  def test_lexing_newline_escaper
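    # A backslash-escaped newline should not produce newline tokens; the
    # expression lexes as if written on a single line.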
    code = "two: 1 + \\\n\n 1"
    assert @lex.tokenize(code) == [[:IDENTIFIER, "two"], [:ASSIGN, ":"],
      [:NUMBER, "1"], ["+", "+"], [:NUMBER, "1"], ["\n", "\n"]]
  end

  def test_lexing
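    # Regression check: lex a larger fixture file and compare against a saved
    # token dump.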
    tokens = @lex.tokenize(File.read('test/fixtures/generation/each.coffee'))
    assert tokens.inspect == File.read('test/fixtures/generation/each.tokens')
  end

end