# jashkenas/coffeescript: test/unit/test_lexer.rb

require 'test_helper'

class LexerTest < Test::Unit::TestCase

  def setup
    @lex = Lexer.new
  end
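
  # An empty source string produces no tokens.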
  def test_lexing_an_empty_string
    assert @lex.tokenize("") == []
  end
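
  # `a: 'one'` style assignment lexes into IDENTIFIER/ASSIGN/value tokens,
  # with a newline token closing each line.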
  def test_lexing_basic_assignment
    code = "a: 'one'\nb: [1, 2]"
    assert @lex.tokenize(code) == [[:IDENTIFIER, "a"], [:ASSIGN, ":"],
      [:STRING, "'one'"], ["\n", "\n"], [:IDENTIFIER, "b"], [:ASSIGN, ":"],
      ["[", "["], [:NUMBER, "1"], [",", ","], [:NUMBER, "2"], ["]", "]"],
      ["\n", "\n"]]
  end
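
  # Braces in an object literal pass through as literal tokens; the colon still lexes as ASSIGN.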
  def test_lexing_object_literal
    code = "{one : 1}"
    assert @lex.tokenize(code) == [["{", "{"], [:IDENTIFIER, "one"], [:ASSIGN, ":"],
      [:NUMBER, "1"], ["}", "}"], ["\n", "\n"]]
  end
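
  # Parameters are wrapped in PARAM_START/PARAM_END, and the one-line function
  # body is bracketed by matching INDENT/OUTDENT tokens.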
  def test_lexing_function_definition
    code = "(x, y) -> x * y"
    assert @lex.tokenize(code) == [[:PARAM_START, "("], [:PARAM, "x"],
      [",", ","], [:PARAM, "y"], [:PARAM_END, ")"],
      ["->", "->"], [:INDENT, 2], [:IDENTIFIER, "x"], ["*", "*"],
      [:IDENTIFIER, "y"], [:OUTDENT, 2], ["\n", "\n"]]
  end
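
  # A postfix `if` keeps source order: the call tokens come first, then IF and its condition.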
  def test_lexing_if_statement
    code = "clap_your_hands() if happy"
    assert @lex.tokenize(code) == [[:IDENTIFIER, "clap_your_hands"], [:CALL_START, "("],
      [:CALL_END, ")"], [:IF, "if"], [:IDENTIFIER, "happy"], ["\n", "\n"]]
  end
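
  # Consecutive comment lines are folded into a single COMMENT token carrying an array of comment texts.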
  def test_lexing_comment
    code = "a: 1\n# comment\n# on two lines\nb: 2"
    assert @lex.tokenize(code) == [[:IDENTIFIER, "a"], [:ASSIGN, ":"], [:NUMBER, "1"],
["\n", "\n"], [:COMMENT, [" comment", " on two lines"]], ["\n", "\n"],
[:IDENTIFIER, "b"], [:ASSIGN, ":"], [:NUMBER, "2"], ["\n", "\n"]]
  end
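
  # A trailing backslash escapes the newline, so no newline token appears inside the continued expression.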
  def test_lexing_newline_escaper
    code = "two: 1 + \\\n\n 1"
    assert @lex.tokenize(code) == [[:IDENTIFIER, "two"], [:ASSIGN, ":"],
      [:NUMBER, "1"], ["+", "+"], [:NUMBER, "1"], ["\n", "\n"]]
  end
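
  # Full-file check: the token stream for each.coffee must match the stored each.tokens fixture.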
  def test_lexing
    tokens = @lex.tokenize(File.read('test/fixtures/generation/each.coffee'))
    assert tokens.inspect == File.read('test/fixtures/generation/each.tokens')
  end
end