mirror of
https://github.com/jashkenas/coffeescript.git
synced 2022-11-09 12:23:24 -05:00
adding an initial lexer test
This commit is contained in:
parent
049358d006
commit
92adabdddc
8 changed files with 120 additions and 4 deletions
15
Rakefile
15
Rakefile
|
@ -1,3 +1,14 @@
|
|||
require 'fileutils'
require 'rake/testtask'

desc "Run all tests"
task :test do
  # Make the test/ directory requirable so test files can `require 'test_helper'`.
  $LOAD_PATH.unshift(File.expand_path('test'))
  # redgreen colorizes Test::Unit output; purely optional.
  # NOTE(review): Gem.available? is deprecated in newer RubyGems — confirm
  # the targeted RubyGems version supports it.
  require 'redgreen' if Gem.available?('redgreen')
  require 'test/unit'
  # Requiring each test_*.rb file registers its Test::Unit suites,
  # which then run automatically at process exit.
  Dir['test/*/**/test_*.rb'].each {|test| require test }
end
|
||||
|
||||
desc "Recompile the Racc parser (pass -v and -g for verbose debugging)"
|
||||
task :build, :extra_args do |t, args|
|
||||
sh "racc #{args[:extra_args]} -o lib/coffee_script/parser.rb lib/coffee_script/grammar.y"
|
||||
|
@ -16,4 +27,6 @@ namespace :gem do
|
|||
sh "sudo gem uninstall -x coffee-script"
|
||||
end
|
||||
|
||||
end
|
||||
end
|
||||
|
||||
task :default => :test
|
4
TODO
4
TODO
|
@ -2,9 +2,9 @@ TODO:
|
|||
|
||||
* Write some tests.
|
||||
|
||||
* Code Cleanup.
|
||||
* Finish the examples.
|
||||
|
||||
* Figure out how not to have to close each if statement individually.
|
||||
* Create the documentation page.
|
||||
|
||||
* Is it possible to close blocks (functions, ifs, trys) without an explicit
|
||||
block delimiter or significant whitespace?
|
||||
|
|
|
@ -157,8 +157,8 @@ module CoffeeScript
|
|||
index = 0
|
||||
loop do
|
||||
tok = @tokens[index -= 1]
|
||||
return if !tok || tok[0] != :IDENTIFIER
|
||||
next if tok[0] == ','
|
||||
return if tok[0] != :IDENTIFIER
|
||||
tok[0] = :PARAM
|
||||
end
|
||||
end
|
||||
|
|
11
test/fixtures/each.cs
vendored
Normal file
11
test/fixtures/each.cs
vendored
Normal file
|
@ -0,0 +1,11 @@
|
|||
# The cornerstone, an each implementation.
|
||||
# Handles objects implementing forEach, arrays, and raw objects.
|
||||
_.each: obj, iterator, context =>
|
||||
index: 0
|
||||
try
|
||||
return obj.forEach(iterator, context) if obj.forEach
|
||||
return iterator.call(context, item, i, obj) for item, i in obj. if _.isArray(obj) or _.isArguments(obj)
|
||||
iterator.call(context, obj[key], key, obj) for key in _.keys(obj).
|
||||
catch e
|
||||
throw e if e aint breaker.
|
||||
obj.
|
1
test/fixtures/each.tokens
vendored
Normal file
1
test/fixtures/each.tokens
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
[["\n", "\n"], [:IDENTIFIER, "_"], [:PROPERTY_ACCESS, "."], [:IDENTIFIER, "each"], [":", ":"], [:IDENTIFIER, "obj"], [",", ","], [:IDENTIFIER, "iterator"], [",", ","], [:PARAM, "context"], ["=>", "=>"], ["\n", "\n"], [:IDENTIFIER, "index"], [":", ":"], [:NUMBER, "0"], ["\n", "\n"], [:TRY, "try"], ["\n", "\n"], [:RETURN, "return"], [:IDENTIFIER, "obj"], [:PROPERTY_ACCESS, "."], [:IDENTIFIER, "forEach"], ["(", "("], [:IDENTIFIER, "iterator"], [",", ","], [:IDENTIFIER, "context"], [")", ")"], [:IF, "if"], [:IDENTIFIER, "obj"], [:PROPERTY_ACCESS, "."], [:IDENTIFIER, "forEach"], ["\n", "\n"], [:RETURN, "return"], [:IDENTIFIER, "iterator"], [:PROPERTY_ACCESS, "."], [:IDENTIFIER, "call"], ["(", "("], [:IDENTIFIER, "context"], [",", ","], [:IDENTIFIER, "item"], [",", ","], [:IDENTIFIER, "i"], [",", ","], [:IDENTIFIER, "obj"], [")", ")"], [:FOR, "for"], [:IDENTIFIER, "item"], [",", ","], [:IDENTIFIER, "i"], [:IN, "in"], [:IDENTIFIER, "obj"], [".", "."], [:IF, "if"], [:IDENTIFIER, "_"], [:PROPERTY_ACCESS, "."], [:IDENTIFIER, "isArray"], ["(", "("], [:IDENTIFIER, "obj"], [")", ")"], [:OR, "or"], [:IDENTIFIER, "_"], [:PROPERTY_ACCESS, "."], [:IDENTIFIER, "isArguments"], ["(", "("], [:IDENTIFIER, "obj"], [")", ")"], ["\n", "\n"], [:IDENTIFIER, "iterator"], [:PROPERTY_ACCESS, "."], [:IDENTIFIER, "call"], ["(", "("], [:IDENTIFIER, "context"], [",", ","], [:IDENTIFIER, "obj"], ["[", "["], [:IDENTIFIER, "key"], ["]", "]"], [",", ","], [:IDENTIFIER, "key"], [",", ","], [:IDENTIFIER, "obj"], [")", ")"], [:FOR, "for"], [:IDENTIFIER, "key"], [:IN, "in"], [:IDENTIFIER, "_"], [:PROPERTY_ACCESS, "."], [:IDENTIFIER, "keys"], ["(", "("], [:IDENTIFIER, "obj"], [")", ")"], [".", "."], ["\n", "\n"], [:CATCH, "catch"], [:IDENTIFIER, "e"], ["\n", "\n"], [:THROW, "throw"], [:IDENTIFIER, "e"], [:IF, "if"], [:IDENTIFIER, "e"], [:AINT, "aint"], [:IDENTIFIER, "breaker"], [".", "."], ["\n", "\n"], [:IDENTIFIER, "obj"], [".", "."]]
|
5
test/test_helper.rb
Normal file
5
test/test_helper.rb
Normal file
|
@ -0,0 +1,5 @@
|
|||
# Shared test setup: load the library, then mix the CoffeeScript namespace
# into every test case so classes such as Lexer and Parser can be referenced
# without the CoffeeScript:: prefix.
require 'lib/coffee-script'

class Test::Unit::TestCase
  include CoffeeScript
end
|
43
test/unit/test_lexer.rb
Normal file
43
test/unit/test_lexer.rb
Normal file
|
@ -0,0 +1,43 @@
|
|||
require 'test_helper'

# Unit tests for the CoffeeScript lexer: feed source strings to
# Lexer#tokenize and check the exact token stream produced.
#
# All assertions use assert_equal rather than `assert a == b`: on failure,
# assert_equal reports the expected and actual token streams, whereas a
# bare equality assert only prints "<false> is not true".
class LexerTest < Test::Unit::TestCase

  def setup
    @lex = Lexer.new
  end

  def test_lexing_an_empty_string
    assert_equal [], @lex.tokenize("")
  end

  def test_lexing_basic_assignment
    code = "a: 'one'; b: [1, 2]"
    assert_equal [[:IDENTIFIER, "a"], [":", ":"],
      [:STRING, "'one'"], [";", ";"], [:IDENTIFIER, "b"], [":", ":"],
      ["[", "["], [:NUMBER, "1"], [",", ","], [:NUMBER, "2"], ["]", "]"]],
      @lex.tokenize(code)
  end

  def test_lexing_object_literal
    code = "{one : 1}"
    assert_equal [["{", "{"], [:IDENTIFIER, "one"], [":", ":"],
      [:NUMBER, "1"], ["}", "}"]],
      @lex.tokenize(code)
  end

  def test_lexing_function_definition
    # The identifier before `=>` is re-tagged :PARAM by the lexer.
    code = "x => x * x."
    assert_equal [[:PARAM, "x"], ["=>", "=>"],
      [:IDENTIFIER, "x"], ["*", "*"], [:IDENTIFIER, "x"], [".", "."]],
      @lex.tokenize(code)
  end

  def test_lexing_if_statement
    code = "clap_your_hands() if happy"
    assert_equal [[:IDENTIFIER, "clap_your_hands"], ["(", "("],
      [")", ")"], [:IF, "if"], [:IDENTIFIER, "happy"]],
      @lex.tokenize(code)
  end

  def test_lexing
    # Golden-file test: tokenizing the each.cs fixture must reproduce the
    # recorded each.tokens output byte-for-byte. Paths are relative to the
    # project root, so this assumes rake runs tests from the repo root.
    tokens = @lex.tokenize(File.read('test/fixtures/each.cs'))
    assert_equal File.read('test/fixtures/each.tokens'), tokens.inspect
  end

end
|
43
test/unit/test_parser.rb
Normal file
43
test/unit/test_parser.rb
Normal file
|
@ -0,0 +1,43 @@
|
|||
require 'test_helper'

# Placeholder tests for the CoffeeScript parser. The commented-out cases
# below were copied from LexerTest as a template (they reference @lex,
# which is not defined in this class) and must be rewritten against the
# parser's node output before being enabled.
class ParserTest < Test::Unit::TestCase

  def setup
    @par = Parser.new
  end

  def test_parsing_an_empty_string
    # The original body only printed the parse result, so this test could
    # never fail. At minimum, assert that parsing empty input does not
    # raise; keep the inspect output as a development aid.
    result = nil
    assert_nothing_raised { result = @par.parse("") }
    puts result.inspect
  end

  # def test_lexing_basic_assignment
  #   code = "a: 'one'; b: [1, 2]"
  #   assert @lex.tokenize(code) == [[:IDENTIFIER, "a"], [":", ":"],
  #     [:STRING, "'one'"], [";", ";"], [:IDENTIFIER, "b"], [":", ":"],
  #     ["[", "["], [:NUMBER, "1"], [",", ","], [:NUMBER, "2"], ["]", "]"]]
  # end
  #
  # def test_lexing_object_literal
  #   code = "{one : 1}"
  #   assert @lex.tokenize(code) == [["{", "{"], [:IDENTIFIER, "one"], [":", ":"],
  #     [:NUMBER, "1"], ["}", "}"]]
  # end
  #
  # def test_lexing_function_definition
  #   code = "x => x * x."
  #   assert @lex.tokenize(code) == [[:PARAM, "x"], ["=>", "=>"],
  #     [:IDENTIFIER, "x"], ["*", "*"], [:IDENTIFIER, "x"], [".", "."]]
  # end
  #
  # def test_lexing_if_statement
  #   code = "clap_your_hands() if happy"
  #   assert @lex.tokenize(code) == [[:IDENTIFIER, "clap_your_hands"], ["(", "("],
  #     [")", ")"], [:IF, "if"], [:IDENTIFIER, "happy"]]
  # end
  #
  # def test_lexing
  #   tokens = @lex.tokenize(File.read('test/fixtures/each.cs'))
  #   assert tokens.inspect == File.read('test/fixtures/each.tokens')
  # end

end
|
Loading…
Add table
Reference in a new issue