diff --git a/Makefile b/Makefile
index 659ef0e..b9b8906 100644
--- a/Makefile
+++ b/Makefile
@@ -23,6 +23,7 @@ repl: arcana-lisp
 
 test: arcana-lisp
 	$(CAT) tests/arcana.scm | ./arcana-lisp
+	$(CAT) tests/arcana/tokenize.scm | ./arcana-lisp
 	$(CAT) tests/arithm_ops.scm | ./arcana-lisp
 	$(CAT) tests/basic_data_structs.scm | ./arcana-lisp
 	$(CAT) tests/equiv.scm | ./arcana-lisp
diff --git a/src/builtins.c b/src/builtins.c
index 9a81d72..57ac19b 100644
--- a/src/builtins.c
+++ b/src/builtins.c
@@ -226,20 +226,11 @@ struct Object *func_arcana_SLASH_tokenize(
 	Lexer_lex(lexer, '\n');
 	LEXER_DELETE(lexer);
 
-	assert(Tokens_top(tokens));
+	struct Object *list = NULL;
 
-	struct Object *last = NULL;
-	struct Object *list = Object_new_pair(
-		Object_new_pair(
-			Object_new_symbol(TokenType_to_str(Tokens_top(tokens)->type)),
-			Object_new_string(Tokens_top(tokens)->val)
-		),
-		NULL
-	);
-	Tokens_pop(tokens);
-
-	while (Tokens_top(tokens)) {
-		struct Object *const new_pair = Object_new_pair(
+	if (Tokens_top(tokens)) {
+		struct Object *last = NULL;
+		list = Object_new_pair(
 			Object_new_pair(
 				Object_new_symbol(TokenType_to_str(Tokens_top(tokens)->type)),
 				Object_new_string(Tokens_top(tokens)->val)
@@ -248,12 +239,23 @@
 		);
 		Tokens_pop(tokens);
 
-		if (last) {
-			last->pair.cdr = new_pair;
-			last = new_pair;
-		} else {
-			last = new_pair;
-			list->pair.cdr = last;
+		while (Tokens_top(tokens)) {
+			struct Object *const new_pair = Object_new_pair(
+				Object_new_pair(
+					Object_new_symbol(TokenType_to_str(Tokens_top(tokens)->type)),
+					Object_new_string(Tokens_top(tokens)->val)
+				),
+				NULL
+			);
+			Tokens_pop(tokens);
+
+			if (last) {
+				last->pair.cdr = new_pair;
+				last = new_pair;
+			} else {
+				last = new_pair;
+				list->pair.cdr = last;
+			}
 		}
 	}
 
diff --git a/tests/arcana.scm b/tests/arcana.scm
index 5b3738f..fb7aef2 100644
--- a/tests/arcana.scm
+++ b/tests/arcana.scm
@@ -13,38 +13,6 @@
       (cons 'TOKEN_ROUND_CLOSE ")")))
     '(displayln (+ 123 456)))
 
-  ;;; arcana/tokenize ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
-  (assert-equal
-    (arcana/tokenize "(")
-    (list (cons 'TOKEN_ROUND_OPEN "(")))
-  (assert-equal
-    (arcana/tokenize "#false")
-    (list (cons 'TOKEN_TAG "false")))
-  (assert-equal
-    (arcana/tokenize "\"\"")
-    (list (cons 'TOKEN_STRING "")))
-  (assert-equal
-    (arcana/tokenize "\"qwe\"")
-    (list (cons 'TOKEN_STRING "qwe")))
-  (assert-equal
-    (arcana/tokenize "(displayln \"qwe\")")
-    (list
-      (cons 'TOKEN_ROUND_OPEN "(")
-      (cons 'TOKEN_IDENT "displayln")
-      (cons 'TOKEN_STRING "qwe")
-      (cons 'TOKEN_ROUND_CLOSE ")")))
-
-  (assert-equal
-    (arcana/tokenize "(displayln (list 1))")
-    (list
-      (cons 'TOKEN_ROUND_OPEN "(")
-      (cons 'TOKEN_IDENT "displayln")
-      (cons 'TOKEN_ROUND_OPEN "(")
-      (cons 'TOKEN_IDENT "list")
-      (cons 'TOKEN_NUM "1")
-      (cons 'TOKEN_ROUND_CLOSE ")")
-      (cons 'TOKEN_ROUND_CLOSE ")")))
-
   ;;; arcana/typeof ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
   (assert-equal 'null (arcana/typeof '()))
   (assert-equal 'procedure (arcana/typeof +))
diff --git a/tests/arcana/tokenize.scm b/tests/arcana/tokenize.scm
new file mode 100644
index 0000000..4d95c60
--- /dev/null
+++ b/tests/arcana/tokenize.scm
@@ -0,0 +1,48 @@
+(begin
+  (assert-equal '() (arcana/tokenize ""))
+  (assert-equal '() (arcana/tokenize " "))
+  ;(assert-equal '() (arcana/tokenize "\n"))
+  (assert-equal '() (arcana/tokenize "; foo"))
+  ;(assert-equal '() (arcana/tokenize " ; foo\n ; foo"))
+
+  (assert-equal (arcana/tokenize "(") (list (cons 'TOKEN_ROUND_OPEN "(")))
+  (assert-equal (arcana/tokenize ")") (list (cons 'TOKEN_ROUND_CLOSE ")")))
+  (assert-equal (arcana/tokenize "[") (list (cons 'TOKEN_SQUARE_OPEN "[")))
+  (assert-equal (arcana/tokenize "]") (list (cons 'TOKEN_SQUARE_CLOSE "]")))
+  (assert-equal (arcana/tokenize "{") (list (cons 'TOKEN_CURLY_OPEN "{")))
+  (assert-equal (arcana/tokenize "}") (list (cons 'TOKEN_CURLY_CLOSE "}")))
+  (assert-equal (arcana/tokenize "'") (list (cons 'TOKEN_QUOTE "'")))
+
+  (assert-equal (arcana/tokenize "#f") (list (cons 'TOKEN_TAG "f")))
+  (assert-equal (arcana/tokenize "#t") (list (cons 'TOKEN_TAG "t")))
+  (assert-equal (arcana/tokenize "#false") (list (cons 'TOKEN_TAG "false")))
+  (assert-equal (arcana/tokenize "#true") (list (cons 'TOKEN_TAG "true")))
+  (assert-equal (arcana/tokenize "#qwe") (list (cons 'TOKEN_TAG "qwe")))
+
+  (assert-equal (arcana/tokenize "qwe") (list (cons 'TOKEN_IDENT "qwe")))
+
+  (assert-equal (arcana/tokenize "123") (list (cons 'TOKEN_NUM "123")))
+  ;(assert-equal (arcana/tokenize "-123") (list (cons 'TOKEN_NUM "-123")))
+
+  (assert-equal (arcana/tokenize "\"\"") (list (cons 'TOKEN_STRING "")))
+  (assert-equal (arcana/tokenize "\"qwe\"") (list (cons 'TOKEN_STRING "qwe")))
+  (assert-equal (arcana/tokenize "\"\\\"\"") (list (cons 'TOKEN_STRING "\"")))
+
+  (assert-equal
+    (arcana/tokenize "(displayln \"qwe\")")
+    (list
+      (cons 'TOKEN_ROUND_OPEN "(")
+      (cons 'TOKEN_IDENT "displayln")
+      (cons 'TOKEN_STRING "qwe")
+      (cons 'TOKEN_ROUND_CLOSE ")")))
+  (assert-equal
+    (arcana/tokenize "(displayln (list 1))")
+    (list
+      (cons 'TOKEN_ROUND_OPEN "(")
+      (cons 'TOKEN_IDENT "displayln")
+      (cons 'TOKEN_ROUND_OPEN "(")
+      (cons 'TOKEN_IDENT "list")
+      (cons 'TOKEN_NUM "1")
+      (cons 'TOKEN_ROUND_CLOSE ")")
+      (cons 'TOKEN_ROUND_CLOSE ")")))
+)
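Note on the src/builtins.c change: func_arcana_SLASH_tokenize used to assert() that the lexer produced at least one token, so empty input ("", pure whitespace, or a bare comment such as "; foo") aborted the interpreter. The patch instead guards the whole list construction with if (Tokens_top(tokens)), so such input evaluates to '(), which is exactly what the first assertions in tests/arcana/tokenize.scm pin down. The construction itself is the usual head/tail-pointer append. Below is a minimal standalone sketch of the same pattern; Node, Node_new, and build_list are hypothetical simplified stand-ins, not the project's actual Object/Tokens API:

#include <stdio.h>
#include <stdlib.h>

/* Hypothetical stand-in for the interpreter's pair objects; the real
 * code builds (symbol . string) pairs via Object_new_pair. */
struct Node {
	const char *val;
	struct Node *next;
};

static struct Node *Node_new(const char *val)
{
	/* Allocation failure handling omitted for brevity. */
	struct Node *const node = malloc(sizeof *node);
	node->val = val;
	node->next = NULL;
	return node;
}

/* Same shape as the patched func_arcana_SLASH_tokenize: the whole list
 * construction is guarded, so empty input yields NULL (the empty list)
 * instead of tripping the old assert(). */
static struct Node *build_list(const char **items)
{
	struct Node *list = NULL;

	if (*items) {                           /* first token forms the head */
		struct Node *last = NULL;
		list = Node_new(*items++);

		while (*items) {                    /* remaining tokens */
			struct Node *const new_node = Node_new(*items++);
			if (last) {
				last->next = new_node;      /* append at the tail */
				last = new_node;
			} else {
				last = new_node;            /* second element: link to head */
				list->next = last;
			}
		}
	}
	return list;
}

int main(void)
{
	const char *none[] = { NULL };
	const char *toks[] = { "(", "displayln", ")", NULL };

	printf("empty input -> %s\n", build_list(none) ? "pair" : "()");
	for (const struct Node *n = build_list(toks); n; n = n->next)
		printf("%s\n", n->val);
	return 0;
}

The sketch keeps the patch's exact shape (outer guard, head built first, later elements threaded through last, with the if (last) branch handling the second element) rather than the simpler variant that initializes last to the head, so it reads one-to-one against the diff.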