Stars: 1 · Watchers: 0 · Forks: 0

test func "arcana/tokenize"

This commit is contained in:
Alex Kotov 2023-05-07 15:16:53 +04:00
parent 4c7acd92f0
commit 23704a3f90
Signed by: kotovalexarian
GPG key ID: 553C0EBBEB5D5F08
4 changed files with 70 additions and 51 deletions

View file

@@ -23,6 +23,7 @@ repl: arcana-lisp
 test: arcana-lisp
 	$(CAT) tests/arcana.scm | ./arcana-lisp
+	$(CAT) tests/arcana/tokenize.scm | ./arcana-lisp
 	$(CAT) tests/arithm_ops.scm | ./arcana-lisp
 	$(CAT) tests/basic_data_structs.scm | ./arcana-lisp
 	$(CAT) tests/equiv.scm | ./arcana-lisp

View file

@@ -226,10 +226,11 @@ struct Object *func_arcana_SLASH_tokenize(
     Lexer_lex(lexer, '\n');
     LEXER_DELETE(lexer);
-    assert(Tokens_top(tokens));
-    struct Object *last = NULL;
-    struct Object *list = Object_new_pair(
-        Object_new_pair(
-            Object_new_symbol(TokenType_to_str(Tokens_top(tokens)->type)),
-            Object_new_string(Tokens_top(tokens)->val)
+    struct Object *list = NULL;
+    if (Tokens_top(tokens)) {
+        struct Object *last = NULL;
+        list = Object_new_pair(
+            Object_new_pair(
+                Object_new_symbol(TokenType_to_str(Tokens_top(tokens)->type)),
+                Object_new_string(Tokens_top(tokens)->val)
@@ -256,6 +257,7 @@ struct Object *func_arcana_SLASH_tokenize(
             list->pair.cdr = last;
         }
     }
+    }
     TOKENS_DELETE(tokens);
     return list;

View file

@@ -13,38 +13,6 @@
         (cons 'TOKEN_ROUND_CLOSE ")")))
     '(displayln (+ 123 456)))
-;;; arcana/tokenize ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
-(assert-equal
-  (arcana/tokenize "(")
-  (list (cons 'TOKEN_ROUND_OPEN "(")))
-(assert-equal
-  (arcana/tokenize "#false")
-  (list (cons 'TOKEN_TAG "false")))
-(assert-equal
-  (arcana/tokenize "\"\"")
-  (list (cons 'TOKEN_STRING "")))
-(assert-equal
-  (arcana/tokenize "\"qwe\"")
-  (list (cons 'TOKEN_STRING "qwe")))
-(assert-equal
-  (arcana/tokenize "(displayln \"qwe\")")
-  (list
-    (cons 'TOKEN_ROUND_OPEN "(")
-    (cons 'TOKEN_IDENT "displayln")
-    (cons 'TOKEN_STRING "qwe")
-    (cons 'TOKEN_ROUND_CLOSE ")")))
-(assert-equal
-  (arcana/tokenize "(displayln (list 1))")
-  (list
-    (cons 'TOKEN_ROUND_OPEN "(")
-    (cons 'TOKEN_IDENT "displayln")
-    (cons 'TOKEN_ROUND_OPEN "(")
-    (cons 'TOKEN_IDENT "list")
-    (cons 'TOKEN_NUM "1")
-    (cons 'TOKEN_ROUND_CLOSE ")")
-    (cons 'TOKEN_ROUND_CLOSE ")")))
 ;;; arcana/typeof ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
 (assert-equal 'null (arcana/typeof '()))
 (assert-equal 'procedure (arcana/typeof +))

48
tests/arcana/tokenize.scm Normal file
View file

@ -0,0 +1,48 @@
(begin
  ;; Test suite for arcana/tokenize: each case feeds a source string to the
  ;; tokenizer and checks the resulting list of (TOKEN_TYPE . lexeme) pairs.
  ;; Cases that are commented out document inputs the lexer does not handle
  ;; yet; they are kept disabled verbatim.

  ;; Blank input and comments yield an empty token list.
  (assert-equal '() (arcana/tokenize ""))
  (assert-equal '() (arcana/tokenize " "))
  ;(assert-equal '() (arcana/tokenize "\n"))
  (assert-equal '() (arcana/tokenize "; foo"))
  ;(assert-equal '() (arcana/tokenize " ; foo\n ; foo"))

  ;; Single-character delimiter and quote tokens.
  (assert-equal
    (arcana/tokenize "(")
    (list (cons 'TOKEN_ROUND_OPEN "(")))
  (assert-equal
    (arcana/tokenize ")")
    (list (cons 'TOKEN_ROUND_CLOSE ")")))
  (assert-equal
    (arcana/tokenize "[")
    (list (cons 'TOKEN_SQUARE_OPEN "[")))
  (assert-equal
    (arcana/tokenize "]")
    (list (cons 'TOKEN_SQUARE_CLOSE "]")))
  (assert-equal
    (arcana/tokenize "{")
    (list (cons 'TOKEN_CURLY_OPEN "{")))
  (assert-equal
    (arcana/tokenize "}")
    (list (cons 'TOKEN_CURLY_CLOSE "}")))
  (assert-equal
    (arcana/tokenize "'")
    (list (cons 'TOKEN_QUOTE "'")))

  ;; Hash tags: the leading "#" is stripped from the lexeme.
  (assert-equal
    (arcana/tokenize "#f")
    (list (cons 'TOKEN_TAG "f")))
  (assert-equal
    (arcana/tokenize "#t")
    (list (cons 'TOKEN_TAG "t")))
  (assert-equal
    (arcana/tokenize "#false")
    (list (cons 'TOKEN_TAG "false")))
  (assert-equal
    (arcana/tokenize "#true")
    (list (cons 'TOKEN_TAG "true")))
  (assert-equal
    (arcana/tokenize "#qwe")
    (list (cons 'TOKEN_TAG "qwe")))

  ;; Identifiers and numeric literals.
  (assert-equal
    (arcana/tokenize "qwe")
    (list (cons 'TOKEN_IDENT "qwe")))
  (assert-equal
    (arcana/tokenize "123")
    (list (cons 'TOKEN_NUM "123")))
  ;(assert-equal (arcana/tokenize "-123") (list (cons 'TOKEN_NUM "-123")))

  ;; String literals: surrounding quotes are stripped and the \" escape
  ;; is decoded to a bare quote character.
  (assert-equal
    (arcana/tokenize "\"\"")
    (list (cons 'TOKEN_STRING "")))
  (assert-equal
    (arcana/tokenize "\"qwe\"")
    (list (cons 'TOKEN_STRING "qwe")))
  (assert-equal
    (arcana/tokenize "\"\\\"\"")
    (list (cons 'TOKEN_STRING "\"")))

  ;; Compound expressions produce their tokens in source order.
  (assert-equal
    (arcana/tokenize "(displayln \"qwe\")")
    (list
      (cons 'TOKEN_ROUND_OPEN "(")
      (cons 'TOKEN_IDENT "displayln")
      (cons 'TOKEN_STRING "qwe")
      (cons 'TOKEN_ROUND_CLOSE ")")))
  (assert-equal
    (arcana/tokenize "(displayln (list 1))")
    (list
      (cons 'TOKEN_ROUND_OPEN "(")
      (cons 'TOKEN_IDENT "displayln")
      (cons 'TOKEN_ROUND_OPEN "(")
      (cons 'TOKEN_IDENT "list")
      (cons 'TOKEN_NUM "1")
      (cons 'TOKEN_ROUND_CLOSE ")")
      (cons 'TOKEN_ROUND_CLOSE ")"))))