1
0
Fork 0

test func "arcana/tokenize"

This commit is contained in:
Alex Kotov 2023-05-07 15:16:53 +04:00
parent 4c7acd92f0
commit 23704a3f90
Signed by: kotovalexarian
GPG key ID: 553C0EBBEB5D5F08
4 changed files with 70 additions and 51 deletions

View file

@@ -23,6 +23,7 @@ repl: arcana-lisp
test: arcana-lisp
$(CAT) tests/arcana.scm | ./arcana-lisp
$(CAT) tests/arcana/tokenize.scm | ./arcana-lisp
$(CAT) tests/arithm_ops.scm | ./arcana-lisp
$(CAT) tests/basic_data_structs.scm | ./arcana-lisp
$(CAT) tests/equiv.scm | ./arcana-lisp

View file

@@ -226,20 +226,11 @@ struct Object *func_arcana_SLASH_tokenize(
Lexer_lex(lexer, '\n');
LEXER_DELETE(lexer);
assert(Tokens_top(tokens));
struct Object *list = NULL;
struct Object *last = NULL;
struct Object *list = Object_new_pair(
Object_new_pair(
Object_new_symbol(TokenType_to_str(Tokens_top(tokens)->type)),
Object_new_string(Tokens_top(tokens)->val)
),
NULL
);
Tokens_pop(tokens);
while (Tokens_top(tokens)) {
struct Object *const new_pair = Object_new_pair(
if (Tokens_top(tokens)) {
struct Object *last = NULL;
list = Object_new_pair(
Object_new_pair(
Object_new_symbol(TokenType_to_str(Tokens_top(tokens)->type)),
Object_new_string(Tokens_top(tokens)->val)
@@ -248,12 +239,23 @@ struct Object *func_arcana_SLASH_tokenize(
);
Tokens_pop(tokens);
if (last) {
last->pair.cdr = new_pair;
last = new_pair;
} else {
last = new_pair;
list->pair.cdr = last;
while (Tokens_top(tokens)) {
struct Object *const new_pair = Object_new_pair(
Object_new_pair(
Object_new_symbol(TokenType_to_str(Tokens_top(tokens)->type)),
Object_new_string(Tokens_top(tokens)->val)
),
NULL
);
Tokens_pop(tokens);
if (last) {
last->pair.cdr = new_pair;
last = new_pair;
} else {
last = new_pair;
list->pair.cdr = last;
}
}
}

View file

@@ -13,38 +13,6 @@
(cons 'TOKEN_ROUND_CLOSE ")")))
'(displayln (+ 123 456)))
;;; arcana/tokenize ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(assert-equal
(arcana/tokenize "(")
(list (cons 'TOKEN_ROUND_OPEN "(")))
(assert-equal
(arcana/tokenize "#false")
(list (cons 'TOKEN_TAG "false")))
(assert-equal
(arcana/tokenize "\"\"")
(list (cons 'TOKEN_STRING "")))
(assert-equal
(arcana/tokenize "\"qwe\"")
(list (cons 'TOKEN_STRING "qwe")))
(assert-equal
(arcana/tokenize "(displayln \"qwe\")")
(list
(cons 'TOKEN_ROUND_OPEN "(")
(cons 'TOKEN_IDENT "displayln")
(cons 'TOKEN_STRING "qwe")
(cons 'TOKEN_ROUND_CLOSE ")")))
(assert-equal
(arcana/tokenize "(displayln (list 1))")
(list
(cons 'TOKEN_ROUND_OPEN "(")
(cons 'TOKEN_IDENT "displayln")
(cons 'TOKEN_ROUND_OPEN "(")
(cons 'TOKEN_IDENT "list")
(cons 'TOKEN_NUM "1")
(cons 'TOKEN_ROUND_CLOSE ")")
(cons 'TOKEN_ROUND_CLOSE ")")))
;;; arcana/typeof ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(assert-equal 'null (arcana/typeof '()))
(assert-equal 'procedure (arcana/typeof +))

48
tests/arcana/tokenize.scm Normal file
View file

@@ -0,0 +1,48 @@
;;; Test suite for the project's `arcana/tokenize` builtin.
;;; Each assertion checks that a source string is lexed into a list of
;;; (token-type-symbol . lexeme-string) pairs.
(begin
;; Empty input, bare whitespace, and comment-only input yield no tokens.
;; NOTE(review): the commented-out cases ("\n" and multi-line comments)
;; appear to be not-yet-supported lexer inputs — confirm before enabling.
(assert-equal '() (arcana/tokenize ""))
(assert-equal '() (arcana/tokenize " "))
;(assert-equal '() (arcana/tokenize "\n"))
(assert-equal '() (arcana/tokenize "; foo"))
;(assert-equal '() (arcana/tokenize " ; foo\n ; foo"))
;; Single-character delimiter tokens: the lexeme is the character itself.
(assert-equal (arcana/tokenize "(") (list (cons 'TOKEN_ROUND_OPEN "(")))
(assert-equal (arcana/tokenize ")") (list (cons 'TOKEN_ROUND_CLOSE ")")))
(assert-equal (arcana/tokenize "[") (list (cons 'TOKEN_SQUARE_OPEN "[")))
(assert-equal (arcana/tokenize "]") (list (cons 'TOKEN_SQUARE_CLOSE "]")))
(assert-equal (arcana/tokenize "{") (list (cons 'TOKEN_CURLY_OPEN "{")))
(assert-equal (arcana/tokenize "}") (list (cons 'TOKEN_CURLY_CLOSE "}")))
(assert-equal (arcana/tokenize "'") (list (cons 'TOKEN_QUOTE "'")))
;; `#...` tags: the leading `#` is stripped from the lexeme.
(assert-equal (arcana/tokenize "#f") (list (cons 'TOKEN_TAG "f")))
(assert-equal (arcana/tokenize "#t") (list (cons 'TOKEN_TAG "t")))
(assert-equal (arcana/tokenize "#false") (list (cons 'TOKEN_TAG "false")))
(assert-equal (arcana/tokenize "#true") (list (cons 'TOKEN_TAG "true")))
(assert-equal (arcana/tokenize "#qwe") (list (cons 'TOKEN_TAG "qwe")))
;; Identifiers and (unsigned) numbers.
;; NOTE(review): negative numbers are disabled — presumably unimplemented.
(assert-equal (arcana/tokenize "qwe") (list (cons 'TOKEN_IDENT "qwe")))
(assert-equal (arcana/tokenize "123") (list (cons 'TOKEN_NUM "123")))
;(assert-equal (arcana/tokenize "-123") (list (cons 'TOKEN_NUM "-123")))
;; String literals: surrounding quotes are stripped and `\"` escapes
;; are decoded in the lexeme.
(assert-equal (arcana/tokenize "\"\"") (list (cons 'TOKEN_STRING "")))
(assert-equal (arcana/tokenize "\"qwe\"") (list (cons 'TOKEN_STRING "qwe")))
(assert-equal (arcana/tokenize "\"\\\"\"") (list (cons 'TOKEN_STRING "\"")))
;; Multi-token inputs, including a nested form: tokens come back in
;; source order as a flat list.
(assert-equal
(arcana/tokenize "(displayln \"qwe\")")
(list
(cons 'TOKEN_ROUND_OPEN "(")
(cons 'TOKEN_IDENT "displayln")
(cons 'TOKEN_STRING "qwe")
(cons 'TOKEN_ROUND_CLOSE ")")))
(assert-equal
(arcana/tokenize "(displayln (list 1))")
(list
(cons 'TOKEN_ROUND_OPEN "(")
(cons 'TOKEN_IDENT "displayln")
(cons 'TOKEN_ROUND_OPEN "(")
(cons 'TOKEN_IDENT "list")
(cons 'TOKEN_NUM "1")
(cons 'TOKEN_ROUND_CLOSE ")")
(cons 'TOKEN_ROUND_CLOSE ")")))
)