From 7c166baee028077820b7fa23563b44c64b5e7262 Mon Sep 17 00:00:00 2001
From: Alex Kotov
Date: Sun, 2 Apr 2023 15:11:52 +0400
Subject: [PATCH] Collect tokens to display

---
 src/main.rs | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/src/main.rs b/src/main.rs
index 1ec362f..a0240d2 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -5,12 +5,14 @@ mod tokenizer;
 
 use input::Input;
 use lexer::Lexer;
+use models::Token;
 
 fn main() {
     let mut stdin = std::io::stdin().lock();
     let input = Input::new(&mut stdin);
     let lexer = Lexer::new(input);
-    for token in lexer.map(|result| result.unwrap()) {
-        println!("{:?} {:?}", token.value, token.pos);
+    let tokens: Vec<Token> = lexer.map(|result| result.unwrap()).collect();
+    for token in tokens {
+        println!("{:?}", token);
     }
 }
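
For context, a minimal standalone sketch of the collect-then-print pattern this patch introduces. The `Token` struct and the `Result`-yielding iterator below are stand-ins for the crate's own `models::Token` and `Lexer` types (whose definitions are not part of this diff); the field types are assumptions, only the field names `value` and `pos` come from the removed `println!` line.

```rust
// Stand-in for the crate's `models::Token`; field types are assumed.
#[derive(Debug)]
struct Token {
    value: String,
    pos: usize,
}

fn main() {
    // Stand-in for `Lexer`: an iterator yielding Result<Token, _>,
    // as implied by the `result.unwrap()` call in the patch.
    let results: Vec<Result<Token, String>> = vec![
        Ok(Token { value: "let".to_string(), pos: 0 }),
        Ok(Token { value: "x".to_string(), pos: 4 }),
    ];
    let lexer = results.into_iter();

    // Same shape as the patched main(): unwrap each item, collect into
    // a Vec<Token>, then print each token with its Debug formatting.
    let tokens: Vec<Token> = lexer.map(|result| result.unwrap()).collect();
    for token in tokens {
        println!("{:?}", token);
    }
}
```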