Commit 7c166baee0 — "Collect tokens to display" (parent: eeb22a5c3b)
|
@ -5,12 +5,14 @@ mod tokenizer;
|
|||
|
||||
use input::Input;
|
||||
use lexer::Lexer;
|
||||
use models::Token;
|
||||
|
||||
fn main() {
|
||||
let mut stdin = std::io::stdin().lock();
|
||||
let input = Input::new(&mut stdin);
|
||||
let lexer = Lexer::new(input);
|
||||
for token in lexer.map(|result| result.unwrap()) {
|
||||
println!("{:?} {:?}", token.value, token.pos);
|
||||
let tokens: Vec<Token> = lexer.map(|result| result.unwrap()).collect();
|
||||
for token in tokens {
|
||||
println!("{:?}", token);
|
||||
}
|
||||
}
|
||||
|
|
Loading…
Reference in New Issue