From 12ead5b70f260e51eba74bea81235353e9b18547 Mon Sep 17 00:00:00 2001
From: Alex Kotov
Date: Sun, 2 Apr 2023 15:30:06 +0400
Subject: [PATCH] Build paragraphs

---
 src/main.rs | 34 +++++++++++++++++++++++++++++++---
 1 file changed, 31 insertions(+), 3 deletions(-)

diff --git a/src/main.rs b/src/main.rs
index a0240d2..7d593e5 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -5,14 +5,42 @@ mod tokenizer;
 
 use input::Input;
 use lexer::Lexer;
-use models::Token;
 
 fn main() {
     let mut stdin = std::io::stdin().lock();
     let input = Input::new(&mut stdin);
     let lexer = Lexer::new(input);
-    let tokens: Vec<Token> = lexer.map(|result| result.unwrap()).collect();
+    let tokens = lexer.map(|result| result.unwrap());
+
+    let mut paragraphs: Vec<String> = Vec::new();
+    let mut paragraph = String::new();
+
     for token in tokens {
-        println!("{:?}", token);
+        if let Some(chr) = token.value.chars().next() {
+            match chr {
+                '\n' => {
+                    if token.value.len() == 1 {
+                        paragraph.push(' ');
+                    } else {
+                        if !paragraph.is_empty() {
+                            paragraphs.push(paragraph.trim().to_string());
+                        }
+                        paragraph = String::new();
+                    }
+                }
+                ' ' => {
+                    paragraph.push(' ');
+                }
+                _ => {
+                    paragraph.push_str(&token.value);
+                }
+            }
+        }
     }
+
+    if !paragraph.is_empty() {
+        paragraphs.push(paragraph.trim().to_string());
+    }
+
+    println!("{:#?}", paragraphs);
 }