diff --git a/src/main.rs b/src/main.rs index e4c6f39..cc8c4dd 100644 --- a/src/main.rs +++ b/src/main.rs @@ -4,10 +4,24 @@ pub mod om; pub mod renderer; fn main() { - let mut input = String::new(); - io::stdin().read_to_string(&mut input).expect("Failed to read from stdin"); - om::tokenizer::Tokenizer::new(input).for_each(|token| { - println!("\n{:?}", token); - }); - println!(); + let mut input = String::new(); + io::stdin() + .read_to_string(&mut input) + .expect("Failed to read from stdin"); + let mut index = 0; + om::tokenizer::Tokenizer::new(input).for_each(|token| { + if let om::tokenizer::Token::NodeEnd = token { + index -= 1; + } + println!("{}{:?}", " ".repeat(index), token); + if let om::tokenizer::Token::Define { + name, + component, + behavior, + } = token + { + index += 1; + } + }); + println!(); } diff --git a/src/om/tokenizer.rs b/src/om/tokenizer.rs index f7988dd..10e8433 100644 --- a/src/om/tokenizer.rs +++ b/src/om/tokenizer.rs @@ -49,7 +49,7 @@ impl Iterator for Tokenizer { return Some(Token::EOF); } let c = self.consume_input(); - print!("{}", c); + // print!("{}", c); match self.state { State::Data => match c { '(' => { @@ -81,6 +81,16 @@ impl Iterator for Tokenizer { self.state = State::AfterDefine; self.latest = Some(Token::Define { name: self.buffer.clone(), + component: None, + behavior: Behavior::new(), + }); + self.buffer.clear(); + } + '<' => { + self.state = State::CompCall; + self.latest = Some(Token::Define { + name: self.buffer.clone(), + component: None, behavior: Behavior::new(), }); self.buffer.clear(); @@ -93,6 +103,31 @@ impl Iterator for Tokenizer { if self.is_eof() { return Some(Token::Define { name: self.buffer.clone(), + component: None, + behavior: Behavior::new(), + }); + } + } + }, + State::CompCall => match c { + '>' => { + self.state = State::AfterDefine; + if let Some(t) = self.latest.as_mut() { + if let Token::Define { component, .. 
} = t { *component = Some(self.buffer.clone()); } } self.buffer.clear(); } _ => { if !c.is_ascii_alphanumeric() { panic!("Unexpected character: {}", c); } self.buffer.push(c); if self.is_eof() { return Some(Token::Define { name: self.buffer.clone(), component: None, behavior: Behavior::new(), }); } } }, @@ -172,7 +207,6 @@ impl Iterator for Tokenizer { self.reconsume = true; self.state = State::Behavior; - println!("BehaviorItem: {:?}", BehaviorItem::from(&self.buffer)); } } }, @@ -186,6 +220,7 @@ enum State { Data, NodeOpen, Define, + CompCall, AfterDefine, Behavior, BehaviorSeparator, @@ -193,7 +228,11 @@ #[derive(Debug, Clone, PartialEq, Eq)] pub enum Token { - Define { name: String, behavior: Behavior }, + Define { + name: String, + component: Option<String>, + behavior: Behavior, + }, Character(char), NodeEnd, EOF, @@ -217,6 +256,7 @@ mod tests { let expected = [ Token::Define { name: "foo".to_string(), + component: None, behavior: Behavior { items: vec![BehaviorItem::KeyValue { prefix: Some("prefix".to_string()), @@ -239,6 +279,7 @@ let expected = [ Token::Define { name: "foo".to_string(), + component: None, behavior: Behavior { items: vec![BehaviorItem::KeyValue { prefix: Some("prefix".to_string()), @@ -249,6 +290,7 @@ }, Token::Define { name: "bar".to_string(), + component: None, behavior: Behavior { items: vec![BehaviorItem::KeyValue { prefix: Some("prefix".to_string()),