diff --git a/Cargo.lock b/Cargo.lock index 8fda08f..29737a7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -874,6 +874,16 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3d7b894f5411737b7867f4827955924d7c254fc9f4d91a6aad6b097804b1018b" +[[package]] +name = "colored" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "117725a109d387c937a1533ce01b450cbde6b88abceea8473c4d7a85853cda3c" +dependencies = [ + "lazy_static", + "windows-sys 0.59.0", +] + [[package]] name = "colorsys" version = "0.6.7" @@ -2481,6 +2491,7 @@ dependencies = [ name = "matugen-parser" version = "0.1.0" dependencies = [ + "colored", "nom", "serde", "string_cache", diff --git a/matugen-parser/Cargo.toml b/matugen-parser/Cargo.toml index 7258a83..c95d905 100644 --- a/matugen-parser/Cargo.toml +++ b/matugen-parser/Cargo.toml @@ -4,6 +4,7 @@ version = "0.1.0" edition = "2021" [dependencies] +colored = "2.2.0" nom = "7.1.3" serde = "1.0.209" string_cache = "0.8.7" diff --git a/matugen-parser/example/template.txt b/matugen-parser/example/template.txt index 86e6186..eaf3d48 100644 --- a/matugen-parser/example/template.txt +++ b/matugen-parser/example/template.txt @@ -1,4 +1,6 @@ -{{ colors colors }} +{{ colors.colors | a }} + +{{ colors.colors | b: c }} {{ colors.source_color.default.hex }} diff --git a/matugen-parser/src/errors/mod.rs b/matugen-parser/src/errors/mod.rs new file mode 100644 index 0000000..65d4b4b --- /dev/null +++ b/matugen-parser/src/errors/mod.rs @@ -0,0 +1,15 @@ +use parse::ParseError; + +pub mod parse; + +pub fn handle_error(f: Result>) { + if let Err(ref e) = f { + std::eprintln!("{}", e); + }; +} + +pub fn handle_error_panic(f: Result>) { + if let Err(ref e) = f { + panic!("{}", e); + }; +} diff --git a/matugen-parser/src/errors/parse.rs b/matugen-parser/src/errors/parse.rs new file mode 100644 index 0000000..87ec165 --- /dev/null +++ b/matugen-parser/src/errors/parse.rs @@ -0,0 +1,83 @@ +use std::fmt; + +use crate::parser::Parser; + +#[derive(Debug)] +pub struct ParseError<'a> { + pub err_type: ParseErrorTypes, + pub start: usize, + pub end: usize, + pub source: &'a str, + pub filename: &'a str, + pub line_number: u64, +} + +impl ParseError<'_> { + pub fn new<'a>( + err_type: ParseErrorTypes, + start: usize, + end: usize, + source: &'a str, + filename: &'a str, + line_number: u64, + ) -> ParseError<'a> { + ParseError { + err_type, + start, + end, + source, + filename, + line_number, + } + } + pub fn new_from_parser<'a>(err_type: ParseErrorTypes, parser: &Parser<'a>) -> ParseError<'a> { + ParseError { + err_type, + start: parser.last_bracket_start, + end: parser.prev_token_end, + source: parser.source, + filename: &parser.filename, + line_number: parser.lexer.cur_line, + } + } +} + +impl<'a> fmt::Display for ParseError<'a> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let err_msg = match self.err_type { + ParseErrorTypes::UnexpectedFilterArgumentToken => { + "Unexpected character in filter argument" + } + ParseErrorTypes::UnclosedBracket => "Unclosed bracket", + ParseErrorTypes::DoubleDot => "Double dot", + ParseErrorTypes::DoubleString => "Double string", + }; + let mut str = "".to_string(); + + let span = self.source.get(self.start..self.end).unwrap_or(""); + + for line in span.lines() { + str.push_str(&format!("{} \x1b[94m|\x1b[0m {}\n", self.line_number, line)) + } + + write!( + f, + "\n\u{1b}[2;30;41m ERROR \u{1b}[0m\u{1b}[2;30;47m {} \u{1b}[0m\n\x1b[94m-->\x1b[0m {}:{}:{}\n{}\n", + err_msg, 
self.filename, self.start, self.end, str, + ) + + // write!( + // f, + // "\n\u{1b}[1;31m[ERROR] {} {}..{}: {}:\u{1b}[0m\n{}\n", + // self.filename, self.start, self.end, err_msg, span, + // ) + } +} + +#[derive(Debug)] +pub enum ParseErrorTypes { + UnclosedBracket, + DoubleDot, + DoubleString, + UnexpectedFilterArgumentToken, +} diff --git a/matugen-parser/src/lib.rs b/matugen-parser/src/lib.rs new file mode 100644 index 0000000..c3ec6da --- /dev/null +++ b/matugen-parser/src/lib.rs @@ -0,0 +1,4 @@ +pub mod errors; +pub mod lexer; +pub mod node; +pub mod parser; diff --git a/matugen-parser/src/main.rs b/matugen-parser/src/main.rs index 8884f14..dced9b8 100644 --- a/matugen-parser/src/main.rs +++ b/matugen-parser/src/main.rs @@ -4,6 +4,7 @@ use std::path::PathBuf; use lexer::Lexer; use parser::Parser; +mod errors; mod lexer; mod node; mod parser; diff --git a/matugen-parser/src/parser.rs b/matugen-parser/src/parser.rs deleted file mode 100644 index 3a51b0d..0000000 --- a/matugen-parser/src/parser.rs +++ /dev/null @@ -1,449 +0,0 @@ -#![allow(unused_variables)] - -#[derive(Debug)] -pub struct ParseError<'a> { - pub err_type: ParseErrorTypes, - pub start: usize, - pub end: usize, - pub source: &'a str, - pub filename: &'a str, - pub line_number: u64, -} - -impl<'a> fmt::Display for ParseError<'a> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let err_msg = match self.err_type { - ParseErrorTypes::UnexpectedFilterArgumentToken => { - "Unexpected character in filter argument" - } - ParseErrorTypes::UnclosedBracket => "Unclosed bracket", - ParseErrorTypes::DoubleDot => "Double dot", - ParseErrorTypes::DoubleString => "Double string", - }; - let mut str = "".to_string(); - - let span = self.source.get(self.start..self.end).unwrap_or(""); - - for line in span.lines() { - str.push_str(&format!("{} \x1b[94m|\x1b[0m {}\n", self.line_number, line)) - } - - write!( - f, - "\n\u{1b}[2;30;41m ERROR \u{1b}[0m\u{1b}[2;30;47m {} \u{1b}[0m\n\x1b[94m-->\x1b[0m {}:{}:{}\n{}\n", - err_msg, self.filename, self.start, self.end, str, - ) - - // write!( - // f, - // "\n\u{1b}[1;31m[ERROR] {} {}..{}: {}:\u{1b}[0m\n{}\n", - // self.filename, self.start, self.end, err_msg, span, - // ) - } -} - -#[derive(Debug)] -pub enum ParseErrorTypes { - UnclosedBracket, - DoubleDot, - DoubleString, - UnexpectedFilterArgumentToken, -} - -use std::fmt; -use std::iter::Filter; - -use crate::lexer::{Kind, Lexer, Token, TokenValue}; -use crate::node::{FilterDefinition, KeywordDefinition, Node, Program, Statement}; -/// A parser for turning a stream of tokens into a Abstract Syntax Tree. -#[derive(Debug)] -pub struct Parser<'a> { - source: &'a str, - filename: &'a str, - lexer: Lexer<'a>, - - /// Current Token consumed from the lexer - cur_token: Token, - /// The end range of the previous token - prev_token_end: usize, - - opened: bool, - closed: bool, - seen_dot: bool, - - last_bracket_start: usize, -} - -impl<'a> Parser<'a> { - /// Create a new parser. 
- pub fn new(source: &'a str, filename: &'a str) -> Parser<'a> { - let mut lexer = Lexer::new(&source); - Parser { - source, - filename, - cur_token: lexer.start(), - lexer, - prev_token_end: 0, - opened: false, - closed: false, - seen_dot: false, - last_bracket_start: 0, - } - } - - pub fn parse(&mut self) -> Program { - let end = self.source.len(); - let statments = self.get_keywords(); - Program { - node: Node { start: 0, end }, - body: statments, - } - } - - // pub fn parse(&mut self) -> Template { - // Template { - // node: Node { - // start: 0, - // end: self.source.len(), - // }, - // body: vec![], - // } - // } - - // fn parse_keyword_statement(&mut self) -> Statement { - // let node = self.start_node(); - // // NOTE: the token returned from the lexer is `Kind::Debugger`, we'll fix this later. - // self.bump_any(); - // Statement::KeywordDeclarationStatement { - // 0: KeywordDeclaration { - // node: self.finish_node(node), - // }, - // } - // } - - fn start_node(&mut self) -> Node { - let token = self.cur_token(); - Node::new(token.start, 0) - } - - fn finish_node(&self, node: Node) -> Node { - Node::new(node.start, self.prev_token_end) - } - - fn cur_token(&self) -> &Token { - &self.cur_token - } - - fn cur_kind(&self) -> &Kind { - &self.cur_token.kind - } - - fn cur_val(&self) -> &TokenValue { - &self.cur_token.value - } - - /// Checks if the current index has token `Kind` - fn at(&self, kind: Kind) -> bool { - self.cur_kind() == &kind - } - - /// Advance if we are at `Kind` - fn bump(&mut self, kind: Kind) { - if self.at(kind) { - self.advance(); - } - } - - /// Advance any token - fn bump_any(&mut self) { - self.advance(); - } - - fn bump_until_not_at(&mut self, kind: Kind) { - while self.cur_kind() == &kind && !self.at(Kind::Eof) { - self.bump_any() - } - } - - /// Advance any token - fn bump_while_not(&mut self, kind: Kind) { - while self.cur_kind() != &kind && !self.at(Kind::Eof) { - self.advance(); - } - } - - /// Advance and return true if we are at `Kind`, return false otherwise - fn eat(&mut self, kind: Kind) -> bool { - if self.at(kind) { - self.advance(); - return true; - } - false - } - - /// Advance and return true if we are at `Kind`, return false otherwise - fn eat_ignore_spaces(&mut self, kind: Kind) -> bool { - self.bump_until_not_at(Kind::Space); - - if self.at(kind) { - self.advance(); - return true; - } - false - } - - /// Move to the next token - fn advance(&mut self) { - let token = self.lexer.next_token(); - self.prev_token_end = self.cur_token.end; - self.cur_token = token; - - println!("self at : {:?}", self.cur_token()); - } - - pub fn get_closing(&mut self) -> Result<(), ParseError> { - self.bump_any(); - if self.eat(Kind::RBracket) { - self.closed = true; - self.opened = false; - println!( - "{}..{}: closed fine without filter", - self.last_bracket_start, self.prev_token_end - ); - Ok(()) - } else { - Err(ParseError { - err_type: ParseErrorTypes::UnclosedBracket, - start: self.last_bracket_start, - end: self.prev_token_end, - source: self.source, - filename: &self.filename, - line_number: self.lexer.cur_line, - }) - } - } - - pub fn get_keywords(&mut self) -> Vec { - let mut vec: Vec = vec![]; - - while !self.at(Kind::Eof) { - if !self.at(Kind::Lbracket) { - self.bump_until_not_at(Kind::Lbracket); - } - - // We would only get the second bracket at the start without the -1, - // the opening will ALWAYS have two brackets unlike the closing, which - // might have an error inside of it (so we dont look ahead for the closing). 
- self.last_bracket_start = self.get_opening().unwrap() - 1; - let start = self.start_node(); - - let mut strings: Vec = vec![]; - - let res = self.collect_strings(&mut strings); - - if let Err(e) = res { - panic!("{}", format!("{}", e)); - } - - vec.push(Statement::KeywordDefinition(Box::new(KeywordDefinition { - node: self.finish_node(start), - keywords: strings, - filters: None, - }))); - } - vec - } - - fn get_filter(&mut self) -> Result, ParseError> { - if self.eat(Kind::Bar) { - println!("ok"); - } else { - // return Err(ParseError { - // err_type: ParseErrorTypes::UnclosedBracket, - // start: self.last_bracket_start, - // end: self.prev_token_end, - // source: self.source, - // }); - } - let start = self.start_node(); - - // FilterDefinition { - // node: self.finish_node(start), - // filter_name: todo!(), - // arguments: todo!(), - // }; - self.bump_while_not(Kind::String); - let name = self.cur_token().clone().value; - - self.advance(); - - if self.eat(Kind::RBracket) { - println!("no filter args"); - self.get_closing(); - return Ok(None); - } - - let node = self.finish_node(start); - - let res = self.collect_filter_args(); - if let Err(ref e) = res { - eprintln!("{}", e); - } - - Ok(Some(FilterDefinition { - node, - filter_name: name, - arguments: res.unwrap(), - })) - // self.bump_while_not(Kind::RBracket); - } - - fn collect_filter_args(&mut self) -> Result, ParseError> { - let mut arguments: Vec = vec![]; - - if !self.eat_ignore_spaces(Kind::Colon) { - println!("not: {:?}", self.cur_token()); - self.bump_while_not(Kind::RBracket) - } else { - // while !self.at(Kind::RBracket) { - // match self.cur_kind() { - // Kind::String => arguments.push(&self.cur_token.value), - // Kind::Number => todo!(), - // _ => {} - // } - // } - loop { - match self.cur_kind() { - Kind::Space => { - self.bump_until_not_at(Kind::Space); - } - Kind::String => { - arguments.push(self.cur_token.value.clone()); - self.bump(Kind::String) - } - Kind::Number => { - arguments.push(self.cur_token.value.clone()); - self.bump(Kind::Number) - } - Kind::RBracket => { - println!("herer"); - break; - } - _ => { - return Err(ParseError { - err_type: ParseErrorTypes::UnexpectedFilterArgumentToken, - start: self.last_bracket_start, - end: self.prev_token_end, - source: self.source, - filename: &self.filename, - line_number: self.lexer.cur_line, - }) - } - } - } - // return Err(ParseError { - // err_type: ParseErrorTypes::MissingFilterColon, - // start: self.last_bracket_start, - // end: self.prev_token_end, - // source: &self.source, - // }); - } - println!("arguments: {:?}", arguments); - Ok(arguments) - } - - // Returns true if filter is used - fn collect_strings(&mut self, strings: &mut Vec) -> Result<(), ParseError> { - // Always first string, what comes after we cant know - self.bump_while_not(Kind::String); - strings.push(self.cur_token.clone().value); - - self.bump_any(); - - while !&self.closed && !self.at(Kind::Eof) { - match &self.cur_kind() { - Kind::Dot => { - if self.seen_dot && self.eat(Kind::Dot) { - self.seen_dot = false; - return Err(ParseError { - err_type: ParseErrorTypes::DoubleDot, - start: self.last_bracket_start, - end: self.prev_token_end + 1, - source: self.source, - filename: &self.filename, - line_number: self.lexer.cur_line, - }); - } else { - self.seen_dot = true; - self.bump(Kind::Dot); - } - } - Kind::String => { - if self.seen_dot { - strings.push(self.cur_token.clone().value); - self.bump(Kind::String); - self.seen_dot = false; - } else { - self.bump_while_not(Kind::RBracket); - return 
Err(ParseError { - err_type: ParseErrorTypes::DoubleString, - start: self.last_bracket_start, - end: self.prev_token_end + 1, - source: self.source, - filename: &self.filename, - line_number: self.lexer.cur_line, - }); - } - } - Kind::Bar => { - let res = self.get_filter(); - if let Err(e) = res { - eprintln!("{}", e) - } - if self.eat_ignore_spaces(Kind::RBracket) { - return self.get_closing(); - } else { - self.bump_until_not_at(Kind::RBracket); - return self.get_closing(); - } - } - Kind::RBracket => { - return self.get_closing(); - // if self.eat(Kind::RBracket) { - // self.closed = true; - // self.opened = false; - // println!("closed without filter") - // } else { - // println!("fucked the closing"); - // break; - // } - } - Kind::Space => self.bump(Kind::Space), - Kind::NewLine => self.bump(Kind::NewLine), - Kind::Identifier => self.bump(Kind::Identifier), - _ => { - println!("{:?}", self.cur_token()); - } - } - } - Ok(()) - } - - fn get_opening(&mut self) -> Option { - let mut start = self.cur_token().start; - - self.bump_any(); - - while !self.opened { - if self.eat(Kind::Lbracket) { - self.opened = true; - self.closed = false; - } else if self.eat(Kind::Eof) { - return None; - } - self.bump_while_not(Kind::Lbracket); - start = self.cur_token().start; - } - Some(start) - } -} diff --git a/matugen-parser/src/parser/keywords.rs b/matugen-parser/src/parser/keywords.rs new file mode 100644 index 0000000..46ceb3f --- /dev/null +++ b/matugen-parser/src/parser/keywords.rs @@ -0,0 +1,239 @@ +use colored::Colorize; + +use crate::{ + errors::{ + handle_error, handle_error_panic, + parse::{ParseError, ParseErrorTypes}, + }, + lexer::{Kind, TokenValue}, + node::{FilterDefinition, KeywordDefinition, Statement}, +}; + +use super::Parser; + +impl Parser<'_> { + fn get_opening(&mut self) -> Option { + let mut start = self.cur_token().start; + + self.bump_any(); + + while !self.opened { + if self.eat(Kind::Lbracket) { + self.opened = true; + self.closed = false; + } else if self.eat(Kind::Eof) { + return None; + } + self.bump_while_not(Kind::Lbracket); + start = self.cur_token().start; + } + Some(start) + } + + pub fn get_closing(&mut self) -> Result<(), ParseError> { + println!("STARTING TO CLOSE"); + self.bump_any(); + if self.eat(Kind::RBracket) { + self.closed = true; + self.opened = false; + Ok(()) + } else { + Err(ParseError::new_from_parser( + ParseErrorTypes::UnclosedBracket, + &self, + )) + } + } + + pub fn get_keywords(&mut self) -> Vec { + let mut vec: Vec = vec![]; + + while !self.at(Kind::Eof) { + if !self.at(Kind::Lbracket) { + self.bump_until_not_at(Kind::Lbracket); + } + + // We would only get the second bracket at the start without the -1, + // the opening will ALWAYS have two brackets unlike the closing, which + // might have an error inside of it (so we dont look ahead for the closing). 
+ self.last_bracket_start = self.get_opening().unwrap() - 1; + let start = self.start_node(); + + let mut strings: Vec = vec![]; + let mut filters: Vec = vec![]; + + handle_error_panic(self.collect_strings(&mut strings, &mut filters)); + + vec.push(Statement::KeywordDefinition(Box::new(KeywordDefinition { + node: self.finish_node(start), + keywords: strings, + filters: { + if filters.len() >= 1 { + Some(filters) + } else { + None + } + }, + }))); + } + vec + } + + fn get_filter(&mut self) -> Result, ParseError> { + let start = self.start_node(); + + self.bump_while_not(Kind::String); + + let name = self.cur_token().clone().value; + + let mut filter_args: Vec = vec![]; + + handle_error(self.collect_filter_args(&mut filter_args)); + + if self.at(Kind::RBracket) { + handle_error(self.get_closing()); + return Ok(Some(FilterDefinition { + node: self.finish_node(start), + filter_name: name, + arguments: filter_args, + })); + } + + Ok(Some(FilterDefinition { + node: self.finish_node(start), + filter_name: name, + arguments: filter_args, + })) + // self.bump_while_not(Kind::RBracket); + } + + fn collect_filter_args( + &mut self, + arguments: &mut Vec, + ) -> Result, ParseError> { + // THIS SHOULD BE THE FILTER NAME + self.eat(Kind::String); + + if !self.eat_ignore_spaces(Kind::Colon) { + println!( + "{}", + format!("DOESNT HAVE ANY ARGS: {:?}", self.cur_token()) + .red() + .bold() + ); + self.bump_while_not(Kind::RBracket) + } else { + // while !self.at(Kind::RBracket) { + // match self.cur_kind() { + // Kind::String => arguments.push(&self.cur_token.value), + // Kind::Number => todo!(), + // _ => {} + // } + // } + loop { + match self.cur_kind() { + Kind::Space => { + self.bump_until_not_at(Kind::Space); + } + Kind::String => { + arguments.push(self.cur_token.value.clone()); + self.bump(Kind::String) + } + Kind::Number => { + arguments.push(self.cur_token.value.clone()); + self.bump(Kind::Number) + } + Kind::RBracket => { + break; + } + _ => { + return Err(ParseError::new_from_parser( + ParseErrorTypes::UnexpectedFilterArgumentToken, + &self, + )) + } + } + } + // return Err(ParseError { + // err_type: ParseErrorTypes::MissingFilterColon, + // start: self.last_bracket_start, + // end: self.prev_token_end, + // source: &self.source, + // }); + } + println!("arguments: {:?}", arguments); + Ok(arguments.to_vec()) + } + + // Returns true if filter is used + fn collect_strings( + &mut self, + strings: &mut Vec, + filters: &mut Vec, + ) -> Result<(), ParseError> { + // Always first string, what comes after we cant know + self.bump_while_not(Kind::String); + strings.push(self.cur_val().clone()); + + self.bump_any(); + + while !&self.closed && !self.at(Kind::Eof) { + match &self.cur_kind() { + Kind::Dot => { + if self.seen_dot && self.eat(Kind::Dot) { + self.seen_dot = false; + return Err(ParseError::new_from_parser( + ParseErrorTypes::DoubleDot, + &self, + )); + } else { + self.seen_dot = true; + self.bump(Kind::Dot); + } + } + Kind::String => { + if self.seen_dot { + strings.push(self.cur_token.clone().value); + self.bump(Kind::String); + self.seen_dot = false; + } else { + self.bump_while_not(Kind::RBracket); + return Err(ParseError::new_from_parser( + ParseErrorTypes::DoubleString, + &self, + )); + } + } + Kind::Bar => { + let res = self.get_filter(); + match res { + Ok(v) => { + if let Some(def) = v { + filters.push(def); + } + } + Err(e) => eprintln!("{}", e), + } + } + Kind::RBracket => { + return self.get_closing(); + // if self.eat(Kind::RBracket) { + // self.closed = true; + // self.opened = 
false; + // println!("closed without filter") + // } else { + // println!("fucked the closing"); + // break; + // } + } + Kind::Space => self.bump(Kind::Space), + Kind::NewLine => self.bump(Kind::NewLine), + Kind::Identifier => self.bump(Kind::Identifier), + _ => { + println!("{:?}", self.cur_token()); + } + } + } + Ok(()) + } +} diff --git a/matugen-parser/src/parser/language.rs b/matugen-parser/src/parser/language.rs new file mode 100644 index 0000000..e69de29 diff --git a/matugen-parser/src/parser/mod.rs b/matugen-parser/src/parser/mod.rs new file mode 100644 index 0000000..386b9e0 --- /dev/null +++ b/matugen-parser/src/parser/mod.rs @@ -0,0 +1,150 @@ +pub mod keywords; +pub mod language; + +use colored::Colorize; +use std::cell::RefCell; +use std::fmt; +use std::iter::Filter; +use std::rc::Rc; + +use crate::errors::parse::{ParseError, ParseErrorTypes}; +use crate::errors::{handle_error, handle_error_panic}; + +use crate::lexer::{Kind, Lexer, Token, TokenValue}; +use crate::node::{FilterDefinition, KeywordDefinition, Node, Program, Statement}; + +#[derive(Debug)] +pub struct Parser<'a> { + pub source: &'a str, + pub filename: &'a str, + pub lexer: Lexer<'a>, + + /// Current Token consumed from the lexer + pub cur_token: Token, + /// The end range of the previous token + pub prev_token_end: usize, + + pub opened: bool, + pub closed: bool, + pub seen_dot: bool, + + pub last_bracket_start: usize, +} + +impl<'a> Parser<'a> { + /// Create a new parser. + pub fn new(source: &'a str, filename: &'a str) -> Parser<'a> { + let mut lexer = Lexer::new(&source); + Parser { + source, + filename, + cur_token: lexer.start(), + lexer, + prev_token_end: 0, + opened: false, + closed: false, + seen_dot: false, + last_bracket_start: 0, + } + } + + pub fn parse(&mut self) -> Program { + let end = self.source.len(); + let statments = self.get_keywords(); + Program { + node: Node { start: 0, end }, + body: statments, + } + } + + // fn parse_keyword_statement(&mut self) -> Statement { + // let node = self.start_node(); + // // NOTE: the token returned from the lexer is `Kind::Debugger`, we'll fix this later. 
+ // self.bump_any(); + // Statement::KeywordDeclarationStatement { + // 0: KeywordDeclaration { + // node: self.finish_node(node), + // }, + // } + // } + + fn start_node(&mut self) -> Node { + let token = self.cur_token(); + Node::new(token.start, 0) + } + + fn finish_node(&self, node: Node) -> Node { + Node::new(node.start, self.prev_token_end) + } + + fn cur_token(&self) -> &Token { + &self.cur_token + } + + fn cur_kind(&self) -> &Kind { + &self.cur_token.kind + } + + fn cur_val(&self) -> &TokenValue { + &self.cur_token.value + } + + /// Checks if the current index has token `Kind` + fn at(&self, kind: Kind) -> bool { + self.cur_kind() == &kind + } + + /// Advance if we are at `Kind` + fn bump(&mut self, kind: Kind) { + if self.at(kind) { + self.advance(); + } + } + + /// Advance any token + fn bump_any(&mut self) { + self.advance(); + } + + fn bump_until_not_at(&mut self, kind: Kind) { + while self.cur_kind() == &kind && !self.at(Kind::Eof) { + self.bump_any() + } + } + + /// Advance any token + fn bump_while_not(&mut self, kind: Kind) { + while self.cur_kind() != &kind && !self.at(Kind::Eof) { + self.advance(); + } + } + + /// Advance and return true if we are at `Kind`, return false otherwise + fn eat(&mut self, kind: Kind) -> bool { + if self.at(kind) { + self.advance(); + return true; + } + false + } + + /// Advance and return true if we are at `Kind`, return false otherwise + fn eat_ignore_spaces(&mut self, kind: Kind) -> bool { + self.bump_until_not_at(Kind::Space); + + if self.at(kind) { + self.advance(); + return true; + } + false + } + + /// Move to the next token + fn advance(&mut self) { + let token = self.lexer.next_token(); + self.prev_token_end = self.cur_token.end; + self.cur_token = token.into(); + + println!("self at : {:?}", self.cur_token()); + } +}
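
Usage sketch for the new errors module (illustrative, not part of the patch). It assumes the crate is importable as matugen_parser via the new lib.rs, and that handle_error/handle_error_panic are generic over the Ok type, i.e. fn handle_error<T>(f: Result<T, ParseError<'_>>), which matches how keywords.rs calls them with both Result<(), _> and Result<Vec<TokenValue>, _>:

use matugen_parser::errors::handle_error;
use matugen_parser::errors::parse::{ParseError, ParseErrorTypes};

fn main() {
    let source = "{{ colors.colors | b: c }}";

    // Construct an error by hand; ParseError::new_from_parser fills the same
    // fields from the parser's bookkeeping (last_bracket_start, prev_token_end,
    // source, filename, lexer.cur_line).
    let failed: Result<(), ParseError> = Err(ParseError::new(
        ParseErrorTypes::UnclosedBracket,
        0,            // span start: the offending `{{`
        source.len(), // span end: everything scanned so far
        source,
        "template.txt",
        1,
    ));

    // Prints the colorized ERROR banner through the Display impl and carries on;
    // handle_error_panic would panic with the same message instead.
    handle_error(failed);
}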
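
A second sketch, driving the parser end-to-end over the filter forms that example/template.txt now exercises. This assumes the node fields used here (body, keywords, filters) are public, that TokenValue derives Debug (the parser's own debug prints rely on it), and it deliberately ends the input right at the final closing braces:

use matugen_parser::node::Statement;
use matugen_parser::parser::Parser;

fn main() {
    // A bare filter and a filter with one colon-separated argument,
    // mirroring the template change; the input ends at the last `}}`.
    let source = "{{ colors.colors | a }}\n\n{{ colors.colors | b: c }}";

    let mut parser = Parser::new(source, "template.txt");
    let program = parser.parse();

    for statement in &program.body {
        if let Statement::KeywordDefinition(def) = statement {
            println!(
                "keywords: {:?}, filters attached: {}",
                def.keywords,
                def.filters.as_ref().map_or(0, |filters| filters.len()),
            );
        }
    }
}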
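
Since the patch pulls in colored 2.2.0 (already used for the debug output in keywords.rs) while the Display impl for ParseError still hand-writes ANSI escapes, here is a rough equivalent of that banner expressed through colored, purely as an illustration of what those escape sequences encode:

use colored::Colorize;

// Rebuilds the `ERROR <message> --> file:start:end` header that the Display
// impl in errors/parse.rs emits with raw escape codes. The `gutter` argument
// stands in for the per-line `N | source` block built from the error span.
fn banner(err_msg: &str, filename: &str, start: usize, end: usize, gutter: &str) -> String {
    let label = format!(" {} ", err_msg);
    format!(
        "\n{}{}\n{} {}:{}:{}\n{}\n",
        " ERROR ".black().on_red().dimmed(),        // \x1b[2;30;41m ERROR \x1b[0m
        label.as_str().black().on_white().dimmed(), // \x1b[2;30;47m <msg> \x1b[0m
        "-->".bright_blue(),                        // \x1b[94m-->\x1b[0m
        filename,
        start,
        end,
        gutter,
    )
}

fn main() {
    println!(
        "{}",
        banner("Unclosed bracket", "template.txt", 0, 23, "1 | {{ colors.colors | a }}")
    );
}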