diff --git a/lsp/src/analysis/scope_analyzer.rs b/lsp/src/analysis/scope_analyzer.rs index c255f38..659b451 100644 --- a/lsp/src/analysis/scope_analyzer.rs +++ b/lsp/src/analysis/scope_analyzer.rs @@ -318,10 +318,10 @@ impl ASTVisitor for ScopeAnalyzer { _max: &Expression, incr: &Option, statements: &[Statement], - span: Span, + _span: Span, ) { // TODO: figure out how to have "invisible scopes" - // self.push_scope("__repeat".to_owned(), span); + // self.push_scope("__repeat".to_owned(), _span); if let Some(incr) = incr { self.insert_symbol(incr, Symbol::Constant { name: incr.clone() }); } diff --git a/lsp/src/analysis/visitor.rs b/lsp/src/analysis/visitor.rs index 9ebbf76..061be85 100644 --- a/lsp/src/analysis/visitor.rs +++ b/lsp/src/analysis/visitor.rs @@ -203,10 +203,8 @@ pub trait ASTVisitor { self.visit_term(tok, expr1, expr2, expression.span) } ExpressionKind::Bank(expr) => self.visit_bank(expr, expression.span), - ExpressionKind::SizeOf(expr) => self.visit_sizeof(expr, expression.span), ExpressionKind::WordOp(tok, expr) => self.visit_word_op(tok, expr, expression.span), ExpressionKind::Match(expr1, expr2) => self.visit_match(expr1, expr2, expression.span), - ExpressionKind::Def(tok) => self.visit_def(tok, expression.span), ExpressionKind::Identifier(ident) => self.visit_identifier(ident, expression.span), ExpressionKind::UnnamedLabelReference(reference) => { self.visit_unnamed_label_reference(reference, expression.span) @@ -219,8 +217,7 @@ pub trait ASTVisitor { ExpressionKind::Call(callee, arguments) => { self.visit_call(callee, arguments, expression.span) }, - ExpressionKind::Ident(expr) => self.visit_ident(expr, expression.span), - ExpressionKind::Sprintf(str, args) => self.visit_sprintf(str, args, expression.span), + ExpressionKind::PseudoFunction(name, args) => self.visit_pseudo_function(name, args, expression.span), } } @@ -298,20 +295,16 @@ pub trait ASTVisitor { fn visit_bank(&mut self, tok: &Expression, _span: Span) { self.visit_expression(tok); } 
- fn visit_sizeof(&mut self, expr: &Expression, _span: Span) { - self.visit_expression(expr); - } fn visit_word_op(&mut self, _tok: &Token, expr: &Expression, _span: Span) { self.visit_expression(expr); } fn visit_match(&mut self, _expr1: &Expression, _expr2: &Expression, _span: Span) {} - fn visit_ident(&mut self, _ident: &Expression, _span: Span) {} - fn visit_sprintf(&mut self, _str: &Expression, args: &[Expression], _span: Span) { + fn visit_pseudo_function(&mut self, _name: &Token, args: &[Expression], _span: Span) { for arg in args.iter() { self.visit_expression(arg); } } - fn visit_def(&mut self, _tok: &Token, _span: Span) {} + fn visit_identifier(&mut self, _ident: &str, _span: Span) {} fn visit_unnamed_label_reference(&mut self, _reference: &i8, _span: Span) {} fn visit_string(&mut self, _str: &str, _span: Span) {} diff --git a/lsp/src/data/files.rs b/lsp/src/data/files.rs index a287bb3..7f35fcb 100644 --- a/lsp/src/data/files.rs +++ b/lsp/src/data/files.rs @@ -173,7 +173,7 @@ impl Files { let mut diagnostics = vec![]; let mut includes_changed = false; let parse_result = { - let mut file = self.get_mut(file_id); + let file = self.get_mut(file_id); file.parse() }; diff --git a/parser/src/data/token_type.rs b/parser/src/data/token_type.rs index 58be879..6120e59 100644 --- a/parser/src/data/token_type.rs +++ b/parser/src/data/token_type.rs @@ -43,11 +43,7 @@ pub enum TokenType { LeftBrace, RightBrace, Bank, - SizeOf, Match, - Ident, - Sprintf, - Def, UnnamedLabelReference, Extract, WordOp, diff --git a/parser/src/parser.rs b/parser/src/parser.rs index a37529e..a4d54b2 100644 --- a/parser/src/parser.rs +++ b/parser/src/parser.rs @@ -122,17 +122,14 @@ pub enum ExpressionKind { SimpleExpression(Token, Box, Box), Term(TokenType, Box, Box), Bank(Box), - SizeOf(Box), Identifier(String), Match(Box, Box), - Def(Token), String(String), Extract(Token, Box, Box), TokenList(Vec), Call(String, Vec), WordOp(Token, Box), - Ident(Box), - Sprintf(Box, Vec), + 
PseudoFunction(Token, Vec), } #[derive(Debug, Clone, PartialEq)] @@ -1064,17 +1061,6 @@ impl<'a> Parser<'a> { span: Span::new(start, end), }); } - if match_token!(self.tokens, TokenType::SizeOf) { - self.consume_token(TokenType::LeftParen)?; - let expr = self.parse_expression()?; - self.consume_token(TokenType::RightParen)?; - let end = self.mark_end(); - - return Ok(Expression { - kind: ExpressionKind::SizeOf(Box::from(expr)), - span: Span::new(start, end), - }); - } if match_token!(self.tokens, TokenType::WordOp) { let variant = self.last(); self.consume_token(TokenType::LeftParen)?; @@ -1100,47 +1086,9 @@ impl<'a> Parser<'a> { span: Span::new(start, end), }); } - if match_token!(self.tokens, TokenType::Ident) { - self.consume_token(TokenType::LeftParen)?; - let expr = self.parse_expression()?; - self.consume_token(TokenType::RightParen)?; - let end = self.mark_end(); - - return Ok(Expression { - kind: ExpressionKind::Ident(Box::from(expr)), - span: Span::new(start, end), - }); - } - if match_token!(self.tokens, TokenType::Sprintf) { - self.consume_token(TokenType::LeftParen)?; - let expr = self.parse_expression()?; - let mut args = vec![]; - while match_token!(self.tokens, TokenType::Comma) { - args.push(self.parse_expression()?); - } - self.consume_token(TokenType::RightParen)?; - let end = self.mark_end(); - - return Ok(Expression { - kind: ExpressionKind::Sprintf(Box::from(expr), args), - span: Span::new(start, end), - }); - } if match_token!(self.tokens, TokenType::Extract) { return self.parse_extract(); } - if match_token!(self.tokens, TokenType::Def) { - self.consume_token(TokenType::LeftParen)?; - self.tokens.advance(); - let tok = self.last(); - self.consume_token(TokenType::RightParen)?; - let end = self.mark_end(); - - return Ok(Expression { - kind: ExpressionKind::Def(tok), - span: Span::new(start, end), - }); - } if match_token!(self.tokens, TokenType::Caret) { let right = self.parse_factor()?; let end = self.mark_end(); @@ -1223,20 +1171,48 @@ 
impl<'a> Parser<'a> { }); } if check_token!(self.tokens, TokenType::Macro) { - let start = self.mark_start(); - let macro_name = self.consume_token(TokenType::Macro)?.lexeme; - let end = self.mark_end(); - return match macro_name.as_str() { - ".asize" | ".isize" => Ok(Expression { - kind: ExpressionKind::Literal(macro_name), - span: Span::new(start, end), - }), + let next = self.tokens.peek().unwrap(); + + return match next.lexeme.as_str() { + ".addrsize" | ".bank" | ".bankbyte" | ".blank" | ".cap" | ".capability" + | ".concat" | ".const" | ".def" | ".defined" | ".definedmacro" | ".hibyte" + | ".hiword" | ".ident" | ".ismnem" | ".ismnemonic" | ".max" | ".min" | ".ref" + | ".referenced" | ".sizeof" | ".sprintf" | ".strat" | ".string" | ".strlen" | ".tcount" => { + self.parse_pseudo_function() + } + ".asize" | ".isize" => { + let start = self.mark_start(); + let macro_name = self.consume_token(TokenType::Macro)?.lexeme; + let end = self.mark_end(); + Ok(Expression { + kind: ExpressionKind::Literal(macro_name), + span: Span::new(start, end), + }) + } _ => Err(ParseError::UnexpectedToken(self.peek()?)), }; } Err(ParseError::UnexpectedToken(self.peek()?)) } + fn parse_pseudo_function(&mut self) -> Result { + let start = self.mark_start(); + let macro_name = self.consume_token(TokenType::Macro)?; + self.consume_token(TokenType::LeftParen)?; + let mut args = vec![]; + args.push(self.parse_expression()?); + while match_token!(self.tokens, TokenType::Comma) { + args.push(self.parse_expression()?); + } + self.consume_token(TokenType::RightParen)?; + let end = self.mark_end(); + + Ok(Expression { + kind: ExpressionKind::PseudoFunction(macro_name, args), + span: Span::new(start, end), + }) + } + fn parse_macro_parameters(&mut self) -> Result> { let mut parameters = vec![]; if !check_token!(self.tokens, TokenType::EOL) { diff --git a/parser/src/tokenizer.rs b/parser/src/tokenizer.rs index c2cbcae..d5d06e2 100644 --- a/parser/src/tokenizer.rs +++ b/parser/src/tokenizer.rs @@ 
-78,14 +78,10 @@ impl<'a> Tokenizer<'a> { ".xor" => self.make_token(TokenType::Xor), ".not" => self.make_token(TokenType::Not), ".bank" => self.make_token(TokenType::Bank), - ".sizeof" => self.make_token(TokenType::SizeOf), ".loword" | ".hiword" | ".bankbyte" | ".lobyte" | ".hibyte" => { self.make_token(TokenType::WordOp) } ".match" | ".xmatch" => self.make_token(TokenType::Match), - ".ident" => self.make_token(TokenType::Ident), - ".sprintf" => self.make_token(TokenType::Sprintf), - ".def" | ".defined" => self.make_token(TokenType::Def), ".left" | ".mid" | ".right" => self.make_token(TokenType::Extract), _ => self.make_token(TokenType::Macro), }))