diff --git a/multiply_add.sus b/multiply_add.sus
index 0b41ef4..7e780dd 100644
--- a/multiply_add.sus
+++ b/multiply_add.sus
@@ -26,6 +26,27 @@ module multiply_add2 : int a, int b, int c -> int r {
 }
 
+module parallel_mul_add_att : int a, int b -> int p, int q {
+    int a2 = a * a;
+    int b2 = b + b;
+    @
+    int a3 = a * a2;
+    int b3 = b + b2;
+    @
+    p = a3 * a;
+    q = b3 + b;
+    @ //?
+}
+module parallel_mul_add_reg : int a, int b -> int p, int q {
+    reg int a2 = a * a;
+    reg int a3 = a * a2 + 3;
+    reg p = a * a3;
+
+    reg int b2 = b + b;
+    reg int b3 = b + b2;
+    reg q = b + b3;
+}
+
 /* a module to test the syntax */
 module MultiplyAdd : i32 a, i32 b, i32 c -> i32 result {
     //module beelqzd
diff --git a/src/ast.rs b/src/ast.rs
index f8f4746..2cb4045 100644
--- a/src/ast.rs
+++ b/src/ast.rs
@@ -1,9 +1,11 @@
 use num_bigint::BigUint;
 
-use crate::tokenizer::{TokenTypeIdx, TokenExtraInfo};
+use crate::{tokenizer::{TokenTypeIdx, TokenExtraInfo}, errors::{ParsingError, error_basic_str}};
 
 use core::ops::Range;
+use std::collections::HashMap;
+
 
 // Token span. Indices are INCLUSIVE
 #[derive(Clone,Copy,Debug,PartialEq,Eq)]
 pub struct Span(pub usize, pub usize);
@@ -93,6 +95,13 @@ impl IdentifierIdx {
             None
         }
     }
+    pub fn get_global(&self) -> Option<TokenExtraInfo> {
+        if self.name_idx >= GLOBAL_IDENTIFIER_OFFSET {
+            Some((self.name_idx - GLOBAL_IDENTIFIER_OFFSET) as TokenExtraInfo)
+        } else {
+            None
+        }
+    }
 }
 
 #[derive(Debug, Clone, Copy)]
@@ -101,10 +110,19 @@ pub struct IdentifierToken {
     pub name_idx : TokenExtraInfo
 }
 
+
+#[derive(Debug, Clone)]
+pub enum TypeExpression {
+    Named(usize),
+    Array(Box<(SpanTypeExpression, SpanExpression)>)
+}
+
+pub type SpanTypeExpression = (TypeExpression, Span);
+
 #[derive(Debug,Clone)]
 pub struct SignalDeclaration {
     pub span : Span,
-    pub typ : SpanExpression,
+    pub typ : SpanTypeExpression,
     pub name_idx : TokenExtraInfo,
     pub identifier_type : IdentifierType
 }
@@ -126,7 +144,7 @@ pub enum Expression {
     Constant(Value),
     UnaryOp(Box<(Operator, usize/*Operator token */, SpanExpression)>),
     BinOp(Box<(SpanExpression, Operator, usize/*Operator token */, SpanExpression)>),
-    Array(Vec<SpanExpression>), // first[second, third, ...]
+    Array(Box<(SpanExpression, SpanExpression)>), // first[second]
     FuncCall(Vec<SpanExpression>) // first(second, third, ...)
 }
 
@@ -139,12 +157,12 @@ impl Expression {
 pub type SpanExpression = (Expression, Span);
 pub type SpanStatement = (Statement, Span);
 
+pub type SpanAssignableExpression = SpanExpression;
+
 #[derive(Debug)]
 pub enum Statement {
-    Assign(SpanExpression, SpanExpression, usize/* Eq sign token */), // v = expr;
-    Mention(SpanExpression),
+    Assign(Vec<SpanAssignableExpression>, SpanExpression), // v = expr;
     Block(Vec<SpanStatement>),
-    PipelineStage(usize),
     TimelineStage(usize)
 }
@@ -161,25 +179,53 @@ pub struct ASTRoot {
     pub modules : Vec<Module>
 }
 
-pub fn for_each_identifier_in_expression<F>((expr, span) : &SpanExpression, func : &mut F) where F: FnMut(IdentifierIdx, usize) -> () {
-    match expr {
-        Expression::Named(id) => {
-            assert!(span.0 == span.1);
-            func(*id, span.0)
-        },
-        Expression::Constant(_v) => {},
-        Expression::UnaryOp(b) => {
-            let (_operator, _operator_pos, right) = &**b;
-            for_each_identifier_in_expression(&right, func);
+pub trait IterIdentifiers {
+    fn for_each_value<F>(&self, func : &mut F) where F : FnMut(IdentifierIdx, usize) -> ();
+}
+
+impl IterIdentifiers for SpanExpression {
+    fn for_each_value<F>(&self, func : &mut F) where F : FnMut(IdentifierIdx, usize) -> () {
+        let (expr, span) = self;
+        match expr {
+            Expression::Named(id) => {
+                assert!(span.0 == span.1);
+                func(*id, span.0)
+            }
+            Expression::Constant(_v) => {}
+            Expression::UnaryOp(b) => {
+                let (_operator, _operator_pos, right) = &**b;
+                right.for_each_value(func);
+            }
+            Expression::BinOp(b) => {
+                let (left, _operator, _operator_pos, right) = &**b;
+                left.for_each_value(func);
+                right.for_each_value(func);
+            }
+            Expression::FuncCall(args) => {
+                for arg in args {
+                    arg.for_each_value(func);
+                }
+            }
+            Expression::Array(b) => {
+                let (array, idx) = &**b;
+                array.for_each_value(func);
+                idx.for_each_value(func);
+            }
         }
-        Expression::BinOp(b) => {
-            let (left, _operator, _operator_pos, right) = &**b;
-            for_each_identifier_in_expression(&left, func);
-            for_each_identifier_in_expression(&right, func);
-        },
-        Expression::Array(args) | Expression::FuncCall(args) => {
-            for arg in args {
-                for_each_identifier_in_expression(arg, func);
+    }
+}
+
+impl IterIdentifiers for SpanTypeExpression {
+    fn for_each_value<F>(&self, func : &mut F) where F : FnMut(IdentifierIdx, usize) -> () {
+        let (typ, _span) = self;
+        match typ {
+            TypeExpression::Named(_n) => {
+                // is type
+            }
+            TypeExpression::Array(b) => {
+                let (arr_typ, arr_size) = &**b;
+                arr_typ.for_each_value(func);
+                arr_size.for_each_value(func);
             }
         }
     }
 }
@@ -188,13 +234,12 @@ pub fn for_each_expression_in_block<F>(block : &Vec<SpanStatement>, func : &mut F) where F: FnMut(&SpanExpression) {
     for (stmt, _span) in block {
         match stmt {
-            Statement::Assign(to, v, _eq_sign_pos) => {
-                func(to);
+            Statement::Assign(to, v) => {
+                for t in to {
+                    func(t);
+                }
                 func(v);
             },
-            Statement::Mention(m) => {
-                func(m);
-            },
             Statement::Block(b) => {
                 for_each_expression_in_block(b, func);
             },
@@ -213,3 +258,22 @@ pub fn for_each_expression_in_module(m : &Module, func : &mut F) where F : Fn
     }
     for_each_expression_in_block(&m.code, func);
 }
+
+pub struct GlobalContext {
+    real_types : HashMap,
+    // aliases : todo!()
+}
+
+impl GlobalContext {
+    pub fn parse_to_type((expr, span) : &SpanExpression, errors : &mut Vec<ParsingError<Span>>) -> Option<TypeExpression> {
+        match expr {
+            Expression::Named(idx) => {todo!();},
+            Expression::Array(args) => {todo!();},
+            other => {
+                errors.push(error_basic_str(*span, "Unexpected part"));
+                return None
+            }
+        }
+    }
+}
+
diff --git a/src/code_generation/mod.rs b/src/code_generation/mod.rs
index 85f33f3..1757a2b 100644
--- a/src/code_generation/mod.rs
+++ b/src/code_generation/mod.rs
@@ -1,10 +1,6 @@
 use crate::{ast::*, errors::{ParsingError, error_basic_str}};
 
 
-pub struct GlobalContext {
-
-}
-
 type ToAssignable = usize;
 
 #[derive(Debug)]
@@ -20,7 +16,7 @@ pub enum Operation {
 #[derive(Debug)]
 pub struct LocalVar {
     span : Span,
-    typ : Option<SpanExpression>,
+    typ : Option<SpanTypeExpression>,
     identifier_type : IdentifierType
 }
 
@@ -99,28 +95,32 @@ impl Flattened {
             }
         })
     }
-}
-
-pub fn synthesize(module : &Module, errors : &mut Vec<ParsingError<Span>>) -> Flattened {
-    let mut result = Flattened{variables : Vec::new(), operations : Vec::new()};
-
-    for decl in &module.declarations {
-        result.variables.push(LocalVar{span : decl.span, typ : Some(decl.typ.clone()), identifier_type : decl.identifier_type})
-    }
-    for (stmt, stmt_span) in &module.code {
-        match stmt {
-            Statement::Assign(to, value_expr, eq_sign_pos) => {
-                if let Some(to_idx) = result.synthesize_assign_to_expr(to, errors) {
-                    let value_idx = result.synthesize_expression(value_expr);
-                    result.operations.push((Operation::Copy { out: to_idx, input: value_idx }, *eq_sign_pos))
+    pub fn synthesize(module : &Module, errors : &mut Vec<ParsingError<Span>>) -> Flattened {
+        let mut result = Flattened{variables : Vec::new(), operations : Vec::new()};
+
+        for decl in &module.declarations {
+            result.variables.push(LocalVar{span : decl.span, typ : Some(decl.typ.clone()), identifier_type : decl.identifier_type})
+        }
+
+        for (stmt, stmt_span) in &module.code {
+            match stmt {
+                Statement::Assign(to, value_expr) => {
+                    /*if let Some(to_idx) = result.synthesize_assign_to_expr(to, errors) {
+                        let value_idx = result.synthesize_expression(value_expr);
+                        result.operations.push((Operation::Copy { out: to_idx, input: value_idx }, *eq_sign_pos))
+                    }*/
+                },
+                other => {
+                    todo!();
                 }
-            },
-            other => {
-                todo!();
             }
         }
+
+        result
     }
-    result
+    pub fn typecheck(&mut self, errors : &mut Vec<ParsingError<Span>>) {
+
+    }
 }
diff --git a/src/dev_aid/lsp.rs b/src/dev_aid/lsp.rs
index 99f93cc..f55d537 100644
--- a/src/dev_aid/lsp.rs
+++ b/src/dev_aid/lsp.rs
@@ -14,8 +14,6 @@ use crate::{parser::{perform_full_semantic_parse, FullParseResult}, dev_aid::syn
 
 use super::syntax_highlighting::{IDETokenType, IDEIdentifierType, IDEToken};
 
-use std::env;
-
 static LSP_LOG_PATH : &str = if crate::tokenizer::const_eq_str(std::env::consts::OS, "windows") {
     "C:\\Users\\lenna\\lsp_out.txt"
 } else {
@@ -24,14 +22,6 @@ static LSP_LOG_PATH : &str = if crate::tokenizer::const_eq_str(std::env::consts:
 
 thread_local!(static LSP_LOG: File = File::create(LSP_LOG_PATH).expect("Replacement terminal /home/lennart/lsp_out.txt could not be created"));
 
-macro_rules! print {
-    ($($arg:tt)*) => {{
-        use std::io::Write;
-        LSP_LOG.with(|mut file| {
-            write!(file, $($arg)*).unwrap();
-        })
-    }};
-}
println { ($($arg:tt)*) => {{ use std::io::Write; @@ -152,8 +142,7 @@ fn get_semantic_token_type_from_ide_token(tok : &IDEToken) -> u32 { IDETokenType::Comment => 0, IDETokenType::Keyword => 1, IDETokenType::Operator => 2, - IDETokenType::PipelineStage => 7, // EVENT seems to get a good colour - IDETokenType::TimelineStage => 7, + IDETokenType::TimelineStage => 7,// EVENT seems to get a good colour IDETokenType::Identifier(IDEIdentifierType::Value(IdentifierType::Input)) => 4, IDETokenType::Identifier(IDEIdentifierType::Value(IdentifierType::Output)) => 4, IDETokenType::Identifier(IDEIdentifierType::Value(IdentifierType::State)) => 3, diff --git a/src/dev_aid/syntax_highlighting.rs b/src/dev_aid/syntax_highlighting.rs index efae134..8ed1ff7 100644 --- a/src/dev_aid/syntax_highlighting.rs +++ b/src/dev_aid/syntax_highlighting.rs @@ -17,7 +17,6 @@ pub enum IDETokenType { Comment, Keyword, Operator, - PipelineStage, TimelineStage, Identifier(IDEIdentifierType), Number, @@ -60,7 +59,6 @@ fn pretty_print(file_text : &str, token_spans : &[CharSpan], ide_infos : &[IDETo IDETokenType::Comment => Style::new().green().dim(), IDETokenType::Keyword => Style::new().blue(), IDETokenType::Operator => Style::new().white().bright(), - IDETokenType::PipelineStage => Style::new().red().bold(), IDETokenType::TimelineStage => Style::new().red().bold(), IDETokenType::Identifier(IDEIdentifierType::Unknown) => Style::new().red().underlined(), IDETokenType::Identifier(IDEIdentifierType::Value(IdentifierType::Local)) => Style::new().blue().bright(), @@ -97,14 +95,14 @@ fn add_ide_bracket_depths_recursive<'a>(result : &mut [IDEToken], current_depth fn walk_name_color(ast : &ASTRoot, result : &mut [IDEToken]) { for module in &ast.modules { for decl in &module.declarations { - for_each_identifier_in_expression(&decl.typ, &mut |_name, position| { + decl.typ.for_each_value(&mut |_name, position| { result[position].typ = IDETokenType::Identifier(IDEIdentifierType::Type); }); //result[decl.name.position].typ = IDETokenType::Identifier(IDEIdentifierType::Value(decl.identifier_type)); } for_each_expression_in_module(&module, &mut |expr| { - for_each_identifier_in_expression(expr, &mut |name, position| { + expr.for_each_value(&mut |name, position| { result[position].typ = IDETokenType::Identifier(if let Some(l) = name.get_local() { IDEIdentifierType::Value(module.declarations[l].identifier_type) } else { @@ -126,9 +124,7 @@ pub fn create_token_ide_info<'a>(parsed: &FullParseResult) -> Vec { } else if is_bracket(tok_typ) != IsBracket::NotABracket { IDETokenType::InvalidBracket // Brackets are initially invalid. They should be overwritten by the token_hierarchy step. 
            IDETokenType::InvalidBracket // Brackets are initially invalid. They should be overwritten by the token_hierarchy step. The ones that don't get overwritten are invalid
         } else if is_symbol(tok_typ) {
-            if tok_typ == kw("@") {
-                IDETokenType::PipelineStage
-            } else if tok_typ == kw("#") {
+            if tok_typ == kw("#") {
                 IDETokenType::TimelineStage
             } else {
                 IDETokenType::Operator
diff --git a/src/errors.rs b/src/errors.rs
index 9253ef5..32de877 100644
--- a/src/errors.rs
+++ b/src/errors.rs
@@ -111,16 +111,29 @@ pub fn error_unopened_bracket(close_pos : usize, close_typ : TokenTypeIdx, open_
     error_with_info(Span::from(close_pos), reason, vec![error_info_str(Span(open_after_pos, open_after_pos), "must be opened in scope after this")])
 }
 
+pub fn error_unexpected_token(expected : &[TokenTypeIdx], found : TokenTypeIdx, pos : usize, context : &str) -> ParsingError<Span> {
+    let expected_list_str = join_expected_list(expected);
+    error_unexpected_token_str(&expected_list_str, found, pos, context)
+}
+
+pub fn error_unexpected_token_str(expected_list_str : &str, found : TokenTypeIdx, pos : usize, context : &str) -> ParsingError<Span> {
+    let tok_typ_name = get_token_type_name(found);
+    error_basic(Span::from(pos), format!("Unexpected Token '{tok_typ_name}' while parsing {context}. Expected {expected_list_str}"))
+}
+
 pub fn error_unexpected_tree_node(expected : &[TokenTypeIdx], found : Option<&TokenTreeNode>, unexpected_eof_idx : usize, context : &str) -> ParsingError<Span> {
     let expected_list_str = join_expected_list(expected);
+    error_unexpected_tree_node_str(&expected_list_str, found, unexpected_eof_idx, context)
+}
+
+pub fn error_unexpected_tree_node_str(expected_list_str : &str, found : Option<&TokenTreeNode>, unexpected_eof_idx : usize, context : &str) -> ParsingError<Span> {
     match found {
         None => {
             error_basic(Span::from(unexpected_eof_idx), format!("Unexpected End of Scope while parsing {context}. Expected {expected_list_str}"))
-        },
+        }
         Some(TokenTreeNode::PlainToken(tok, pos)) => {
-            let tok_typ_name = get_token_type_name(tok.get_type());
-            error_basic(Span::from(*pos), format!("Unexpected Token '{tok_typ_name}' while parsing {context}. Expected {expected_list_str}"))
-        },
+            error_unexpected_token_str(expected_list_str, tok.get_type(), *pos, context)
+        }
         Some(TokenTreeNode::Block(typ, _, span)) => {
             let tok_typ_name = get_token_type_name(*typ);
             error_basic(*span, format!("Unexpected Block '{tok_typ_name}' while parsing {context}. Expected {expected_list_str}"))
diff --git a/src/parser.rs b/src/parser.rs
index cc46609..6d5548c 100644
--- a/src/parser.rs
+++ b/src/parser.rs
@@ -119,17 +119,16 @@ impl<'prev> LocalVariableContext<'prev> {
     }
 }
 
+#[derive(Clone)]
 struct TokenStream<'it> {
     iter : Peekable<std::slice::Iter<'it, TokenTreeNode>>,
     unexpected_eof_token : usize,
     pub last_idx : usize
 }
 
-impl<'it> TokenStream<'it> {
-    // The given start idx should point to the first element in this block. unexpected_eof_token should point one past the last element
-    fn new(list : &'it [TokenTreeNode], start_idx : usize, unexpected_eof_token : usize) -> TokenStream<'it> {
-        TokenStream{iter : list.iter().peekable(), unexpected_eof_token : unexpected_eof_token, last_idx : start_idx}
-    }
+impl<'it> Iterator for TokenStream<'it> {
+    type Item = &'it TokenTreeNode;
+    fn next(&mut self) -> Option<&'it TokenTreeNode> {
         if let Some(found) = self.iter.next() {
             self.last_idx = found.get_span().1;
@@ -138,6 +137,13 @@ impl<'it> TokenStream<'it> {
             None
         }
     }
+}
+
+impl<'it> TokenStream<'it> {
+    // The given start idx should point to the first element in this block. unexpected_eof_token should point one past the last element
+    fn new(list : &'it [TokenTreeNode], start_idx : usize, unexpected_eof_token : usize) -> TokenStream<'it> {
+        TokenStream{iter : list.iter().peekable(), unexpected_eof_token : unexpected_eof_token, last_idx : start_idx}
+    }
     fn peek(&mut self) -> Option<&'it TokenTreeNode> {
         if let Some(&found) = self.iter.peek() {
             Some(found)
@@ -153,31 +159,23 @@ impl<'it> TokenStream<'it> {
         }
         false
     }
-    fn peek_is_plain_one_of(&mut self, expecteds : &[TokenTypeIdx]) -> bool {
-        if let Some(TokenTreeNode::PlainToken(tok, _place)) = self.iter.peek() {
-            for ex in expecteds {
-                if tok.get_type() == *ex {
-                    return true;
-                }
+    fn eat_is_plain(&mut self, expected : TokenTypeIdx) -> Option<(TokenExtraInfo, usize)> {
+        if let Some(TokenTreeNode::PlainToken(tok, pos)) = self.peek() {
+            if tok.get_type() == expected {
+                self.next();
+                return Some((tok.get_info(), *pos));
             }
         }
-        false
+        None
     }
-    fn peek_is_block(&mut self, expected : TokenTypeIdx) -> bool {
-        if let Some(TokenTreeNode::Block(typ, _content, _span)) = self.iter.peek() {
+    fn eat_is_block(&mut self, expected : TokenTypeIdx) -> Option<(&Vec<TokenTreeNode>, Span)> {
+        if let Some(TokenTreeNode::Block(typ, content, span)) = self.peek() {
             if *typ == expected {
-                return true;
+                self.next();
+                return Some((content, *span));
             }
         }
-        false
-    }
-    fn skip_until(&mut self, end_type : TokenTypeIdx) {
-        while let Some(found) = self.peek() {
-            if found.get_token_type() == end_type {
-                return;
-            }
-            self.next();
-        }
+        None
     }
     fn skip_until_one_of(&mut self, end_types : &[TokenTypeIdx]) {
         while let Some(found) = self.peek() {
@@ -236,7 +234,19 @@ impl<'a> ASTParserContext<'a> {
         }
     }
 
-    fn add_declaration(&mut self, type_expr : SpanExpression, name : IdentifierToken, identifier_type : IdentifierType, declarations : &mut Vec<SignalDeclaration>, scope : &mut LocalVariableContext) -> usize {
+    fn token_stream_should_be_finished(&mut self, mut token_stream : TokenStream, context : &str) {
+        if let Some(bad_token) = token_stream.next() {
+            let mut bad_tokens_span = bad_token.get_span();
+
+            for tok in token_stream {
+                bad_tokens_span.1 = tok.get_span().1;
+            }
+
+            self.errors.push(error_basic(bad_tokens_span, format!("More tokens found than expected while parsing {context}")))
+        }
+    }
+
+    fn add_declaration(&mut self, type_expr : SpanTypeExpression, name : IdentifierToken, identifier_type : IdentifierType, declarations : &mut Vec<SignalDeclaration>, scope : &mut LocalVariableContext) -> usize {
         let span = Span(type_expr.1.0, name.position);
         let decl = SignalDeclaration{typ : type_expr, span, name_idx : name.name_idx, identifier_type};
         let decl_id = declarations.len();
@@ -298,8 +308,9 @@ impl<'a> ASTParserContext<'a> {
             }
         };
         while let Some(TokenTreeNode::Block(typ, content, bracket_span)) = token_stream.peek() {
-            if *typ == kw("[") || *typ == kw("(") {
-                let start_at = base_expr.1.0;
+            let start_at = base_expr.1.0;
+            let total_span = Span(start_at, bracket_span.1);
+            if *typ == kw("(") {
                 let mut args : Vec<SpanExpression> = Vec::new();
                 args.push(base_expr);
                 let mut content_tokens_iter = TokenStream::new(content, bracket_span.0, bracket_span.1);
@@ -313,8 +324,11 @@ impl<'a> ASTParserContext<'a> {
                     }
                 }
             }
-            let total_span = Span(start_at, bracket_span.1);
-            base_expr = (if *typ == kw("[") {Expression::Array(args)} else {Expression::FuncCall(args)}, total_span)
+                base_expr = (Expression::FuncCall(args), total_span)
+            } else if *typ == kw("[") {
+                let mut arg_token_stream = TokenStream::new(content, bracket_span.0, bracket_span.1);
+                let arg = self.parse_expression(&mut arg_token_stream, scope)?;
+                base_expr = (Expression::Array(Box::new((base_expr, arg))), total_span)
             } else {
                 break;
             }
@@ -354,125 +368,156 @@ impl<'a> ASTParserContext<'a> {
     }
 
     fn parse_signal_declaration(&mut self, token_stream : &mut TokenStream, identifier_type : IdentifierType, declarations : &mut Vec<SignalDeclaration>, scope : &mut LocalVariableContext) -> Option<()> {
-        let sig_type = self.parse_expression(token_stream, scope)?;
+        let sig_type = self.try_parse_type(token_stream, scope)?;
         let name = self.eat_identifier(token_stream, "signal declaration")?;
         self.add_declaration(sig_type, name, identifier_type, declarations, scope);
         Some(())
     }
 
+    fn try_parse_type(&mut self, token_stream : &mut TokenStream, scope : &LocalVariableContext) -> Option<SpanTypeExpression> {
+        let (name_id, first_pos) = token_stream.eat_is_plain(TOKEN_IDENTIFIER)?;
+        let mut cur_type = (TypeExpression::Named(name_id as usize), Span::from(first_pos));
+        while let Some((content, block_span)) = token_stream.eat_is_block(kw("[")) {
+            let mut array_index_token_stream = TokenStream::new(content, block_span.0, block_span.1);
+            let expr = self.parse_expression(&mut array_index_token_stream, scope)?;
+            self.token_stream_should_be_finished(array_index_token_stream, "type array index");
+            cur_type = (TypeExpression::Array(Box::new((cur_type, expr))), Span(first_pos, block_span.1));
+        }
+        Some(cur_type)
+    }
+
+    fn try_parse_declaration(&mut self, token_stream : &mut TokenStream, declarations : &mut Vec<SignalDeclaration>, scope : &mut LocalVariableContext) -> Option<SpanExpression> {
+        let identifier_type = if let Some((_info, _pos)) = token_stream.eat_is_plain(kw("state")) {
+            IdentifierType::State
+        } else {
+            IdentifierType::Local
+        };
+
+        let typ = self.try_parse_type(token_stream, scope)?;
+        let (name_idx, position) = token_stream.eat_is_plain(TOKEN_IDENTIFIER)?;
+        let local_id = self.add_declaration(typ, IdentifierToken{name_idx, position}, identifier_type, declarations, scope);
+        Some((Expression::Named(IdentifierIdx::new_local(local_id)), Span::from(position)))
+    }
+
     fn parse_bundle(&mut self, token_stream : &mut TokenStream, identifier_type : IdentifierType, declarations : &mut Vec<SignalDeclaration>, scope : &mut LocalVariableContext) {
         while token_stream.peek_is_plain(TOKEN_IDENTIFIER) {
-            if let Some(()) = self.parse_signal_declaration(token_stream, identifier_type, declarations, scope) {
+            if let Some(_) = self.parse_signal_declaration(token_stream, identifier_type, declarations, scope) {
 
             } else {
                // Error during parsing signal decl. Skip till "," or end of scope
                token_stream.skip_until_one_of(&[kw(","), kw("->"), kw("{"), kw(";")]);
             }
-            if !token_stream.peek_is_plain(kw(",")) {
+            if !token_stream.eat_is_plain(kw(",")).is_some() {
                 break;
             }
-            token_stream.next();
         }
     }
     fn parse_interface(&mut self, token_stream : &mut TokenStream, declarations : &mut Vec<SignalDeclaration>, scope : &mut LocalVariableContext) {
         self.parse_bundle(token_stream, IdentifierType::Input, declarations, scope);
 
-        if token_stream.peek_is_plain(kw("->")) {
-            token_stream.next();
+        if token_stream.eat_is_plain(kw("->")).is_some() {
             self.parse_bundle(token_stream, IdentifierType::Output, declarations, scope);
         }
     }
     fn parse_statement(&mut self, token_stream : &mut TokenStream, declarations : &mut Vec<SignalDeclaration>, scope : &mut LocalVariableContext, statements : &mut Vec<SpanStatement>) -> Option<()> {
-        let mut state_decl : Option<usize> = None;
         let start_at = if let Some(peek) = token_stream.peek() {
             peek.get_span().0
         } else {
             return None;
         };
 
-        match token_stream.peek() {
-            None => {
-                return None;
-            }
-            Some(TokenTreeNode::PlainToken(tok, pos)) if tok.get_type() == kw("@") => {
-                // Assignment
-                token_stream.next();
-                statements.push((Statement::PipelineStage(*pos), Span::from(*pos)));
-                return Some(())
-            }
-            Some(TokenTreeNode::PlainToken(tok, pos)) if tok.get_type() == kw("#") => {
-                // Assignment
-                token_stream.next();
-                statements.push((Statement::TimelineStage(*pos), Span::from(*pos)));
-                return Some(())
+        if let Some((_info, pos)) = token_stream.eat_is_plain(kw("#")) {
+            statements.push((Statement::TimelineStage(pos), Span::from(pos)));
+            return Some(());
+        }
+
+        let mut left_expressions : Vec<(SpanExpression, usize)> = Vec::new();
+        let mut all_decls = true;
+        loop { // Loop over a number of declarations possibly
+            let mut reg_count : usize = 0;
+            while let Some((_info, _pos)) = token_stream.eat_is_plain(kw("reg")) {
+                reg_count += 1;
             }
-            Some(TokenTreeNode::PlainToken(tok, pos)) if tok.get_type() == kw("state") => {
-                // Assignment
-                token_stream.next();
-                state_decl = Some(*pos);
+
+            let mut tok_stream_copy = token_stream.clone();
+
+            if let Some(name) = self.try_parse_declaration(&mut tok_stream_copy, declarations, scope) {
+                // Maybe it's a declaration?
+                *token_stream = tok_stream_copy;
+                left_expressions.push((name, reg_count));
+
+            } else if let Some(sp_expr) = self.parse_expression(token_stream, scope) {
+                // It's an expression instead!
+                left_expressions.push((sp_expr, reg_count));
+                all_decls = false;
+            } else {
+                // Also not, error then
+                token_stream.skip_until_one_of(&[kw(","), kw("="), kw(";")]);
             }
-            _other => {}
-        }
-        let expr_first = self.parse_expression(token_stream, scope)?; // Error case
-        let resulting_statement = match token_stream.peek() {
-            // Regular assignment
-            None => {
-                if let Some(kw_pos) = state_decl {
-                    self.errors.push(error_basic_str(Span::from(kw_pos), "Cannot attach 'state' keyword in mention"))
+            match token_stream.next() {
+                Some(TokenTreeNode::PlainToken(tok, _pos)) if tok.get_type() == kw(",") => {
+                    continue; // parse next declaration
                 }
-                Statement::Mention(expr_first)
-            },
-            Some(TokenTreeNode::PlainToken(tok, _)) if tok.get_type() == kw(";") => {
-                token_stream.next();
-                if let Some(kw_pos) = state_decl {
-                    self.errors.push(error_basic_str(Span::from(kw_pos), "Cannot attach 'state' keyword in mention"))
+                Some(TokenTreeNode::PlainToken(tok, assign_pos)) if tok.get_type() == kw("=") => {
+                    // Ends the loop
+                    // T a, T b = x(y);
+                    return self.parse_statement_handle_equals(left_expressions, assign_pos, token_stream, scope, statements, start_at);
                 }
-                Statement::Mention(expr_first)
-            },
-            Some(TokenTreeNode::PlainToken(tok, eq_sign_pos)) if tok.get_type() == kw("=") => {
-                if let Some(kw_pos) = state_decl {
-                    self.errors.push(error_basic_str(Span::from(kw_pos), "Cannot attach 'state' keyword in assignment"))
+                Some(TokenTreeNode::PlainToken(tok, _pos)) if tok.get_type() == kw(";") => {
+                    // Ends the loop
+                    return self.parse_statement_handle_end(left_expressions, all_decls, statements);
                 }
-                // Assignment
-                token_stream.next();
-                let value = self.parse_expression(token_stream, scope)?;
-                self.eat_plain(token_stream, kw(";"), "assignment");
-                Statement::Assign(expr_first, value, *eq_sign_pos)
-            },
-            Some(_other) => {
-                let declaration_type = if state_decl.is_some() {
-                    IdentifierType::State
-                } else {
-                    IdentifierType::Local
-                };
-                // This is a declaration!
-                let name = self.eat_identifier(token_stream, "declaration")?;
-                match token_stream.next() {
-                    Some(TokenTreeNode::PlainToken(tok, eq_sign_pos)) if tok.get_type() == kw("=") => {
-                        // Parse set value expression before adding declaration. The variable name should not yet be in scope
-                        let value = self.parse_expression(token_stream, scope)?;
-                        self.eat_plain(token_stream, kw(";"), "declaration");
-                        let id = self.add_declaration(expr_first, name, declaration_type, declarations, scope);
-                        Statement::Assign((Expression::Named(IdentifierIdx::new_local(id)), Span::from(name.position)), value, *eq_sign_pos)
-                    },
-                    Some(TokenTreeNode::PlainToken(tok, _)) if tok.get_type() == kw(";") => {
-                        self.add_declaration(expr_first, name, declaration_type, declarations, scope);
-                        //Statement::Declare(self.to_signal_declaration(expr_first, name, declaration_type)?)
-                        return Some(());
-                    },
-                    other => {
-                        self.errors.push(error_unexpected_tree_node(&[kw(";"), kw("=")], other, token_stream.last_idx, "declaration")); // easy way to throw the End Of Scope error
-                        return None;
-                        // Statement::Declare(self.to_signal_declaration(expr_first, name)?)
-                    }
+                None => {
+                    // Ends the loop
+                    return self.parse_statement_handle_end(left_expressions, all_decls, statements);
+                }
+                other => {
+                    self.errors.push(error_unexpected_tree_node(&[kw(";"), kw("="), kw(",")], other, token_stream.unexpected_eof_token, "statement"));
+                    return None
                 }
             }
-        };
+        }
+    }
+
+    fn parse_statement_handle_equals(&mut self, left_expressions: Vec<(SpanExpression, usize)>, assign_pos: &usize, token_stream: &mut TokenStream<'_>, scope: &mut LocalVariableContext<'_>, statements: &mut Vec<(Statement, Span)>, start_at: usize) -> Option<()> {
+        if left_expressions.len() == 0 {
+            self.errors.push(error_unexpected_token(&[TOKEN_IDENTIFIER], kw("="), *assign_pos, "statement"));
+            None
+        } else if let Some(value) = self.parse_expression(token_stream, scope) {
+            let converted_left : Vec<SpanAssignableExpression> = left_expressions.into_iter().map(|(expr, reg_count)| expr).collect();
+            let end_at = value.1.1;
+            statements.push((Statement::Assign(converted_left, value), Span(start_at, end_at)));
+            self.eat_plain(token_stream, kw(";"), "right-hand side of expression")?;
+            Some(())
+        } else {
+            None
+            // errors reported by self.parse_expression
+        }
+    }
 
-        statements.push((resulting_statement, Span(start_at, token_stream.last_idx)));
-        return Some(())
+    fn parse_statement_handle_end(&mut self, left_expressions: Vec<(SpanExpression, usize)>, all_decls: bool, statements: &mut Vec<(Statement, Span)>) -> Option<()> {
+        // Declarations or single expression only
+        // T a;
+        // myFunc(x, y);
+        if left_expressions.len() == 0 {
+            return None
+        } else if left_expressions.len() == 1 {
+            // Is a single big expression, or a single declaration
+            let (expr, _reg_count) = left_expressions.into_iter().next().unwrap();
+            if all_decls {
+                // decls have been taken care of during try_parse_declaration step
+                return Some(());
+            } else {
+                let expr_span = expr.1;
+                statements.push((Statement::Assign(Vec::new(), expr), expr_span));
+                return None;
+            }
+        } else {
+            self.errors.push(error_basic_str(Span(left_expressions[1].0.1.0, left_expressions[left_expressions.len()-1].0.1.1), "Multiple declarations are only allowed in function call syntax: int a, int b = f(x);"));
+            return None;
+        }
     }
     fn parse_code_block(&mut self, block_tokens : &[TokenTreeNode], span : Span, declarations : &mut Vec<SignalDeclaration>, outer_scope : &LocalVariableContext) -> Vec<SpanStatement> {
         let mut token_stream = TokenStream::new(block_tokens, span.0, span.1);
@@ -481,9 +526,10 @@ impl<'a> ASTParserContext<'a> {
 
         let mut inner_scope = LocalVariableContext::new_extend(outer_scope);
 
+
         while token_stream.peek().is_some() {
-            if token_stream.peek_is_plain(kw(";")) {
-                token_stream.next();
+            // Allow empty statements
+            if token_stream.eat_is_plain(kw(";")).is_some() {
                 continue;
             }
             if let Some(TokenTreeNode::Block(typ, contents, block_span)) = token_stream.peek() {
@@ -493,6 +539,7 @@ impl<'a> ASTParserContext<'a> {
                 continue; // Can't add condition to if let, so have to do some weird control flow here
             }
         }
+
         if self.parse_statement(&mut token_stream, declarations, &mut inner_scope, &mut statements).is_none() {
             // Error recovery. Find end of statement
             token_stream.skip_until_one_of(&[kw(";"), kw("{")]);
diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index 5aeedb3..85b2ea8 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -55,7 +55,7 @@ pub const ALL_KEYWORDS : [(&'static str, u8); 17] = [
 
 // Extra data is opreator prescedence. Lower number is higher prescedence of operators
 // ordered by which to prefer when parsing
-pub const ALL_SYMBOLS : [(&'static str, u8); 34] = [
+pub const ALL_SYMBOLS : [(&'static str, u8); 33] = [
     // 'Meta symbols', for comments. Not actually used in further parsing
     ("/*", 0),
     ("//", 0),
@@ -80,8 +80,7 @@ pub const ALL_SYMBOLS : [(&'static str, u8); 34] = [
     ("^", 3),
     ("<", 2),
     (">", 2),
-    ("!", 0),
-    ("@", 0), // End of operators (see is_operator())
+    ("!", 0),// End of operators (see is_operator())
     ("#", 0),
     ("=", 0),
     ("(", 0), // Close parens are always 1 larger than their open variant, (see closes())
@@ -157,7 +156,7 @@ pub fn is_symbol(typ : TokenTypeIdx) -> bool {
     typ < TOKEN_IDENTIFIER
 }
 pub fn is_operator(typ : TokenTypeIdx) -> bool {
-    typ >= kw("<=") && typ <= kw("@")
+    typ >= kw("<=") && typ <= kw("!")
 }
 pub fn is_unary_operator(typ : TokenTypeIdx) -> bool {
     typ == kw("|") || typ == kw("&") || typ == kw("^") || typ == kw("+") || typ == kw("*") || typ == kw("!")