Skip to content

Commit

Permalink
Add parsing support for inline variable declaration
Browse files Browse the repository at this point in the history
Variables can be defined in the body using the syntax
```st
{def} VAR y : DINT := 0; (*Pragma to avoid confusion with var blocks *)
(*Pragma is optional once in body*)
VAR x := 0; (*Implicit type declaration to DINT*)
FOR VAR x := 0 TO 10 DO
(*Implicit variable declaration as loop counter*)
END_FOR
```

Ref: #973
  • Loading branch information
GitExample authored and ghaith committed Nov 10, 2023
1 parent cd92cca commit 3f70a78
Show file tree
Hide file tree
Showing 12 changed files with 634 additions and 65 deletions.
36 changes: 36 additions & 0 deletions compiler/plc_ast/src/ast.rs
Original file line number Diff line number Diff line change
Expand Up @@ -619,6 +619,8 @@ pub enum AstStatement {
ReturnStatement(ReturnStatement),
JumpStatement(JumpStatement),
LabelStatement(LabelStatement),
InlineVariable(InlineVariable),
DataTypeDeclaration(Box<DataTypeDeclaration>),
}

impl Debug for AstNode {
Expand Down Expand Up @@ -738,6 +740,12 @@ impl Debug for AstNode {
AstStatement::LabelStatement(LabelStatement { name, .. }) => {
f.debug_struct("LabelStatement").field("name", name).finish()
}
AstStatement::InlineVariable(InlineVariable { name, datatype }) => {
f.debug_struct("InlineVariable").field("name", name).field("datatype", datatype).finish()
}
AstStatement::DataTypeDeclaration(decl) => {
f.debug_tuple("DataTypeDeclaration").field(decl).finish()
}
}
}
}
Expand Down Expand Up @@ -1512,6 +1520,27 @@ impl AstFactory {
pub fn create_label_statement(name: String, location: SourceLocation, id: AstId) -> AstNode {
AstNode { stmt: AstStatement::LabelStatement(LabelStatement { name }), location, id }
}

/// Creates an `InlineVariable` AST node for an in-body variable
/// declaration, boxing the `name` node and, when present, the `datatype`.
pub fn create_inline_declaration(
    name: AstNode,
    datatype: Option<AstNode>,
    id: AstId,
    location: SourceLocation,
) -> AstNode {
    AstNode {
        stmt: AstStatement::InlineVariable(InlineVariable {
            name: Box::new(name),
            datatype: datatype.map(Box::new),
        }),
        id,
        location,
    }
}

/// Wraps a `DataTypeDeclaration` in a standalone AST node
/// (`AstStatement::DataTypeDeclaration`), boxing the declaration.
pub fn create_type_declaration(
    datatype: DataTypeDeclaration,
    id: AstId,
    location: SourceLocation,
) -> AstNode {
    AstNode { stmt: AstStatement::DataTypeDeclaration(Box::new(datatype)), id, location }
}
}
/// Marker statement carrying no content of its own.
#[derive(Debug, Clone, PartialEq)]
pub struct EmptyStatement {}
Expand Down Expand Up @@ -1594,3 +1623,10 @@ pub struct JumpStatement {
pub struct LabelStatement {
    // Name identifying this label (presumably the target of jump
    // statements — confirm against JumpStatement handling).
    pub name: String,
}

/// Represents a new variable declaration in the body
#[derive(Clone, Debug, PartialEq)]
pub struct InlineVariable {
    // The declared variable's name expression (boxed to keep the AST enum small).
    pub name: Box<AstNode>,
    // Optional explicit datatype; `None` when the declaration omits `: <type>`
    // (the commit message states the type is then implicitly DINT).
    pub datatype: Option<Box<AstNode>>,
}
4 changes: 3 additions & 1 deletion src/lexer/tests/lexer_tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -58,14 +58,16 @@ fn undefined_pragmas_are_ignored_by_the_lexer() {
#[test]
fn registered_pragmas_parsed() {
let mut lexer = lex(r"
{external}{ref}{sized}{not_registerd}
{external}{ref}{sized}{def}{not_registerd}
");
assert_eq!(lexer.token, PropertyExternal, "Token : {}", lexer.slice());
lexer.advance();
assert_eq!(lexer.token, PropertyByRef, "Token : {}", lexer.slice());
lexer.advance();
assert_eq!(lexer.token, PropertySized, "Token : {}", lexer.slice());
lexer.advance();
assert_eq!(lexer.token, PropertyDef, "Token : {}", lexer.slice());
lexer.advance();
assert_eq!(lexer.token, End);
}

Expand Down
3 changes: 3 additions & 0 deletions src/lexer/tokens.rs
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,9 @@ pub enum Token {
#[token("{sized}")]
PropertySized,

#[token("{def}")]
PropertyDef,

#[token("PROGRAM", ignore(case))]
KeywordProgram,

Expand Down
110 changes: 47 additions & 63 deletions src/parser.rs
Original file line number Diff line number Diff line change
Expand Up @@ -338,7 +338,7 @@ fn parse_super_class(lexer: &mut ParseSession) -> Option<String> {
fn parse_return_type(lexer: &mut ParseSession, pou_type: &PouType) -> Option<DataTypeDeclaration> {
let start_return_type = lexer.range().start;
if lexer.try_consume(&KeywordColon) {
if let Some((declaration, initializer)) = parse_data_type_definition(lexer, None) {
if let Some((declaration, initializer)) = parse_datatype_with_initializer(lexer, None) {
if let Some(init) = initializer {
lexer.accept_diagnostic(Diagnostic::unexpected_initializer_on_function_return(
init.get_location(),
Expand Down Expand Up @@ -587,7 +587,7 @@ fn parse_full_data_type_definition(
None,
))
} else {
parse_data_type_definition(lexer, name).map(|(type_def, initializer)| {
parse_datatype_with_initializer(lexer, name).map(|(type_def, initializer)| {
if lexer.try_consume(&KeywordDotDotDot) {
(
DataTypeDeclaration::DataTypeDefinition {
Expand All @@ -605,23 +605,29 @@ fn parse_full_data_type_definition(
})
}

// TYPE xxx : 'STRUCT' | '(' | IDENTIFIER
fn parse_data_type_definition(
/// Parses a data type definition, then an optional `:=` initializer
/// expression, and returns both as a pair. Returns `None` when the
/// type definition itself cannot be parsed.
fn parse_datatype_with_initializer(
    lexer: &mut ParseSession,
    name: Option<String>,
) -> Option<DataTypeWithInitializer> {
    // Parse the type itself; bail out early on failure.
    let type_def = parse_data_type_definition(lexer, name)?;

    // A `:=` token introduces an optional initializer expression.
    let initializer = lexer.try_consume(&KeywordAssignment).then(|| parse_expression(lexer));

    Some((type_def, initializer))
}

// TYPE xxx : 'STRUCT' | '(' | IDENTIFIER
fn parse_data_type_definition(lexer: &mut ParseSession, name: Option<String>) -> Option<DataTypeDeclaration> {
let start = lexer.location();
if lexer.try_consume(&KeywordStruct) {
// Parse struct
let variables = parse_variable_list(lexer);
Some((
DataTypeDeclaration::DataTypeDefinition {
data_type: DataType::StructType { name, variables },
location: start.span(&lexer.location()),
scope: lexer.scope.clone(),
},
None,
))
Some(DataTypeDeclaration::DataTypeDefinition {
data_type: DataType::StructType { name, variables },
location: start.span(&lexer.location()),
scope: lexer.scope.clone(),
})
} else if lexer.try_consume(&KeywordArray) {
parse_array_type_definition(lexer, name)
} else if lexer.try_consume(&KeywordPointer) {
Expand Down Expand Up @@ -661,23 +667,18 @@ fn parse_pointer_definition(
lexer: &mut ParseSession,
name: Option<String>,
start_pos: usize,
) -> Option<(DataTypeDeclaration, Option<AstNode>)> {
parse_data_type_definition(lexer, None).map(|(decl, initializer)| {
(
DataTypeDeclaration::DataTypeDefinition {
data_type: DataType::PointerType { name, referenced_type: Box::new(decl) },
location: lexer.source_range_factory.create_range(start_pos..lexer.last_range.end),
scope: lexer.scope.clone(),
},
initializer,
)
) -> Option<DataTypeDeclaration> {
parse_data_type_definition(lexer, None).map(|decl| DataTypeDeclaration::DataTypeDefinition {
data_type: DataType::PointerType { name, referenced_type: Box::new(decl) },
location: lexer.source_range_factory.create_range(start_pos..lexer.last_range.end),
scope: lexer.scope.clone(),
})
}

fn parse_type_reference_type_definition(
lexer: &mut ParseSession,
name: Option<String>,
) -> Option<(DataTypeDeclaration, Option<AstNode>)> {
) -> Option<DataTypeDeclaration> {
let start = lexer.range().start;
//Subrange
let referenced_type = lexer.slice_and_advance();
Expand All @@ -692,9 +693,6 @@ fn parse_type_reference_type_definition(
None
};

let initial_value =
if lexer.try_consume(&KeywordAssignment) { Some(parse_expression(lexer)) } else { None };

let end = lexer.last_range.end;
if name.is_some() || bounds.is_some() {
let data_type = match bounds {
Expand Down Expand Up @@ -732,15 +730,12 @@ fn parse_type_reference_type_definition(
scope: lexer.scope.clone(),
},
};
Some((data_type, initial_value))
Some(data_type)
} else {
Some((
DataTypeDeclaration::DataTypeReference {
referenced_type,
location: lexer.source_range_factory.create_range(start..end),
},
initial_value,
))
Some(DataTypeDeclaration::DataTypeReference {
referenced_type,
location: lexer.source_range_factory.create_range(start..end),
})
}
}

Expand Down Expand Up @@ -778,7 +773,7 @@ fn parse_string_size_expression(lexer: &mut ParseSession) -> Option<AstNode> {
fn parse_string_type_definition(
lexer: &mut ParseSession,
name: Option<String>,
) -> Option<(DataTypeDeclaration, Option<AstNode>)> {
) -> Option<DataTypeDeclaration> {
let text = lexer.slice().to_string();
let start = lexer.range().start;
let is_wide = lexer.token == KeywordWideString;
Expand All @@ -805,34 +800,26 @@ fn parse_string_type_definition(
}),
_ => Some(DataTypeDeclaration::DataTypeReference { referenced_type: text, location }),
}
.zip(Some(lexer.try_consume(&KeywordAssignment).then(|| parse_expression(lexer))))
}

fn parse_enum_type_definition(
lexer: &mut ParseSession,
name: Option<String>,
) -> Option<(DataTypeDeclaration, Option<AstNode>)> {
fn parse_enum_type_definition(lexer: &mut ParseSession, name: Option<String>) -> Option<DataTypeDeclaration> {
let start = lexer.last_location();
let elements = parse_any_in_region(lexer, vec![KeywordParensClose], |lexer| {
// Parse Enum - we expect at least one element
let elements = parse_expression_list(lexer);
Some(elements)
})?;
let initializer = lexer.try_consume(&KeywordAssignment).then(|| parse_expression(lexer));
Some((
DataTypeDeclaration::DataTypeDefinition {
data_type: DataType::EnumType { name, elements, numeric_type: DINT_TYPE.to_string() },
location: start.span(&lexer.last_location()),
scope: lexer.scope.clone(),
},
initializer,
))
Some(DataTypeDeclaration::DataTypeDefinition {
data_type: DataType::EnumType { name, elements, numeric_type: DINT_TYPE.to_string() },
location: start.span(&lexer.last_location()),
scope: lexer.scope.clone(),
})
}

fn parse_array_type_definition(
lexer: &mut ParseSession,
name: Option<String>,
) -> Option<(DataTypeDeclaration, Option<AstNode>)> {
) -> Option<DataTypeDeclaration> {
let start = lexer.last_range.start;
let range = parse_any_in_region(lexer, vec![KeywordOf], |lexer| {
// Parse Array range
Expand All @@ -849,7 +836,7 @@ fn parse_array_type_definition(
})?;

let inner_type_defintion = parse_data_type_definition(lexer, None);
inner_type_defintion.map(|(reference, initializer)| {
inner_type_defintion.map(|reference| {
let reference_end = reference.get_location().to_range().map(|it| it.end).unwrap_or(0);
let location = lexer.source_range_factory.create_range(start..reference_end);

Expand All @@ -876,19 +863,16 @@ fn parse_array_type_definition(
}
};

(
DataTypeDeclaration::DataTypeDefinition {
data_type: DataType::ArrayType {
name,
bounds: range,
referenced_type: Box::new(reference),
is_variable_length,
},
location,
scope: lexer.scope.clone(),
DataTypeDeclaration::DataTypeDefinition {
data_type: DataType::ArrayType {
name,
bounds: range,
referenced_type: Box::new(reference),
is_variable_length,
},
initializer,
)
location,
scope: lexer.scope.clone(),
}
})
}

Expand Down
29 changes: 28 additions & 1 deletion src/parser/expressions_parser.rs
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ use plc_source::source_location::SourceLocation;
use regex::{Captures, Regex};
use std::{ops::Range, str::FromStr};

use super::parse_hardware_access;
use super::{parse_data_type_definition, parse_hardware_access};

macro_rules! parse_left_associative_expression {
($lexer: expr, $action : expr,
Expand Down Expand Up @@ -281,6 +281,12 @@ fn parse_atomic_leaf_expression(lexer: &mut ParseSession<'_>) -> Result<AstNode,
LiteralNull => parse_null_literal(lexer),
KeywordSquareParensOpen => parse_array_literal(lexer),
DirectAccess(access) => parse_direct_access(lexer, access),
PropertyDef => {
//Just consume the {def} and go further, if it's a variable we parse it next
lexer.advance();
parse_atomic_leaf_expression(lexer)
}
KeywordVar => parse_inline_declaration(lexer),
_ => {
if lexer.closing_keywords.contains(&vec![KeywordParensClose])
&& matches!(lexer.last_token, KeywordOutputAssignment | KeywordAssignment)
Expand All @@ -296,6 +302,27 @@ fn parse_atomic_leaf_expression(lexer: &mut ParseSession<'_>) -> Result<AstNode,
}
}

/// Parses an inline variable declaration appearing inside a body,
/// i.e. `VAR <name>` optionally followed by `: <datatype>`, and
/// returns an `InlineVariable` AST node spanning the whole declaration.
fn parse_inline_declaration(lexer: &mut ParseSession<'_>) -> Result<AstNode, Diagnostic> {
    // Remember where the declaration starts (at the VAR keyword).
    let location = lexer.location();
    // Consume the VAR keyword.
    lexer.advance();
    // Parse the variable's name.
    let name = parse_identifier(lexer);
    let datatype = if lexer.try_consume(&KeywordColon) {
        // Explicit `: <datatype>` — parse the type and wrap it in a
        // type-declaration node carrying its own source span.
        let type_location = lexer.location();
        parse_data_type_definition(lexer, None).map(|it| {
            AstFactory::create_type_declaration(it, lexer.next_id(), type_location.span(&lexer.location()))
        })
    } else {
        // No colon: the datatype is omitted (implicit typing).
        None
    };
    // Extend the location to cover everything consumed so far.
    let location = location.span(&lexer.last_location());

    Ok(AstFactory::create_inline_declaration(name, datatype, lexer.next_id(), location))
}

/// Consumes the current token and wraps its text in an identifier node
/// located at the just-consumed token's position.
fn parse_identifier(lexer: &mut ParseSession<'_>) -> AstNode {
    AstFactory::create_identifier(&lexer.slice_and_advance(), &lexer.last_location(), lexer.next_id())
}
Expand Down
1 change: 1 addition & 0 deletions src/parser/tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ mod control_parser_tests;
mod expressions_parser_tests;
mod function_parser_tests;
mod initializer_parser_tests;
mod inline_variable_tests;
mod misc_parser_tests;
mod parse_errors;
mod parse_generics;
Expand Down
Loading

0 comments on commit 3f70a78

Please sign in to comment.