Compare commits

..

7 Commits

Author SHA1 Message Date
Jesse Brault  eaebf8c926  Add derive for leaf enum spec, fix compilation errors.  2025-09-29 12:22:28 -05:00
Jesse Brault  058b33ece5  Fix ast_node gen errors.  2025-09-29 09:49:02 -05:00
Jesse Brault  c32ae72beb  WIP redoing name analysis.  2025-09-29 09:39:13 -05:00
Jesse Brault  d6faa37515  Add missing call of f on self.  2025-09-29 09:14:45 -05:00
Jesse Brault  e8a4268949  Fix missing _.  2025-09-29 09:12:28 -05:00
Jesse Brault  5b772443f8  Add walk impl.  2025-09-29 09:10:57 -05:00
Jesse Brault  dd0bee1c91  Finish AstNode impls.  2025-09-29 08:53:19 -05:00
20 changed files with 1810 additions and 1403 deletions

View File

@ -1,6 +1,50 @@
use crate::spec::tree_enum_spec::TreeEnumBuildSpec;
use convert_case::{Case, Casing};
use crate::spec::tree_enum_spec::{EnumRuleChildKind, TreeEnumBuildSpec};
use proc_macro2::TokenStream;
use quote::{format_ident, quote};
pub fn make_enum_ast_node_impl(enum_spec: &TreeEnumBuildSpec) -> TokenStream {
todo!()
let type_ident = format_ident!("{}", enum_spec.build());
let match_arms = enum_spec
.rules()
.map(|rule| {
let rule_ident = format_ident!("{}", rule.rule());
match rule.child() {
Some(child) => {
match child.kind() {
EnumRuleChildKind::Node(node_child) => {
let child_ident = format_ident!("{}", node_child.node_kind().to_case(Case::Snake));
quote! {
#type_ident::#rule_ident(#child_ident) => vec![
#child_ident.as_node_ref()
]
}
}
_ => quote! {
#type_ident::#rule_ident(_) => vec![]
}
}
},
None => {
quote! {
#type_ident::#rule_ident => vec![]
}
}
}
})
.collect::<Vec<_>>();
quote! {
impl AstNode for #type_ident {
fn children(&self) -> Vec<AstNodeRef> {
match self {
#(#match_arms,)*
}
}
fn as_node_ref(&self) -> AstNodeRef {
AstNodeRef::#type_ident(&self)
}
}
}
}
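For reference, below is a minimal, self-contained sketch of the kind of impl `make_enum_ast_node_impl` is now expected to emit. The `AstNode` trait and `AstNodeRef<'a>` shapes are inferred from the generated code above; the node names (`Statement`, `ExpressionStatement`) are illustrative placeholders, not taken from the real grammar.

```rust
// Assumed shapes of the shared trait and reference enum (illustrative only).
pub enum AstNodeRef<'a> {
    Statement(&'a Statement),
    ExpressionStatement(&'a ExpressionStatement),
}

pub trait AstNode {
    fn children(&self) -> Vec<AstNodeRef>;
    fn as_node_ref(&self) -> AstNodeRef;
}

// A leaf-like node with no children of its own.
pub struct ExpressionStatement;

impl AstNode for ExpressionStatement {
    fn children(&self) -> Vec<AstNodeRef> { vec![] }
    fn as_node_ref(&self) -> AstNodeRef { AstNodeRef::ExpressionStatement(self) }
}

// A hypothetical tree enum: one rule with a node child, one without.
pub enum Statement {
    Expression(ExpressionStatement),
    Break,
}

// Roughly what the macro should expand to for `Statement`.
impl AstNode for Statement {
    fn children(&self) -> Vec<AstNodeRef> {
        match self {
            Statement::Expression(expression_statement) => vec![expression_statement.as_node_ref()],
            Statement::Break => vec![],
        }
    }
    fn as_node_ref(&self) -> AstNodeRef { AstNodeRef::Statement(self) }
}

fn main() {
    let statement = Statement::Expression(ExpressionStatement);
    // The single node child is surfaced as one AstNodeRef.
    assert_eq!(statement.children().len(), 1);
}
```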

View File

@ -1,6 +1,18 @@
use proc_macro2::TokenStream;
use quote::{format_ident, quote};
use crate::spec::leaf_enum_spec::LeafEnumBuildSpec;
pub fn make_leaf_enum_ast_node_impl(spec: &LeafEnumBuildSpec) -> TokenStream {
todo!()
let type_ident = format_ident!("{}", spec.build());
quote! {
impl AstNode for #type_ident {
fn children(&self) -> Vec<AstNodeRef> {
vec![]
}
fn as_node_ref(&self) -> AstNodeRef {
AstNodeRef::#type_ident(&self)
}
}
}
}

View File

@ -1,6 +1,18 @@
use proc_macro2::TokenStream;
use crate::spec::leaf_struct_spec::LeafStructBuildSpec;
use proc_macro2::TokenStream;
use quote::{format_ident, quote};
pub fn make_leaf_struct_ast_node_impl(spec: &LeafStructBuildSpec) -> TokenStream {
todo!()
}
let type_ident = format_ident!("{}", spec.build());
quote! {
impl AstNode for #type_ident {
fn children(&self) -> Vec<AstNodeRef> {
vec![]
}
fn as_node_ref(&self) -> AstNodeRef {
AstNodeRef::#type_ident(&self)
}
}
}
}

View File

@ -41,12 +41,33 @@ pub fn make_ast_node_impl(build_spec: &BuildSpec) -> Option<TokenStream> {
}
}
pub fn make_ast_enum_member(build_spec: &BuildSpec) -> Option<TokenStream> {
pub fn make_ast_enum_member(build_spec: &BuildSpec) -> Option<TokenStream> {
match build_spec {
BuildSpec::Struct(struct_spec) => {
let type_ident = format_ident!("{}", struct_spec.build());
Some(quote! { #type_ident(#type_ident) })
Some(format_ident!("{}", struct_spec.build()))
}
_ => None,
BuildSpec::LeafStruct(leaf_struct) => {
Some(format_ident!("{}", leaf_struct.build()))
}
BuildSpec::Enum(enum_spec) => {
Some(format_ident!("{}", enum_spec.build()))
}
BuildSpec::LeafEnum(leaf_enum) => {
Some(format_ident!("{}", leaf_enum.build()))
}
BuildSpec::PolymorphicType(polymorphic_type) => {
Some(format_ident!("{}", polymorphic_type.name()))
},
BuildSpec::PolymorphicEnumLoop(polymorphic_enum_loop) => {
Some(format_ident!("{}", polymorphic_enum_loop.name()))
}
BuildSpec::PolymorphicPassThrough(_) => None,
BuildSpec::Production(_) => None,
BuildSpec::NodeProduction(_) => None,
}
.map(|type_ident| {
quote! {
#type_ident(&'a #type_ident)
}
})
}

View File

@ -1,6 +1,49 @@
use crate::spec::polymorphic_enum_loop_spec::{
PolymorphicEnumLoopBuildSpec, PolymorphicEnumLoopRule, PolymorphicEnumLoopRuleBuildChild,
};
use proc_macro2::TokenStream;
use crate::spec::polymorphic_enum_loop_spec::PolymorphicEnumLoopBuildSpec;
use quote::{format_ident, quote};
pub fn make_polymorphic_enum_loop_ast_node_impl(spec: &PolymorphicEnumLoopBuildSpec) -> TokenStream {
todo!()
}
pub fn make_polymorphic_enum_loop_ast_node_impl(
spec: &PolymorphicEnumLoopBuildSpec,
) -> TokenStream {
let type_ident = format_ident!("{}", spec.name());
let build_rule = spec
.rules()
.filter_map(|rule| match rule {
PolymorphicEnumLoopRule::PassThrough(_) => None,
PolymorphicEnumLoopRule::Build(build_rule) => Some(build_rule),
})
.next()
.unwrap();
let child_adders = build_rule
.children()
.map(|child| {
let child_ident = match child {
PolymorphicEnumLoopRuleBuildChild::UseCurrent(use_current) => {
format_ident!("{}", use_current.name())
}
PolymorphicEnumLoopRuleBuildChild::OnEach(on_each) => {
format_ident!("{}", on_each.name())
}
};
quote! {
children.push(self.#child_ident().as_node_ref())
}
})
.collect::<Vec<_>>();
quote! {
impl AstNode for #type_ident {
fn children(&self) -> Vec<AstNodeRef> {
let mut children: Vec<AstNodeRef> = vec![];
#(#child_adders;)*
children
}
fn as_node_ref(&self) -> AstNodeRef {
AstNodeRef::#type_ident(&self)
}
}
}
}

View File

@ -1,6 +1,32 @@
use proc_macro2::TokenStream;
use crate::spec::polymorphic_type_spec::PolymorphicTypeBuildSpec;
use convert_case::{Case, Casing};
use proc_macro2::TokenStream;
use quote::{format_ident, quote};
pub fn make_polymorphic_type_ast_node_impl(spec: &PolymorphicTypeBuildSpec) -> TokenStream {
todo!()
}
let type_ident = format_ident!("{}", spec.name());
let match_arms = spec
.variants()
.map(|variant| {
let variant_ident = format_ident!("{}", variant.name());
let child_ident = format_ident!("{}", variant.inner_kind().to_case(Case::Snake));
quote! {
#type_ident::#variant_ident(#child_ident) => vec![#child_ident.as_node_ref()]
}
})
.collect::<Vec<_>>();
quote! {
impl AstNode for #type_ident {
fn children(&self) -> Vec<AstNodeRef> {
match self {
#(#match_arms,)*
}
}
fn as_node_ref(&self) -> AstNodeRef {
AstNodeRef::#type_ident(&self)
}
}
}
}

View File

@ -8,41 +8,44 @@ pub fn make_struct_ast_node_impl(spec: &StructSpec) -> TokenStream {
.children()
.map(|child| match child {
StructChild::SkipChild(_) => None,
StructChild::VecChild(vec_child) => {
match vec_child.build() {
VecChildBuild::String(_) => None,
VecChildBuild::Node(_) => {
let child_ident = format_ident!("{}", vec_child.name());
let children_stream = quote! {
for child in self.#child_ident().map(AstNode::as_node_ref).collect() {
children.push(child);
StructChild::VecChild(vec_child) => match vec_child.build() {
VecChildBuild::String(_) => None,
VecChildBuild::Node(_) => {
let child_ident = format_ident!("{}", vec_child.name());
let children_stream = quote! {
for child in self.#child_ident().map(AstNode::as_node_ref) {
children.push(child);
}
};
Some(children_stream)
}
},
StructChild::MemberChild(member_child) => match member_child.build() {
MemberChildBuild::Node(_) => {
let child_ident = format_ident!("{}", member_child.name());
if member_child.optional() {
Some(quote! {
if let Some(#child_ident) = self.#child_ident() {
children.push(#child_ident.as_node_ref());
}
};
Some(children_stream)
})
} else {
Some(quote! {
children.push(self.#child_ident().as_node_ref())
})
}
}
}
StructChild::MemberChild(member_child) => {
match member_child.build() {
MemberChildBuild::Node(_) => {
let child_ident = format_ident!("{}", member_child.name());
let child_stream = quote! {
children.add(self.#child_ident().as_node_ref())
};
Some(child_stream)
}
MemberChildBuild::Boolean(_) => None
}
}
MemberChildBuild::Boolean(_) => None,
},
})
.filter(Option::is_some)
.map(Option::unwrap)
.collect::<Vec<_>>();
quote! {
impl AstNode for #type_ident {
fn children(&self) -> Vec<AstNodeRef> {
let children: Vec<AstNodeRef> = vec![];
let mut children: Vec<AstNodeRef> = vec![];
#(#child_adders;)*
children
}
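As a sanity check on the reworked optional/vec handling above, here is a hand-written, self-contained equivalent of the `children()` body the struct generator should now produce for a hypothetical node with a required member child, an optional member child, and a vec child. All type and accessor names below are stand-ins for illustration.

```rust
// Hand-written equivalent of the generated children() logic for a
// hypothetical struct node. Type and accessor names are illustrative.
pub enum AstNodeRef<'a> {
    Identifier(&'a Identifier),
    Block(&'a Block),
    FnDecl(&'a FnDecl),
}

pub trait AstNode {
    fn children(&self) -> Vec<AstNodeRef>;
    fn as_node_ref(&self) -> AstNodeRef;
}

pub struct Identifier;
pub struct Block;

impl AstNode for Identifier {
    fn children(&self) -> Vec<AstNodeRef> { vec![] }
    fn as_node_ref(&self) -> AstNodeRef { AstNodeRef::Identifier(self) }
}

impl AstNode for Block {
    fn children(&self) -> Vec<AstNodeRef> { vec![] }
    fn as_node_ref(&self) -> AstNodeRef { AstNodeRef::Block(self) }
}

pub struct FnDecl {
    name: Identifier,                // required member child
    return_type: Option<Identifier>, // optional member child
    blocks: Vec<Block>,              // vec child of node kind
}

impl FnDecl {
    fn name(&self) -> &Identifier { &self.name }
    fn return_type(&self) -> Option<&Identifier> { self.return_type.as_ref() }
    fn blocks(&self) -> impl Iterator<Item = &Block> { self.blocks.iter() }
}

impl AstNode for FnDecl {
    fn children(&self) -> Vec<AstNodeRef> {
        let mut children: Vec<AstNodeRef> = vec![];
        // Required member child: pushed unconditionally.
        children.push(self.name().as_node_ref());
        // Optional member child: pushed only when present.
        if let Some(return_type) = self.return_type() {
            children.push(return_type.as_node_ref());
        }
        // Vec child: each element becomes its own AstNodeRef.
        for child in self.blocks().map(AstNode::as_node_ref) {
            children.push(child);
        }
        children
    }
    fn as_node_ref(&self) -> AstNodeRef { AstNodeRef::FnDecl(self) }
}

fn main() {
    let decl = FnDecl {
        name: Identifier,
        return_type: None,
        blocks: vec![Block, Block],
    };
    // One required child, no optional child, two vec children.
    assert_eq!(decl.children().len(), 3);
}
```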

View File

@ -23,5 +23,15 @@ pub fn deserialize_leaf_struct(name: &str, props: &Yaml) -> LeafStructBuildSpec
})
.map(Box::new)
.collect();
LeafStructBuildSpec::new(name, members)
let derive = props["derive"]
.as_vec()
.map(|derive_yaml| {
derive_yaml.iter()
.map(|trait_yaml| {
trait_yaml.as_str().unwrap().to_string()
}).collect::<Vec<_>>()
})
.unwrap_or_default();
LeafStructBuildSpec::new(name, members, derive)
}
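A small standalone sketch of the new optional `derive` handling follows, assuming the yaml-rust crate provides the `Yaml` type used here (the actual YAML dependency of this crate is not visible in the diff). Unlike the code above, the sketch skips non-string entries rather than unwrapping them.

```rust
// Sketch of reading an optional `derive:` string list from a node spec,
// assuming the yaml-rust crate. A missing key yields an empty Vec.
use yaml_rust::{Yaml, YamlLoader};

fn read_derive(props: &Yaml) -> Vec<String> {
    props["derive"]
        .as_vec()
        .map(|traits| {
            traits
                .iter()
                .filter_map(Yaml::as_str) // tolerate non-string entries instead of panicking
                .map(String::from)
                .collect()
        })
        .unwrap_or_default()
}

fn main() {
    let with_derive = YamlLoader::load_from_str("derive:\n  - Clone\n  - Hash\n").unwrap();
    assert_eq!(read_derive(&with_derive[0]), vec!["Clone", "Hash"]);

    // A spec without a `derive:` key falls back to an empty list.
    let without_derive = YamlLoader::load_from_str("members: []\n").unwrap();
    assert!(read_derive(&without_derive[0]).is_empty());
}
```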

View File

@ -4,12 +4,14 @@ mod deserialize;
mod pretty_print;
mod spec;
mod type_gen;
mod walk;
use crate::ast_node::{make_ast_enum_member, make_ast_node_impl};
use crate::build_fn::make_build_fn;
use crate::deserialize::deserialize_yaml_spec;
use crate::pretty_print::make_pretty_print_impl;
use crate::type_gen::make_type;
use crate::walk::make_walk_fn;
use proc_macro2::TokenStream;
use quote::quote;
use spec::BuildSpec;
@ -166,7 +168,7 @@ fn generate_ast_node_file(build_specs: &[BuildSpec]) -> AstGeneratedFile {
let combined = quote! {
use crate::ast::node::*;
pub enum AstNodeRef {
pub enum AstNodeRef<'a> {
#(#ast_enum_members,)*
}
@ -184,6 +186,14 @@ fn generate_ast_node_file(build_specs: &[BuildSpec]) -> AstGeneratedFile {
}
}
fn generate_walk_file(build_specs: &[BuildSpec]) -> AstGeneratedFile {
let stream = make_walk_fn(build_specs);
AstGeneratedFile {
name: String::from("walk.rs"),
contents: token_stream_to_string(stream),
}
}
pub fn get_build_specs(yaml: &str) -> Vec<BuildSpec> {
deserialize_yaml_spec(yaml)
}
@ -194,5 +204,6 @@ pub fn generate_files(build_specs: &[BuildSpec]) -> Vec<AstGeneratedFile> {
generate_node_file(build_specs),
generate_pretty_print_file(build_specs),
generate_ast_node_file(build_specs),
generate_walk_file(build_specs),
]
}

View File

@ -1,13 +1,15 @@
pub struct LeafStructBuildSpec {
build: String,
members: Vec<Box<LeafStructMember>>,
derive: Vec<String>,
}
impl LeafStructBuildSpec {
pub fn new(build: &str, members: Vec<Box<LeafStructMember>>) -> Self {
pub fn new(build: &str, members: Vec<Box<LeafStructMember>>, derive: Vec<String>) -> Self {
Self {
build: build.to_string(),
members,
derive,
}
}
@ -18,6 +20,10 @@ impl LeafStructBuildSpec {
pub fn members(&self) -> impl Iterator<Item = &LeafStructMember> {
self.members.iter().map(Box::as_ref)
}
pub fn derive(&self) -> impl Iterator<Item = &str> {
self.derive.iter().map(String::as_str)
}
}
pub struct LeafStructMember {

View File

@ -83,10 +83,27 @@ pub fn make_leaf_struct_type(build_spec: &LeafStructBuildSpec) -> TokenStream {
})
.collect::<Vec<_>>();
quote! {
pub struct #type_ident {
#(#annotated_members),*
let struct_stream = if build_spec.derive().count() > 0 {
let derives = build_spec.derive().map(|derive| {
format_ident!("{}", derive)
}).collect::<Vec<_>>();
quote! {
#[derive(#(#derives),*)]
pub struct #type_ident {
#(#annotated_members),*
}
}
} else {
quote! {
pub struct #type_ident {
#(#annotated_members),*
}
}
};
quote! {
#struct_stream
impl #type_ident {
pub fn new(#(#member_args),*) -> Self {

ast-generator/src/walk.rs (new file, 61 lines)
View File

@ -0,0 +1,61 @@
use crate::spec::BuildSpec;
use convert_case::{Case, Casing};
use proc_macro2::TokenStream;
use quote::{format_ident, quote};
pub fn make_walk_fn(specs: &[BuildSpec]) -> TokenStream {
let child_match_arms = specs
.iter()
.map(|spec| match spec {
BuildSpec::Enum(enum_spec) => Some((
format_ident!("{}", enum_spec.build()),
format_ident!("{}", enum_spec.build().to_case(Case::Snake)),
)),
BuildSpec::LeafEnum(leaf_enum) => Some((
format_ident!("{}", leaf_enum.build()),
format_ident!("{}", leaf_enum.build().to_case(Case::Snake)),
)),
BuildSpec::Struct(struct_spec) => Some((
format_ident!("{}", struct_spec.build()),
format_ident!("{}", struct_spec.build().to_case(Case::Snake)),
)),
BuildSpec::LeafStruct(leaf_struct) => Some((
format_ident!("{}", leaf_struct.build()),
format_ident!("{}", leaf_struct.build().to_case(Case::Snake)),
)),
BuildSpec::Production(_) => None,
BuildSpec::NodeProduction(_) => None,
BuildSpec::PolymorphicType(polymorphic_type) => Some((
format_ident!("{}", polymorphic_type.name()),
format_ident!("{}", polymorphic_type.name().to_case(Case::Snake)),
)),
BuildSpec::PolymorphicEnumLoop(polymorphic_enum_loop) => Some((
format_ident!("{}", polymorphic_enum_loop.name()),
format_ident!("{}", polymorphic_enum_loop.name().to_case(Case::Snake)),
)),
BuildSpec::PolymorphicPassThrough(_) => None,
})
.filter(Option::is_some)
.map(Option::unwrap)
.map(|(type_ident, inner_ident)| {
quote! {
#type_ident(#inner_ident) => walk_depth_first(#inner_ident, f)
}
})
.collect::<Vec<_>>();
quote! {
use crate::ast::node::*;
use crate::ast::ast_node::*;
pub fn walk_depth_first(node: &impl AstNode, f: &mut impl FnMut(AstNodeRef)) {
use AstNodeRef::*;
for child in node.children() {
match child {
#(#child_match_arms,)*
}
}
f(node.as_node_ref());
}
}
}
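For context, the following is a self-contained stand-in that mirrors the traversal shape of the generated walk_depth_first: it is a post-order walk, recursing into children before applying `f` to the node itself. The node types below are illustrative, not part of the real generated AST.

```rust
// Hand-written twin of the generated walk_depth_first, using two stand-in
// node types. Children are visited before the node itself (post-order).
pub enum AstNodeRef<'a> {
    Module(&'a Module),
    Item(&'a Item),
}

pub trait AstNode {
    fn children(&self) -> Vec<AstNodeRef>;
    fn as_node_ref(&self) -> AstNodeRef;
}

pub struct Item;

impl AstNode for Item {
    fn children(&self) -> Vec<AstNodeRef> { vec![] }
    fn as_node_ref(&self) -> AstNodeRef { AstNodeRef::Item(self) }
}

pub struct Module {
    items: Vec<Item>,
}

impl AstNode for Module {
    fn children(&self) -> Vec<AstNodeRef> {
        self.items.iter().map(AstNode::as_node_ref).collect()
    }
    fn as_node_ref(&self) -> AstNodeRef { AstNodeRef::Module(self) }
}

pub fn walk_depth_first(node: &impl AstNode, f: &mut impl FnMut(AstNodeRef)) {
    for child in node.children() {
        match child {
            AstNodeRef::Module(module) => walk_depth_first(module, f),
            AstNodeRef::Item(item) => walk_depth_first(item, f),
        }
    }
    f(node.as_node_ref());
}

fn main() {
    let module = Module { items: vec![Item, Item] };
    let mut visited = 0usize;
    walk_depth_first(&module, &mut |_| visited += 1);
    // Two items are visited first, then the module itself.
    assert_eq!(visited, 3);
}
```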

View File

@ -161,3 +161,7 @@ pub mod ast_node {
}
}
}
pub mod walk {
include!(concat!(env!("OUT_DIR"), "/src/ast/walk.rs"));
}

View File

@ -32,7 +32,7 @@ pub fn name_analysis(paths: &Vec<PathBuf>) -> Result<(), Box<dyn std::error::Err
add_std_core_symbols(&mut symbol_table).expect("Failed to add std::core symbols.");
let diagnostics = analyze_names(
&mut compilation_units.iter().map(Box::as_mut),
compilation_units.as_mut_slice(),
&mut symbol_table
);
if diagnostics.is_empty() {

File diff suppressed because it is too large.

File diff suppressed because it is too large.

View File

@ -19,32 +19,39 @@ The resolve phase has one main responsibility: resolve all references based on t
`scope_id` property.
*/
use crate::ast::node::CompilationUnit;
use crate::ast::node::{CompilationUnit, Identifier};
use crate::diagnostic::DmDiagnostic;
use crate::name_analysis::gather::gather_compilation_unit;
use crate::name_analysis::resolve::resolve_compilation_unit;
// use crate::name_analysis::resolve::resolve_compilation_unit;
use crate::name_analysis::symbol_table::SymbolTable;
use std::collections::HashMap;
mod fqn_context;
mod gather;
mod resolve;
// mod resolve;
pub mod symbol;
pub mod symbol_table;
pub fn analyze_names(
compilation_units: &mut [CompilationUnit],
compilation_units: &mut [Box<CompilationUnit>],
symbol_table: &mut SymbolTable,
) -> Vec<DmDiagnostic> {
let mut diagnostics = vec![];
let mut identifier_scope_ids: HashMap<Identifier, usize> = HashMap::new();
// gather symbols
for compilation_unit in compilation_units.iter_mut() {
gather_compilation_unit(compilation_unit, symbol_table, &mut diagnostics);
gather_compilation_unit(
compilation_unit,
symbol_table,
&mut identifier_scope_ids,
&mut diagnostics,
);
}
// resolve symbols
for compilation_unit in compilation_units.iter_mut() {
resolve_compilation_unit(compilation_unit, symbol_table, &mut diagnostics);
// resolve_compilation_unit(compilation_unit, symbol_table, &mut diagnostics);
}
diagnostics.into()
@ -64,7 +71,7 @@ pub fn analyze_names(
// use indoc::indoc;
// use pest::Parser;
// use std::collections::HashMap;
//
//
// fn assert_number_of_diagnostics(
// sources: HashMap<&str, &str>,
// symbol_table: &mut SymbolTable,
@ -72,7 +79,7 @@ pub fn analyze_names(
// ) -> Vec<CompilationUnit> {
// let mut files = SimpleFiles::new();
// let mut compilation_units = vec![];
//
//
// for (file_name, source) in sources {
// let file_id = files.add(file_name, source);
// let parse_result = DeimosParser::parse(Rule::CompilationUnit, source);
@ -83,35 +90,35 @@ pub fn analyze_names(
// if pairs.as_str().trim() != source.trim() {
// panic!("Parsing did not consume entire input.");
// }
//
//
// compilation_units.push(build_ast(file_name, file_id, pairs.next().unwrap()));
// }
//
//
// let diagnostics = analyze_names(&mut compilation_units, symbol_table);
//
//
// if diagnostics.len() != n_diagnostics {
// let writer = StandardStream::stderr(ColorChoice::Always);
// let config = term::Config::default();
//
//
// for diagnostic in &diagnostics {
// term::emit(&mut writer.lock(), &config, &files, &diagnostic).unwrap();
// }
//
//
// eprintln!("{}", symbol_table);
// }
//
//
// assert_eq!(n_diagnostics, diagnostics.len());
//
//
// compilation_units
// }
//
//
// fn assert_no_diagnostics(
// sources: HashMap<&str, &str>,
// symbol_table: &mut SymbolTable,
// ) -> Vec<CompilationUnit> {
// assert_number_of_diagnostics(sources, symbol_table, 0)
// }
//
//
// fn assert_saved_symbols(compilation_unit: &CompilationUnit) {
// walk_depth_first(compilation_unit, &mut |node_ref| match node_ref {
// NodeRef::Identifier(identifier) => {
@ -130,7 +137,7 @@ pub fn analyze_names(
// _ => {}
// })
// }
//
//
// fn assert_resolved_symbols(compilation_unit: &CompilationUnit) {
// walk_depth_first(compilation_unit, &mut |node_ref| match node_ref {
// NodeRef::UseStatement(use_statement) => match use_statement {
@ -139,7 +146,7 @@ pub fn analyze_names(
// _ => {}
// })
// }
//
//
// #[test]
// fn params_seen() {
// let sources: HashMap<&str, &str> = HashMap::from([(
@ -149,13 +156,13 @@ pub fn analyze_names(
// let x = args;
// }"},
// )]);
//
//
// let cus = assert_no_diagnostics(sources, &mut SymbolTable::new());
// for ref cu in cus {
// assert_saved_symbols(cu);
// }
// }
//
//
// #[test]
// fn two_files() {
// let sources: HashMap<&str, &str> = HashMap::from([
@ -169,19 +176,19 @@ pub fn analyze_names(
// "deps.dm",
// indoc! {"
// ns test;
//
//
// pub class Greeter {}
// "},
// ),
// ]);
//
//
// let cus = assert_no_diagnostics(sources, &mut SymbolTable::new());
// for ref cu in cus {
// assert_saved_symbols(cu);
// assert_resolved_symbols(cu);
// }
// }
//
//
// #[test]
// fn sees_std_core_println() {
// let sources: HashMap<&str, &str> = HashMap::from([(
@ -192,7 +199,7 @@ pub fn analyze_names(
// }
// "},
// )]);
//
//
// let mut symbol_table = SymbolTable::new();
// add_std_core_symbols(&mut symbol_table).expect("Failed to add std::core symbols.");
// let cus = assert_no_diagnostics(sources, &mut symbol_table);
@ -201,7 +208,7 @@ pub fn analyze_names(
// assert_resolved_symbols(cu);
// }
// }
//
//
// #[test]
// fn sees_duplicate_fn() {
// let sources: HashMap<&str, &str> = HashMap::from([(
@ -213,7 +220,7 @@ pub fn analyze_names(
// )]);
// assert_number_of_diagnostics(sources, &mut SymbolTable::new(), 1);
// }
//
//
// #[test]
// fn use_class_from_other_file() {
// let sources: HashMap<&str, &str> = HashMap::from([
@ -221,7 +228,7 @@ pub fn analyze_names(
// "main.dm",
// indoc! {"
// use greeter::Greeter;
//
//
// fn test(greeter: Greeter) {}
// "},
// ),
@ -229,7 +236,7 @@ pub fn analyze_names(
// "greeter.dm",
// indoc! {"
// ns greeter;
//
//
// class Greeter {}
// "},
// ),
@ -241,7 +248,7 @@ pub fn analyze_names(
// assert_resolved_symbols(cu);
// }
// }
//
//
// #[test]
// fn shadow_import() {
// let sources: HashMap<&str, &str> = HashMap::from([
@ -249,7 +256,7 @@ pub fn analyze_names(
// "main.dm",
// indoc! {"
// use greeter::Greeter;
//
//
// class Greeter {}
// "},
// ),
@ -257,7 +264,7 @@ pub fn analyze_names(
// "greeter.dm",
// indoc! {"
// ns greeter;
//
//
// class Greeter {}
// "},
// ),

View File

@ -57,6 +57,11 @@ $defs:
description: Ordered child fields for this node.
items:
$ref: "#/$defs/StructChild"
derive:
type: array
description: Traits to derive.
items:
type: string
required:
- children
StructChild:
@ -201,6 +206,11 @@ $defs:
description: Ordered members for this node.
items:
$ref: "#/$defs/LeafStructMemberDefinition"
derive:
type: array
description: Traits to derive.
items:
type: string
required:
- members
required:

View File

@ -37,6 +37,11 @@ Identifier:
kind: file_id
- range:
kind: range
derive:
- Clone
- PartialEq
- Eq
- Hash
FullyQualifiedName:
struct:
children:
@ -867,8 +872,8 @@ ComparisonRhs:
- expression:
member:
rule: ShiftExpression
build:
node:
build:
node:
kind: Expression
with: ShiftExpression
ComparisonOperator:
@ -907,8 +912,8 @@ ShiftRhs:
- expression:
member:
rule: AdditiveExpression
build:
node:
build:
node:
kind: Expression
with: AdditiveExpression
ShiftOperator:
@ -943,8 +948,8 @@ AdditiveRhs:
- expression:
member:
rule: MultiplicativeExpression
build:
node:
build:
node:
kind: Expression
with: MultiplicativeExpression
AdditiveOperator:
@ -979,8 +984,8 @@ MultiplicativeRhs:
- expression:
member:
rule: PrefixExpression
build:
node:
build:
node:
kind: Expression
with: PrefixExpression
MultiplicativeOperator:
@ -999,10 +1004,10 @@ PrefixExpression:
variant: Prefix
children:
- operator:
on_each:
on_each:
rule: PrefixOperator
- expression:
use_current:
use_current:
kind: Expression
- SuffixExpression:
pass_through:
@ -1075,7 +1080,7 @@ ListExpression:
children:
- expression_list
ParenthesizedExpression:
node_production:
node_production:
kind: Expression
with: Expression