Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit f437464c authored by Henri Chataing, committed by Gerrit Code Review
Browse files

Merge changes Ic205b6a1,I3bef064b,I5f06a09b,I790ef147

* changes:
  pdl: Use the analyzer ast for the default rust backend
  pdl: Remove all lint diagnostics from the legacy linter
  pdl: Implement group inlining in the new analyzer
  pdl: Remove scope members checksums, children, payload, sizes, groups
parents 104653cb 1da0880b
Loading
Loading
Loading
Loading
+154 −3
Original line number Diff line number Diff line
@@ -6,6 +6,7 @@ use std::collections::HashMap;

use crate::ast::*;
use crate::parser::ast as parser_ast;
use crate::utils;

pub mod ast {
    use serde::Serialize;
@@ -25,7 +26,7 @@ pub mod ast {
        Unknown,
    }

    #[derive(Debug, Serialize, Default)]
    #[derive(Debug, Serialize, Default, Clone, PartialEq)]
    pub struct Annotation;

    #[derive(Default, Debug, Clone)]
@@ -1205,8 +1206,67 @@ fn compute_field_sizes(file: &parser_ast::File) -> ast::File {
}

/// Inline group fields and remove group declarations.
fn inline_groups(_file: &mut ast::File) -> Result<(), Diagnostics> {
    // TODO
/// Inline group fields into the packet and struct declarations that
/// reference them, applying the constraints given at each use site,
/// and remove the group declarations from the file.
fn inline_groups(file: &mut ast::File) -> Result<(), Diagnostics> {
    // Recursively expand a sequence of fields. `constraints` carries the
    // fixed values imposed by the enclosing group instantiations, keyed
    // by the constrained field identifier.
    fn inline_fields<'a>(
        fields: impl Iterator<Item = &'a ast::Field>,
        groups: &HashMap<String, ast::Decl>,
        constraints: &HashMap<String, Constraint>,
    ) -> Vec<ast::Field> {
        fields
            .flat_map(|field| match &field.desc {
                FieldDesc::Group { group_id, constraints: group_constraints } => {
                    // Merge this use site's constraints on top of the
                    // inherited ones (use-site entries win on conflict),
                    // then inline the group's own fields recursively.
                    let mut constraints = constraints.clone();
                    constraints.extend(
                        group_constraints
                            .iter()
                            .map(|constraint| (constraint.id.clone(), constraint.clone())),
                    );
                    // NOTE(review): unwrap assumes the group reference was
                    // validated earlier by the analyzer — TODO confirm.
                    inline_fields(groups.get(group_id).unwrap().fields(), groups, &constraints)
                }
                FieldDesc::Scalar { id, width } if constraints.contains_key(id) => {
                    // A constrained scalar field becomes a fixed scalar
                    // field carrying the constraint's value.
                    vec![ast::Field {
                        desc: FieldDesc::FixedScalar {
                            width: *width,
                            value: constraints.get(id).unwrap().value.unwrap(),
                        },
                        loc: field.loc,
                        annot: field.annot.clone(),
                    }]
                }
                FieldDesc::Typedef { id, type_id, .. } if constraints.contains_key(id) => {
                    // A constrained typedef field becomes a fixed enum
                    // field carrying the constraint's tag.
                    vec![ast::Field {
                        desc: FieldDesc::FixedEnum {
                            enum_id: type_id.clone(),
                            tag_id: constraints
                                .get(id)
                                .and_then(|constraint| constraint.tag_id.clone())
                                .unwrap(),
                        },
                        loc: field.loc,
                        annot: field.annot.clone(),
                    }]
                }
                // Any other field is copied through unchanged.
                _ => vec![field.clone()],
            })
            .collect()
    }

    // Remove the group declarations from the file and index them by name.
    let groups = utils::drain_filter(&mut file.declarations, |decl| {
        matches!(&decl.desc, DeclDesc::Group { .. })
    })
    .into_iter()
    .map(|decl| (decl.id().unwrap().to_owned(), decl))
    .collect::<HashMap<String, _>>();

    // Substitute group references in the remaining packet and struct
    // declarations.
    for decl in file.declarations.iter_mut() {
        match &mut decl.desc {
            DeclDesc::Packet { fields, .. } | DeclDesc::Struct { fields, .. } => {
                *fields = inline_fields(fields.iter(), &groups, &HashMap::new())
            }
            _ => (),
        }
    }

    Ok(())
}

@@ -2052,4 +2112,95 @@ mod test {
        "#
        );
    }

    /// Parse and analyze an inline source text, panicking on any parser
    /// or analyzer error. Used by tests to compare desugared files.
    fn desugar(text: &str) -> analyzer::ast::File {
        let mut sources = SourceDatabase::new();
        let parsed = parse_inline(&mut sources, "stdin".to_owned(), text.to_owned())
            .expect("parsing failure");
        analyzer::analyze(&parsed).expect("analyzer failure")
    }

    #[test]
    fn test_inline_groups() {
        // An unconstrained group use expands to the group's fields
        // verbatim.
        assert_eq!(
            desugar(
                r#"
        little_endian_packets
        enum E : 8 { X=0, Y=1 }
        group G {
            a: 8,
            b: E,
        }
        packet A {
            G { }
        }
        "#
            ),
            desugar(
                r#"
        little_endian_packets
        enum E : 8 { X=0, Y=1 }
        packet A {
            a: 8,
            b: E,
        }
        "#
            )
        );

        // Constraints on the group use fix the constrained scalar and
        // enum fields to the given value and tag.
        assert_eq!(
            desugar(
                r#"
        little_endian_packets
        enum E : 8 { X=0, Y=1 }
        group G {
            a: 8,
            b: E,
        }
        packet A {
            G { a=1, b=X }
        }
        "#
            ),
            desugar(
                r#"
        little_endian_packets
        enum E : 8 { X=0, Y=1 }
        packet A {
            _fixed_ = 1: 8,
            _fixed_ = X: E,
        }
        "#
            )
        );

        // Inlining is recursive: groups used inside other groups are
        // expanded as well, with constraints merged from each use site.
        assert_eq!(
            desugar(
                r#"
        little_endian_packets
        enum E : 8 { X=0, Y=1 }
        group G1 {
            a: 8,
        }
        group G2 {
            G1 { a=1 },
            b: E,
        }
        packet A {
            G2 { b=X }
        }
        "#
            ),
            desugar(
                r#"
        little_endian_packets
        enum E : 8 { X=0, Y=1 }
        packet A {
            _fixed_ = 1: 8,
            _fixed_ = X: E,
        }
        "#
            )
        );
    }
}
+60 −85
Original line number Diff line number Diff line
use crate::lint;
use codespan_reporting::diagnostic;
use codespan_reporting::files;
use serde::Serialize;
@@ -64,7 +63,7 @@ pub struct Tag {
    pub value: usize,
}

#[derive(Debug, Serialize, Clone, PartialEq, Eq)]
#[derive(Debug, Serialize, Clone)]
#[serde(tag = "kind", rename = "constraint")]
pub struct Constraint {
    pub id: String,
@@ -112,7 +111,7 @@ pub enum FieldDesc {
    Group { group_id: String, constraints: Vec<Constraint> },
}

#[derive(Debug, Serialize, PartialEq, Eq)]
#[derive(Debug, Serialize, Clone)]
pub struct Field<A: Annotation> {
    pub loc: SourceRange,
    #[serde(skip_serializing)]
@@ -128,7 +127,7 @@ pub struct TestCase {
    pub input: String,
}

#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, PartialEq, Eq)]
#[serde(tag = "kind")]
pub enum DeclDesc<A: Annotation> {
    #[serde(rename = "checksum_declaration")]
@@ -235,6 +234,47 @@ impl ops::Add<SourceRange> for SourceRange {
    }
}

// Structural equality for `Endianness`: only the declared value is
// compared; the source location is intentionally ignored.
impl PartialEq for Endianness {
    fn eq(&self, rhs: &Self) -> bool {
        self.value == rhs.value
    }
}
impl Eq for Endianness {}

// Structural equality for `Tag`: compares identifier and value, leaving
// out the source location.
impl PartialEq for Tag {
    fn eq(&self, rhs: &Self) -> bool {
        self.id == rhs.id && self.value == rhs.value
    }
}
impl Eq for Tag {}

// Structural equality for `Constraint`: compares identifier, value, and
// tag identifier, leaving out the source location.
impl PartialEq for Constraint {
    fn eq(&self, rhs: &Self) -> bool {
        self.id == rhs.id && self.value == rhs.value && self.tag_id == rhs.tag_id
    }
}
impl Eq for Constraint {}

// Structural equality for `TestCase`: only the input string is
// compared; the source location is ignored.
impl PartialEq for TestCase {
    fn eq(&self, rhs: &Self) -> bool {
        self.input == rhs.input
    }
}
impl Eq for TestCase {}

// Structural equality for `File`: compares endianness and declarations,
// leaving out comments and PDL version information.
impl<A: Annotation + std::cmp::PartialEq> PartialEq for File<A> {
    fn eq(&self, rhs: &Self) -> bool {
        self.endianness == rhs.endianness && self.declarations == rhs.declarations
    }
}
impl<A: Annotation + std::cmp::PartialEq> Eq for File<A> {}

impl<A: Annotation> File<A> {
    pub fn new(file: FileId) -> File<A> {
        File {
@@ -259,6 +299,14 @@ impl<A: Annotation> File<A> {
    }
}

// Structural equality for `Decl`: only the descriptor is compared; the
// source location and annotations are ignored.
impl<A: Annotation + std::cmp::PartialEq> PartialEq for Decl<A> {
    fn eq(&self, rhs: &Self) -> bool {
        self.desc == rhs.desc
    }
}
impl<A: Annotation + std::cmp::PartialEq> Eq for Decl<A> {}

impl<A: Annotation> Decl<A> {
    pub fn new(loc: SourceRange, desc: DeclDesc<A>) -> Decl<A> {
        Decl { loc, annot: Default::default(), desc }
@@ -335,31 +383,6 @@ impl<A: Annotation> Decl<A> {
        }
    }

    /// Determine the size of a declaration type in bits, if possible.
    ///
    /// If the type is dynamically sized (e.g. contains an array or
    /// payload), `None` is returned. If `skip_payload` is set,
    /// payload and body fields are counted as having size `0` rather
    /// than a variable size.
    pub fn width(&self, scope: &lint::Scope<'_>, skip_payload: bool) -> Option<usize> {
        match &self.desc {
            DeclDesc::Enum { width, .. } | DeclDesc::Checksum { width, .. } => Some(*width),
            // Custom fields may have no declared width; propagate the Option.
            DeclDesc::CustomField { width, .. } => *width,
            DeclDesc::Packet { fields, parent_id, .. }
            | DeclDesc::Struct { fields, parent_id, .. } => {
                // Start from the parent's width, computed with
                // skip_payload=true so the parent's payload/body fields
                // count as zero bits.
                let mut packet_size = match parent_id {
                    None => 0,
                    Some(id) => scope.typedef.get(id.as_str())?.width(scope, true)?,
                };
                // Sum the field widths; any dynamically sized field
                // makes the whole declaration dynamic (`?` returns None).
                for field in fields.iter() {
                    packet_size += field.width(scope, skip_payload)?;
                }
                Some(packet_size)
            }
            // Groups and test declarations have no defined width.
            DeclDesc::Group { .. } | DeclDesc::Test { .. } => None,
        }
    }

    pub fn fields(&self) -> std::slice::Iter<'_, Field<A>> {
        match &self.desc {
            DeclDesc::Packet { fields, .. }
@@ -382,6 +405,14 @@ impl<A: Annotation> Decl<A> {
    }
}

// Structural equality for `Field`: only the descriptor is compared; the
// source location and annotations are ignored.
impl<A: Annotation> PartialEq for Field<A> {
    fn eq(&self, rhs: &Self) -> bool {
        self.desc == rhs.desc
    }
}
impl<A: Annotation> Eq for Field<A> {}

impl<A: Annotation> Field<A> {
    pub fn annotate<B: Annotation>(&self, annot: B::FieldAnnotation) -> Field<B> {
        Field { loc: self.loc, annot, desc: self.desc.clone() }
@@ -406,62 +437,6 @@ impl<A: Annotation> Field<A> {
        }
    }

    /// Return `true` if the field is a bit field: size, count,
    /// element-size, fixed, reserved, and scalar fields, as well as
    /// typedef fields whose named type resolves to an enum declaration.
    pub fn is_bitfield(&self, scope: &lint::Scope<'_>) -> bool {
        match &self.desc {
            FieldDesc::Size { .. }
            | FieldDesc::Count { .. }
            | FieldDesc::ElementSize { .. }
            | FieldDesc::FixedScalar { .. }
            | FieldDesc::FixedEnum { .. }
            | FieldDesc::Reserved { .. }
            | FieldDesc::Scalar { .. } => true,
            FieldDesc::Typedef { type_id, .. } => {
                // Typedef fields count only when the referenced type is
                // an enum declaration.
                let field = scope.typedef.get(type_id.as_str());
                matches!(field, Some(Decl { desc: DeclDesc::Enum { .. }, .. }))
            }
            _ => false,
        }
    }

    /// Look up the type declaration referenced by this field, if any:
    /// the enum declaration for fixed-enum fields, the element type
    /// declaration for typed arrays, and the named type declaration for
    /// typedef fields. All other field kinds reference no declaration.
    pub fn declaration<'a>(
        &self,
        scope: &'a lint::Scope<'a>,
    ) -> Option<&'a crate::parser::ast::Decl> {
        match &self.desc {
            FieldDesc::FixedEnum { enum_id, .. } => scope.typedef.get(enum_id).copied(),
            FieldDesc::Array { type_id: Some(type_id), .. } => scope.typedef.get(type_id).copied(),
            FieldDesc::Typedef { type_id, .. } => scope.typedef.get(type_id.as_str()).copied(),
            _ => None,
        }
    }

    /// Determine the size of a field in bits, if possible.
    ///
    /// If the field is dynamically sized (e.g. unsized array or
    /// payload field), `None` is returned. If `skip_payload` is set,
    /// payload and body fields are counted as having size `0` rather
    /// than a variable size.
    pub fn width(&self, scope: &lint::Scope<'_>, skip_payload: bool) -> Option<usize> {
        match &self.desc {
            // Bit fields carry their width directly.
            FieldDesc::Scalar { width, .. }
            | FieldDesc::Size { width, .. }
            | FieldDesc::Count { width, .. }
            | FieldDesc::ElementSize { width, .. }
            | FieldDesc::Reserved { width, .. }
            | FieldDesc::FixedScalar { width, .. } => Some(*width),
            // Fixed enum fields take the width of the referenced enum
            // declaration.
            FieldDesc::FixedEnum { .. } => self.declaration(scope)?.width(scope, false),
            FieldDesc::Padding { .. } => todo!(), // padding width not implemented yet
            // Sized arrays: element width times element count; the
            // element width comes from the field itself or, failing
            // that, from the element type declaration.
            FieldDesc::Array { size: Some(size), width, .. } => {
                let width = width.or_else(|| self.declaration(scope)?.width(scope, false))?;
                Some(width * size)
            }
            // Typedef fields take the width of the referenced type.
            FieldDesc::Typedef { .. } => self.declaration(scope)?.width(scope, false),
            // Checksum fields count as zero bits.
            FieldDesc::Checksum { .. } => Some(0),
            FieldDesc::Payload { .. } | FieldDesc::Body { .. } if skip_payload => Some(0),
            // Everything else (unsized arrays, payload/body) is
            // dynamically sized.
            _ => None,
        }
    }

    pub fn kind(&self) -> &str {
        match &self.desc {
            FieldDesc::Checksum { .. } => "payload",
+36 −32
Original line number Diff line number Diff line
@@ -6,7 +6,7 @@ use quote::{format_ident, quote};
use std::collections::BTreeSet;
use std::path::Path;

use crate::parser::ast as parser_ast;
use crate::analyzer::ast as analyzer_ast;

mod parser;
mod preamble;
@@ -48,19 +48,19 @@ pub fn mask_bits(n: usize, suffix: &str) -> syn::LitInt {

fn generate_packet_size_getter(
    scope: &lint::Scope<'_>,
    fields: &[&parser_ast::Field],
    fields: &[&analyzer_ast::Field],
    is_packet: bool,
) -> (usize, proc_macro2::TokenStream) {
    let mut constant_width = 0;
    let mut dynamic_widths = Vec::new();

    for field in fields {
        if let Some(width) = field.width(scope, false) {
        if let Some(width) = scope.get_field_width(field, false) {
            constant_width += width;
            continue;
        }

        let decl = field.declaration(scope);
        let decl = scope.get_field_declaration(field);
        dynamic_widths.push(match &field.desc {
            ast::FieldDesc::Payload { .. } | ast::FieldDesc::Body { .. } => {
                if is_packet {
@@ -80,7 +80,7 @@ fn generate_packet_size_getter(
            ast::FieldDesc::Array { id, width, .. } => {
                let id = format_ident!("{id}");
                match &decl {
                    Some(parser_ast::Decl {
                    Some(analyzer_ast::Decl {
                        desc: ast::DeclDesc::Struct { .. } | ast::DeclDesc::CustomField { .. },
                        ..
                    }) => {
@@ -88,9 +88,10 @@ fn generate_packet_size_getter(
                            self.#id.iter().map(|elem| elem.get_size()).sum::<usize>()
                        }
                    }
                    Some(parser_ast::Decl { desc: ast::DeclDesc::Enum { .. }, .. }) => {
                        let width =
                            syn::Index::from(decl.unwrap().width(scope, false).unwrap() / 8);
                    Some(analyzer_ast::Decl { desc: ast::DeclDesc::Enum { .. }, .. }) => {
                        let width = syn::Index::from(
                            scope.get_decl_width(decl.unwrap(), false).unwrap() / 8,
                        );
                        let mul_width = (width.index > 1).then(|| quote!(* #width));
                        quote! {
                            self.#id.len() #mul_width
@@ -125,7 +126,7 @@ fn generate_packet_size_getter(
    )
}

fn top_level_packet<'a>(scope: &lint::Scope<'a>, packet_name: &'a str) -> &'a parser_ast::Decl {
fn top_level_packet<'a>(scope: &lint::Scope<'a>, packet_name: &'a str) -> &'a analyzer_ast::Decl {
    let mut decl = scope.typedef[packet_name];
    while let ast::DeclDesc::Packet { parent_id: Some(parent_id), .. }
    | ast::DeclDesc::Struct { parent_id: Some(parent_id), .. } = &decl.desc
@@ -135,18 +136,14 @@ fn top_level_packet<'a>(scope: &lint::Scope<'a>, packet_name: &'a str) -> &'a pa
    decl
}

fn get_packet_children<'a>(scope: &'a lint::Scope<'_>, id: &str) -> &'a [&'a parser_ast::Decl] {
    scope.children.get(id).map(Vec::as_slice).unwrap_or_default()
}

/// Find all constrained fields in children of `id`.
fn find_constrained_fields<'a>(
    scope: &'a lint::Scope<'a>,
    id: &'a str,
) -> Vec<&'a parser_ast::Field> {
) -> Vec<&'a analyzer_ast::Field> {
    let mut fields = Vec::new();
    let mut field_names = BTreeSet::new();
    let mut children = Vec::from(get_packet_children(scope, id));
    let mut children = scope.iter_children(id).collect::<Vec<_>>();

    while let Some(child) = children.pop() {
        if let ast::DeclDesc::Packet { id, constraints, .. }
@@ -158,7 +155,7 @@ fn find_constrained_fields<'a>(
                    fields.push(packet_scope.all_fields[&constraint.id]);
                }
            }
            children.extend(get_packet_children(scope, id));
            children.extend(scope.iter_children(id).collect::<Vec<_>>());
        }
    }

@@ -173,7 +170,7 @@ fn find_constrained_fields<'a>(
fn find_constrained_parent_fields<'a>(
    scope: &'a lint::Scope<'a>,
    id: &'a str,
) -> impl Iterator<Item = &'a parser_ast::Field> {
) -> impl Iterator<Item = &'a analyzer_ast::Field> {
    let packet_scope = &scope.scopes[&scope.typedef[id]];
    find_constrained_fields(scope, id).into_iter().filter(|field| {
        let id = field.id().unwrap();
@@ -223,16 +220,16 @@ fn generate_data_struct(
    };

    let visibility = if is_packet { quote!() } else { quote!(pub) };
    let has_payload = packet_scope.payload.is_some();
    let children = get_packet_children(scope, id);
    let has_children_or_payload = !children.is_empty() || has_payload;
    let has_payload = packet_scope.get_payload_field().is_some();
    let has_children = scope.iter_children(id).next().is_some();

    let struct_name = if is_packet { format_ident!("{id}Data") } else { format_ident!("{id}") };
    let fields_with_ids =
        packet_scope.fields.iter().filter(|f| f.id().is_some()).collect::<Vec<_>>();
    let mut field_names =
        fields_with_ids.iter().map(|f| format_ident!("{}", f.id().unwrap())).collect::<Vec<_>>();
    let mut field_types = fields_with_ids.iter().map(|f| types::rust_type(f)).collect::<Vec<_>>();
    if has_children_or_payload {
    if has_children || has_payload {
        if is_packet {
            field_names.push(format_ident!("child"));
            let field_type = format_ident!("{id}DataChild");
@@ -295,7 +292,7 @@ fn generate_data_struct(
/// Find all parents from `id`.
///
/// This includes the `Decl` for `id` itself.
fn find_parents<'a>(scope: &lint::Scope<'a>, id: &str) -> Vec<&'a parser_ast::Decl> {
fn find_parents<'a>(scope: &lint::Scope<'a>, id: &str) -> Vec<&'a analyzer_ast::Decl> {
    let mut decl = scope.typedef[id];
    let mut parents = vec![decl];
    while let ast::DeclDesc::Packet { parent_id: Some(parent_id), .. }
@@ -422,7 +419,7 @@ fn generate_packet_decl(
            })
            .collect::<Vec<_>>();

        if parent_packet_scope.payload.is_some() {
        if parent_packet_scope.get_payload_field().is_some() {
            field.push(format_ident!("child"));
            if idx == 0 {
                // Top-most parent, the child is simply created from
@@ -451,8 +448,8 @@ fn generate_packet_decl(
        }
    });

    let children = get_packet_children(scope, id);
    let has_payload = packet_scope.payload.is_some();
    let children = scope.iter_children(id).collect::<Vec<_>>();
    let has_payload = packet_scope.get_payload_field().is_some();
    let has_children_or_payload = !children.is_empty() || has_payload;
    let child =
        children.iter().map(|child| format_ident!("{}", child.id().unwrap())).collect::<Vec<_>>();
@@ -723,8 +720,8 @@ fn generate_enum_decl(id: &str, tags: &[ast::Tag]) -> proc_macro2::TokenStream {

fn generate_decl(
    scope: &lint::Scope<'_>,
    file: &parser_ast::File,
    decl: &parser_ast::Decl,
    file: &analyzer_ast::File,
    decl: &analyzer_ast::Decl,
) -> String {
    match &decl.desc {
        ast::DeclDesc::Packet { id, .. } => {
@@ -747,13 +744,13 @@ fn generate_decl(
///
/// The code is not formatted, pipe it through `rustfmt` to get
/// readable source code.
pub fn generate(sources: &ast::SourceDatabase, file: &parser_ast::File) -> String {
pub fn generate(sources: &ast::SourceDatabase, file: &analyzer_ast::File) -> String {
    let mut code = String::new();

    let source = sources.get(file.file).expect("could not read source");
    code.push_str(&preamble::generate(Path::new(source.name())));

    let scope = lint::Scope::new(file).unwrap();
    let scope = lint::Scope::new(file);
    for decl in &file.declarations {
        code.push_str(&generate_decl(&scope, file, decl));
        code.push_str("\n\n");
@@ -765,6 +762,7 @@ pub fn generate(sources: &ast::SourceDatabase, file: &parser_ast::File) -> Strin
#[cfg(test)]
mod tests {
    use super::*;
    use crate::analyzer;
    use crate::ast;
    use crate::parser::parse_inline;
    use crate::test_utils::{assert_snapshot_eq, rustfmt};
@@ -775,9 +773,11 @@ mod tests {
    /// # Panics
    ///
    /// Panics on parse errors.
    pub fn parse_str(text: &str) -> parser_ast::File {
    pub fn parse_str(text: &str) -> analyzer_ast::File {
        let mut db = ast::SourceDatabase::new();
        parse_inline(&mut db, String::from("stdin"), String::from(text)).expect("parse error")
        let file =
            parse_inline(&mut db, String::from("stdin"), String::from(text)).expect("parse error");
        analyzer::analyze(&file).expect("analyzer error")
    }

    #[track_caller]
@@ -796,19 +796,22 @@ mod tests {
                a: 8,
                b: 8,
                c: 8,
                _payload_,
              }
              packet Child: Parent(a = 10) {
                x: 8,
                _payload_,
              }
              packet GrandChild: Child(b = 20) {
                y: 8,
                _payload_,
              }
              packet GrandGrandChild: GrandChild(c = 30) {
                z: 8,
              }
            ";
        let file = parse_str(code);
        let scope = lint::Scope::new(&file).unwrap();
        let scope = lint::Scope::new(&file);
        let find_fields =
            |id| find_constrained_parent_fields(&scope, id).map(|field| field.id().unwrap());
        assert_iter_eq(find_fields("Parent"), vec![]);
@@ -841,6 +844,7 @@ mod tests {
                    let code = format!("{endianness}_packets\n{}", $code);
                    let mut db = ast::SourceDatabase::new();
                    let file = parse_inline(&mut db, String::from("test"), code).unwrap();
                    let file = analyzer::analyze(&file).unwrap();
                    let actual_code = generate(&db, &file);
                    assert_snapshot_eq(
                        &format!("tests/generated/{name}_{endianness}.rs"),
+26 −24

File changed.

Preview size limit exceeded, changes collapsed.

+16 −12
Original line number Diff line number Diff line
use crate::analyzer::ast as analyzer_ast;
use crate::backends::rust::{mask_bits, types};
use crate::parser::ast as parser_ast;
use crate::{ast, lint};
use heck::ToUpperCamelCase;
use quote::{format_ident, quote};
@@ -39,11 +39,11 @@ impl<'a> FieldSerializer<'a> {
        }
    }

    pub fn add(&mut self, field: &parser_ast::Field) {
    pub fn add(&mut self, field: &analyzer_ast::Field) {
        match &field.desc {
            _ if field.is_bitfield(self.scope) => self.add_bit_field(field),
            _ if self.scope.is_bitfield(field) => self.add_bit_field(field),
            ast::FieldDesc::Array { id, width, .. } => {
                self.add_array_field(id, *width, field.declaration(self.scope))
                self.add_array_field(id, *width, self.scope.get_field_declaration(field))
            }
            ast::FieldDesc::Typedef { id, type_id } => {
                self.add_typedef_field(id, type_id);
@@ -55,8 +55,8 @@ impl<'a> FieldSerializer<'a> {
        }
    }

    fn add_bit_field(&mut self, field: &parser_ast::Field) {
        let width = field.width(self.scope, false).unwrap();
    fn add_bit_field(&mut self, field: &analyzer_ast::Field) {
        let width = self.scope.get_field_width(field, false).unwrap();
        let shift = self.shift;

        match &field.desc {
@@ -115,7 +115,7 @@ impl<'a> FieldSerializer<'a> {
                let field_type = types::Integer::new(*width);
                // TODO: size modifier

                let value_field_decl = value_field.declaration(self.scope);
                let value_field_decl = self.scope.get_field_declaration(value_field);

                let field_size_name = format_ident!("{field_id}_size");
                let array_size = match (&value_field.desc, value_field_decl.map(|decl| &decl.desc))
@@ -240,7 +240,12 @@ impl<'a> FieldSerializer<'a> {
        self.shift = 0;
    }

    fn add_array_field(&mut self, id: &str, width: Option<usize>, decl: Option<&parser_ast::Decl>) {
    fn add_array_field(
        &mut self,
        id: &str,
        width: Option<usize>,
        decl: Option<&analyzer_ast::Decl>,
    ) {
        // TODO: padding

        let serialize = match width {
@@ -297,10 +302,9 @@ impl<'a> FieldSerializer<'a> {
        let decl = self.scope.typedef[self.packet_name];
        let is_packet = matches!(&decl.desc, ast::DeclDesc::Packet { .. });

        let children =
            self.scope.children.get(self.packet_name).map(Vec::as_slice).unwrap_or_default();
        let child_ids = children
            .iter()
        let child_ids = self
            .scope
            .iter_children(self.packet_name)
            .map(|child| format_ident!("{}", child.id().unwrap()))
            .collect::<Vec<_>>();

Loading