Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit dcf00b30 authored by Martin Geisler's avatar Martin Geisler
Browse files

pdl: add ‘Chunk::generate_write_adjustments’

Test: atest pdl_tests pdl_inline_tests
Change-Id: I6cd6ff18b75ab3b40c21ac87cae14a04e816beef
parent cca1f564
Loading
Loading
Loading
Loading
+5 −115
Original line number Diff line number Diff line
@@ -43,70 +43,6 @@ pub fn get_field_range(offset: usize, width: usize) -> std::ops::Range<usize> {
    start..end
}

/// Generates statements which pack the fields of `chunk` into a
/// single integer variable named `chunk`, ready for serialization.
///
/// Each field is cast to the chunk's integer type, masked to its
/// declared bit width, shifted to its offset, and ORed into `chunk`.
/// A chunk holding a single field is bound directly to the field
/// name instead (no shifting, masking, or casting needed).
fn generate_chunk_write_field_adjustments(chunk: &[ast::Field]) -> proc_macro2::TokenStream {
    // Work directly with the field name if we are writing a single
    // field. This generates simpler code.
    if let [ast::Field::Scalar { id, .. }] = chunk {
        let field_name = format_ident!("{id}");
        return quote! {
            let #field_name = self.#field_name;
        };
    }

    // The chunk type is the smallest Rust integer that can hold all
    // fields combined.
    let chunk_fields = chunk.iter().map(Field::from).collect::<Vec<_>>();
    let chunk_width = Chunk::new(&chunk_fields).get_width();
    let chunk_type = types::Integer::new(chunk_width);

    // One `let chunk = chunk | ...;` statement per field. (Renamed
    // from `field_parsers`: these fragments write fields, not parse.)
    let mut field_writers = Vec::new();
    let mut field_offset = 0;
    for field in chunk {
        match field {
            ast::Field::Scalar { id, width, .. } => {
                let field_name = format_ident!("{id}");
                let field_type = types::Integer::new(*width);

                let mut field = quote! {
                    self.#field_name
                };

                // Widen to the chunk type before shifting so the high
                // bits of the shifted value are not truncated.
                if field_type.width < chunk_type.width {
                    field = quote! {
                        (#field as #chunk_type)
                    };
                }

                // Mask when the declared bit width is narrower than
                // the Rust integer type holding the field.
                if *width < field_type.width {
                    let bit_mask = mask_bits(*width);
                    field = quote! {
                        (#field & #bit_mask)
                    };
                }

                // Shift into position; the first field needs no shift.
                // Emit `<<` directly instead of round-tripping it
                // through `syn::parse_str::<syn::BinOp>(..).unwrap()`.
                if field_offset > 0 {
                    let field_offset = syn::Index::from(field_offset);
                    field = quote! {
                        (#field << #field_offset)
                    };
                }

                field_offset += width;
                field_writers.push(quote! {
                    let chunk = chunk | #field;
                });
            }
            _ => todo!("unsupported field: {:?}", field),
        }
    }

    quote! {
        let chunk = 0;
        #(#field_writers)*
    }
}

/// Generate a bit-mask which masks out `n` least significant bits.
pub fn mask_bits(n: usize) -> syn::LitInt {
    syn::parse_str::<syn::LitInt>(&format!("{:#x}", (1u64 << n) - 1)).unwrap()
@@ -214,16 +150,10 @@ fn generate_packet_decl(
    let mut offset = 0;
    for chunk in chunks {
        let chunk_fields = chunk.iter().map(Field::from).collect::<Vec<_>>();
        field_parsers.push(Chunk::new(&chunk_fields).generate_read(
            id,
            file.endianness.value,
            offset,
        ));

        field_writers.push(generate_chunk_write_field_adjustments(chunk));
        field_writers.push(Chunk::new(&chunk_fields).generate_write(file.endianness.value, offset));

        offset += Chunk::new(&chunk_fields).get_width();
        let chunk = Chunk::new(&chunk_fields);
        field_parsers.push(chunk.generate_read(id, file.endianness.value, offset));
        field_writers.push(chunk.generate_write(file.endianness.value, offset));
        offset += chunk.get_width();
    }

    let field_names = fields.iter().map(|field| Field::from(field).get_ident()).collect::<Vec<_>>();
@@ -401,7 +331,7 @@ mod tests {
    use super::*;
    use crate::ast;
    use crate::parser::parse_inline;
    use crate::test_utils::{assert_expr_eq, assert_snapshot_eq, rustfmt};
    use crate::test_utils::{assert_snapshot_eq, rustfmt};

    /// Parse a string fragment as a PDL file.
    ///
@@ -547,44 +477,4 @@ mod tests {
        assert_eq!(get_field_range(/*offset=*/ 5, /*width=*/ 4), (0..2));
        assert_eq!(get_field_range(/*offset=*/ 5, /*width=*/ 20), (0..4));
    }

    #[test]
    fn test_generate_chunk_write_field_adjustments_8bit() {
        // Two fields (3 + 5 bits) exactly fill one byte: each field is
        // masked to its width and shifted to its offset, with no cast
        // since the field type already matches the chunk type (u8).
        let loc = ast::SourceRange::default();
        let fields = vec![
            ast::Field::Scalar { loc, id: String::from("a"), width: 3 },
            ast::Field::Scalar { loc, id: String::from("b"), width: 5 },
        ];
        assert_expr_eq(
            generate_chunk_write_field_adjustments(&fields),
            quote! {
                let chunk = 0;
                let chunk = chunk | (self.a & 0x7) ;
                let chunk = chunk | ((self.b & 0x1f) << 3);
            },
        );
    }

    #[test]
    fn test_generate_chunk_write_field_adjustments_48bit() {
        // Five fields totaling 48 bits: the chunk type widens to u64,
        // so every field is cast; masking is skipped only for `b`,
        // whose 8-bit width matches its Rust integer type exactly.
        let loc = ast::SourceRange::default();
        let fields = vec![
            ast::Field::Scalar { loc, id: String::from("a"), width: 3 },
            ast::Field::Scalar { loc, id: String::from("b"), width: 8 },
            ast::Field::Scalar { loc, id: String::from("c"), width: 10 },
            ast::Field::Scalar { loc, id: String::from("d"), width: 18 },
            ast::Field::Scalar { loc, id: String::from("e"), width: 9 },
        ];
        assert_expr_eq(
            generate_chunk_write_field_adjustments(&fields),
            quote! {
                let chunk = 0;
                let chunk = chunk | ((self.a as u64) & 0x7);
                let chunk = chunk | ((self.b as u64) << 3);
                let chunk = chunk | (((self.c as u64) & 0x3ff) << 11);
                let chunk = chunk | (((self.d as u64) & 0x3ffff) << 21);
                let chunk = chunk | (((self.e as u64) & 0x1ff) << 39);
            },
        );
    }
}
+74 −0
Original line number Diff line number Diff line
@@ -161,10 +161,38 @@ impl Chunk<'_> {
        let end = syn::Index::from(range.end);
        // TODO(mgeisler): let slice = (chunk_type_width > chunk_width).then( ... )
        let chunk_byte_width = syn::Index::from(chunk_width / 8);
        let write_adjustments = self.generate_write_adjustments();
        quote! {
            #write_adjustments
            buffer[#start..#end].copy_from_slice(&#chunk_name.#writer()[0..#chunk_byte_width]);
        }
    }

    /// Generates statements which combine this chunk's fields into a
    /// single integer variable named `chunk`.
    ///
    /// A single-field chunk is bound straight to its field name —
    /// no shifting, masking, or casting is required in that case.
    fn generate_write_adjustments(&self) -> proc_macro2::TokenStream {
        if let [field] = self.fields {
            // If there is a single field in the chunk, then we don't
            // have to shift, mask, or cast.
            let field_name = field.get_ident();
            return quote! {
                let #field_name = self.#field_name;
            };
        }

        let chunk_type = Integer::new(self.get_width());

        // Emit one `let chunk = chunk | ...;` statement per field,
        // advancing the bit offset by each field's width as we go.
        let mut bit_offset = 0;
        let adjustments = self
            .fields
            .iter()
            .map(|field| {
                let stmt = field.generate_write_adjustment(bit_offset, chunk_type);
                bit_offset += field.get_width();
                stmt
            })
            .collect::<Vec<_>>();

        quote! {
            let chunk = 0;
            #(#adjustments)*
        }
    }
}

#[cfg(test)]
@@ -352,6 +380,7 @@ mod tests {
        assert_expr_eq(
            chunk.generate_write(ast::EndiannessValue::BigEndian, 80),
            quote! {
                let a = self.a;
                buffer[10..11].copy_from_slice(&a.to_be_bytes()[0..1]);
            },
        );
@@ -364,6 +393,7 @@ mod tests {
        assert_expr_eq(
            chunk.generate_write(ast::EndiannessValue::BigEndian, 80),
            quote! {
                let a = self.a;
                buffer[10..12].copy_from_slice(&a.to_be_bytes()[0..2]);
            },
        );
@@ -376,6 +406,7 @@ mod tests {
        assert_expr_eq(
            chunk.generate_write(ast::EndiannessValue::BigEndian, 80),
            quote! {
                let a = self.a;
                buffer[10..13].copy_from_slice(&a.to_be_bytes()[0..3]);
            },
        );
@@ -391,8 +422,51 @@ mod tests {
        assert_expr_eq(
            chunk.generate_write(ast::EndiannessValue::BigEndian, 80),
            quote! {
                let chunk = 0;
                let chunk = chunk | (self.a as u64);
                let chunk = chunk | (((self.b as u64) & 0xffffff) << 16);
                buffer[10..15].copy_from_slice(&chunk.to_be_bytes()[0..5]);
            },
        );
    }

    #[test]
    fn test_generate_write_adjustments_8bit() {
        // Two fields (3 + 5 bits) fill one byte: mask-and-shift only,
        // no casts since both fields use the chunk type (u8) already.
        let fields = vec![
            Field::Scalar(ScalarField { id: String::from("a"), width: 3 }),
            Field::Scalar(ScalarField { id: String::from("b"), width: 5 }),
        ];
        let chunk = Chunk::new(&fields);
        assert_expr_eq(
            chunk.generate_write_adjustments(),
            quote! {
                let chunk = 0;
                let chunk = chunk | (self.a & 0x7) ;
                let chunk = chunk | ((self.b & 0x1f) << 3);
            },
        );
    }

    #[test]
    fn test_generate_write_adjustments_48bit() {
        // Five fields totaling 48 bits: chunk type widens to u64, so
        // every field is cast; only `b` (exactly 8 bits) skips the
        // mask because its width matches its Rust integer type.
        let fields = vec![
            Field::Scalar(ScalarField { id: String::from("a"), width: 3 }),
            Field::Scalar(ScalarField { id: String::from("b"), width: 8 }),
            Field::Scalar(ScalarField { id: String::from("c"), width: 10 }),
            Field::Scalar(ScalarField { id: String::from("d"), width: 18 }),
            Field::Scalar(ScalarField { id: String::from("e"), width: 9 }),
        ];
        let chunk = Chunk::new(&fields);
        assert_expr_eq(
            chunk.generate_write_adjustments(),
            quote! {
                let chunk = 0;
                let chunk = chunk | ((self.a as u64) & 0x7);
                let chunk = chunk | ((self.b as u64) << 3);
                let chunk = chunk | (((self.c as u64) & 0x3ff) << 11);
                let chunk = chunk | (((self.d as u64) & 0x3ffff) << 21);
                let chunk = chunk | (((self.e as u64) & 0x1ff) << 39);
            },
        );
    }
}
+48 −0
Original line number Diff line number Diff line
@@ -86,6 +86,44 @@ impl ScalarField {
            let #field_name = #field;
        }
    }

    /// Generates one statement which ORs this field into the `chunk`
    /// variable: the field is cast to `chunk_type` (when narrower),
    /// masked to its declared bit width (when the Rust integer type
    /// is wider), and shifted left by `offset` bits.
    fn generate_write_adjustment(
        &self,
        offset: usize,
        chunk_type: types::Integer,
    ) -> proc_macro2::TokenStream {
        let field_name = self.get_ident();
        let field_type = self.get_type();

        let mut field = quote! {
            self.#field_name
        };

        // Widen to the chunk type before shifting so the high bits of
        // the shifted value are not truncated.
        if field_type.width < chunk_type.width {
            field = quote! {
                (#field as #chunk_type)
            };
        }

        // Mask when the declared width is narrower than the Rust
        // integer type used to store the field.
        if self.width < field_type.width {
            let bit_mask = mask_bits(self.width);
            field = quote! {
                (#field & #bit_mask)
            };
        }

        // Shift into position; a zero offset needs no shift. Emit the
        // `<<` token directly instead of round-tripping it through
        // `syn::parse_str::<syn::BinOp>("<<").unwrap()`.
        if offset > 0 {
            let field_offset = syn::Index::from(offset);
            field = quote! {
                (#field << #field_offset)
            };
        }

        quote! {
            let chunk = chunk | #field;
        }
    }
}

/// Projection of [`ast::Field`] with the bits needed for the Rust
@@ -144,4 +182,14 @@ impl Field {
            Field::Scalar(field) => field.generate_read_adjustment(offset, chunk_type),
        }
    }

    /// Dispatches write-adjustment code generation to the concrete
    /// field kind. The match is exhaustive over the variants visible
    /// here (only `Scalar`); new variants will force an update.
    pub fn generate_write_adjustment(
        &self,
        offset: usize,
        chunk_type: types::Integer,
    ) -> proc_macro2::TokenStream {
        match self {
            Field::Scalar(field) => field.generate_write_adjustment(offset, chunk_type),
        }
    }
}