
Commit 5f2b05e2 authored by Martin Geisler, committed by Automerger Merge Worker


Merge changes I6cd6ff18,I686e0558,I2532bf2b,I11416277,I97bdd2bf am: a100e31e am: 1947c5aa am: a8b41daf

Original change: https://android-review.googlesource.com/c/platform/packages/modules/Bluetooth/+/2241667



Change-Id: Ibd073b2f8f929108329ecdd65766c690b01d1af1
Signed-off-by: Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>
parents 4ac74884 a8b41daf
+3 −6
@@ -9,7 +9,6 @@ package {

rust_defaults {
    name: "pdl_defaults",
    srcs: ["src/main.rs"],
    // LINT.IfChange
    rustlibs: [
        "libcodespan_reporting",
@@ -31,11 +30,13 @@ rust_defaults {
rust_binary_host {
    name: "pdl",
    defaults: ["pdl_defaults"],
    srcs: ["src/main.rs"],
}

rust_test_host {
    name: "pdl_inline_tests",
    defaults: ["pdl_defaults"],
    srcs: ["src/main.rs"],
    test_suites: ["general-tests"],
    enabled: false, // rustfmt is only available on x86.
    arch: {
@@ -57,6 +58,7 @@ rust_test_host {

rust_test_host {
    name: "pdl_tests",
    defaults: ["pdl_defaults"],
    srcs: ["tests/pdl_tests.rs"],
    test_suites: ["general-tests"],
    enabled: false, // rustfmt is only available on x86.
@@ -65,11 +67,6 @@ rust_test_host {
            enabled: true,
        },
    },
    // LINT.IfChange
    rustlibs: [
        "libtempfile",
    ],
    // LINT.ThenChange(Cargo.toml)
    data: [
        ":bluetooth_packetgen",
        ":pdl",
+6 −490
@@ -43,204 +43,11 @@ pub fn get_field_range(offset: usize, width: usize) -> std::ops::Range<usize> {
    start..end
}

/// Read data for a byte-aligned chunk.
fn generate_chunk_read(
    packet_name: &str,
    endianness_value: ast::EndiannessValue,
    offset: usize,
    chunk: &Chunk,
) -> proc_macro2::TokenStream {
    assert!(offset % 8 == 0, "Chunks must be byte-aligned, got offset: {offset}");
    let getter = match endianness_value {
        ast::EndiannessValue::BigEndian => format_ident!("from_be_bytes"),
        ast::EndiannessValue::LittleEndian => format_ident!("from_le_bytes"),
    };

    // Work directly with the field name if we are reading a single
    // field. This generates simpler code.
    let chunk_name = chunk.get_name();
    let chunk_width = chunk.get_width();
    let chunk_type = types::Integer::new(chunk_width);
    assert!(chunk_width % 8 == 0, "Chunks must have a byte size, got width: {chunk_width}");

    let range = get_field_range(offset, chunk_width);
    let indices = range.map(syn::Index::from).collect::<Vec<_>>();

    // TODO(mgeisler): emit just a single length check per chunk. We
    // could even emit a single length check per packet.
    let length_checks = chunk.generate_length_checks(packet_name, offset);

    // When the chunk_type.width is larger than chunk_width (e.g.
    // chunk_width is 24 but chunk_type.width is 32), then we need
    // zero padding.
    let zero_padding_len = (chunk_type.width - chunk_width) / 8;
    // We need the padding on the MSB side of the payload, so for
    // big-endian we pad on the left, and for little-endian we pad
    // on the right.
    let (zero_padding_before, zero_padding_after) = match endianness_value {
        ast::EndiannessValue::BigEndian => (vec![syn::Index::from(0); zero_padding_len], vec![]),
        ast::EndiannessValue::LittleEndian => (vec![], vec![syn::Index::from(0); zero_padding_len]),
    };

    quote! {
        #(#length_checks)*
        let #chunk_name = #chunk_type::#getter([
            #(#zero_padding_before,)* #(bytes[#indices]),* #(, #zero_padding_after)*
        ]);
    }
}
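
To make the zero-padding rule concrete: a hand-written sketch (not part of this change) of the reads generated for a 24-bit field at byte offset 10, matching the test expectations further down. The chunk type is u32, so one zero byte pads the MSB side.

    fn main() {
        let bytes: Vec<u8> = (0u8..16).collect();
        // Little-endian: the MSB comes last, so the zero pad goes on the right.
        let le = u32::from_le_bytes([bytes[10], bytes[11], bytes[12], 0]);
        // Big-endian: the MSB comes first, so the zero pad goes on the left.
        let be = u32::from_be_bytes([0, bytes[10], bytes[11], bytes[12]]);
        assert_eq!(le, 0x000c_0b0a); // 0x0a, 0x0b, 0x0c, least significant first
        assert_eq!(be, 0x000a_0b0c); // 0x0a, 0x0b, 0x0c, most significant first
    }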

fn generate_chunk_read_field_adjustments(fields: &[ast::Field]) -> proc_macro2::TokenStream {
    // If there is a single field in the chunk, then we don't have to
    // shift, mask, or cast.
    if fields.len() == 1 {
        return quote! {};
    }

    let chunk_fields = fields.iter().map(Field::from).collect::<Vec<_>>();
    let chunk_width = Chunk::new(&chunk_fields).get_width();
    let chunk_type = types::Integer::new(chunk_width);

    let mut field_parsers = Vec::new();
    let mut field_offset = 0;
    for field in fields {
        let field_name = Field::from(field).get_ident();
        match field {
            ast::Field::Scalar { width, .. } => {
                let field_type = types::Integer::new(*width);

                let mut field = quote! {
                    chunk
                };
                if field_offset > 0 {
                    let field_offset = syn::Index::from(field_offset);
                    let op = syn::parse_str::<syn::BinOp>(">>").unwrap();
                    field = quote! {
                        (#field #op #field_offset)
                    };
                }

                if *width < field_type.width {
                    let bit_mask = mask_bits(*width);
                    field = quote! {
                        (#field & #bit_mask)
                    };
                }

                if field_type.width < chunk_type.width {
                    field = quote! {
                        #field as #field_type
                    };
                }

                field_offset += width;
                field_parsers.push(quote! {
                    let #field_name = #field;
                });
            }
            _ => todo!("unsupported field: {:?}", field),
        }
    }

    quote! {
        #(#field_parsers)*
    }
}
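
To see the shift-and-mask sequence this emits, here is a hand-written equivalent (not from the change) of the generated adjustments for two fields packed into one byte, `a` in bits 0..3 and `b` in bits 3..8; compare the 8-bit test expectation below.

    fn main() {
        let chunk: u8 = 0b10110_011; // b = 0b10110, a = 0b011
        let a = chunk & 0x7;         // keep the low 3 bits
        let b = (chunk >> 3) & 0x1f; // shift past `a`, keep the next 5 bits
        assert_eq!(a, 3);
        assert_eq!(b, 22);
    }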

fn generate_chunk_write_field_adjustments(chunk: &[ast::Field]) -> proc_macro2::TokenStream {
    // Work directly with the field name if we are writing a single
    // field. This generates simpler code.
    if let [ast::Field::Scalar { id, .. }] = chunk {
        // If there is a single field in the chunk, then we don't have to
        // shift, mask, or cast.
        let field_name = format_ident!("{id}");
        return quote! {
            let #field_name = self.#field_name;
        };
    }

    let chunk_fields = chunk.iter().map(Field::from).collect::<Vec<_>>();
    let chunk_width = Chunk::new(&chunk_fields).get_width();
    let chunk_type = types::Integer::new(chunk_width);

    let mut field_parsers = Vec::new();
    let mut field_offset = 0;
    for field in chunk {
        match field {
            ast::Field::Scalar { id, width, .. } => {
                let field_name = format_ident!("{id}");
                let field_type = types::Integer::new(*width);

                let mut field = quote! {
                    self.#field_name
                };

                if field_type.width < chunk_type.width {
                    field = quote! {
                        (#field as #chunk_type)
                    };
                }

                if *width < field_type.width {
                    let bit_mask = mask_bits(*width);
                    field = quote! {
                        (#field & #bit_mask)
                    };
                }

                if field_offset > 0 {
                    let field_offset = syn::Index::from(field_offset);
                    let op = syn::parse_str::<syn::BinOp>("<<").unwrap();
                    field = quote! {
                        (#field #op #field_offset)
                    };
                }

                field_offset += width;
                field_parsers.push(quote! {
                    let chunk = chunk | #field;
                });
            }
            _ => todo!("unsupported field: {:?}", field),
        }
    }

    quote! {
        let chunk = 0;
        #(#field_parsers)*
    }
}
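
The write direction rebuilds the chunk by OR-ing each masked and shifted field into an accumulator, using the same `let chunk = ...` shadowing style as the generated code. A hand-written equivalent for the same two packed fields:

    fn main() {
        let a: u8 = 3;  // 3-bit field value
        let b: u8 = 22; // 5-bit field value
        let chunk = 0u8;
        let chunk = chunk | (a & 0x7);
        let chunk = chunk | ((b & 0x1f) << 3);
        assert_eq!(chunk, 0b10110_011);
    }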

/// Generate a bit-mask which masks out `n` least significant bits.
-fn mask_bits(n: usize) -> syn::LitInt {
+pub fn mask_bits(n: usize) -> syn::LitInt {
    syn::parse_str::<syn::LitInt>(&format!("{:#x}", (1u64 << n) - 1)).unwrap()
}
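
The literal it renders is simply `(1 << n) - 1` in hexadecimal; for the field widths exercised by the tests below:

    fn main() {
        assert_eq!((1u64 << 3) - 1, 0x7);
        assert_eq!((1u64 << 5) - 1, 0x1f);
        assert_eq!((1u64 << 10) - 1, 0x3ff);
        assert_eq!((1u64 << 18) - 1, 0x3ffff);
    }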

fn generate_chunk_write(
    endianness_value: ast::EndiannessValue,
    offset: usize,
    chunk: &[ast::Field],
) -> proc_macro2::TokenStream {
    let writer = match endianness_value {
        ast::EndiannessValue::BigEndian => format_ident!("to_be_bytes"),
        ast::EndiannessValue::LittleEndian => format_ident!("to_le_bytes"),
    };

    let chunk_fields = chunk.iter().map(Field::from).collect::<Vec<_>>();
    let chunk_width = Chunk::new(&chunk_fields).get_width();
    let chunk_name = Chunk::new(&chunk_fields).get_name();
    assert!(chunk_width % 8 == 0, "Chunks must have a byte size, got width: {chunk_width}");

    let range = get_field_range(offset, chunk_width);
    let start = syn::Index::from(range.start);
    let end = syn::Index::from(range.end);
    // TODO(mgeisler): let slice = (chunk_type_width > chunk_width).then( ... )
    let chunk_byte_width = syn::Index::from(chunk_width / 8);
    quote! {
        buffer[#start..#end].copy_from_slice(&#chunk_name.#writer()[0..#chunk_byte_width]);
    }
}
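
A hand-written sketch (not from the change) of the copy this generates for a 16-bit big-endian field at byte offset 10, matching the 16-bit write test below: the serialized integer's first `chunk_width / 8` bytes land in the output buffer.

    fn main() {
        let mut buffer = [0u8; 16];
        let a: u16 = 0x0a0b;
        buffer[10..12].copy_from_slice(&a.to_be_bytes()[0..2]);
        assert_eq!(buffer[10..12], [0x0a, 0x0b]);
    }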

/// Generate code for an `ast::Decl::Packet` enum value.
fn generate_packet_decl(
    file: &ast::File,
@@ -343,18 +150,10 @@ fn generate_packet_decl(
    let mut offset = 0;
    for chunk in chunks {
        let chunk_fields = chunk.iter().map(Field::from).collect::<Vec<_>>();
-        field_parsers.push(generate_chunk_read(
-            id,
-            file.endianness.value,
-            offset,
-            &Chunk::new(&chunk_fields),
-        ));
-        field_parsers.push(generate_chunk_read_field_adjustments(chunk));
-
-        field_writers.push(generate_chunk_write_field_adjustments(chunk));
-        field_writers.push(generate_chunk_write(file.endianness.value, offset, chunk));
-
-        offset += Chunk::new(&chunk_fields).get_width();
+        let chunk = Chunk::new(&chunk_fields);
+        field_parsers.push(chunk.generate_read(id, file.endianness.value, offset));
+        field_writers.push(chunk.generate_write(file.endianness.value, offset));
+        offset += chunk.get_width();
    }

    let field_names = fields.iter().map(|field| Field::from(field).get_ident()).collect::<Vec<_>>();
@@ -531,9 +330,8 @@ pub fn generate(sources: &ast::SourceDatabase, file: &ast::File) -> String {
mod tests {
    use super::*;
    use crate::ast;
-    use crate::backends::rust::field::ScalarField;
    use crate::parser::parse_inline;
-    use crate::test_utils::{assert_eq_with_diff, assert_snapshot_eq, rustfmt};
+    use crate::test_utils::{assert_snapshot_eq, rustfmt};

    /// Parse a string fragment as a PDL file.
    ///
@@ -679,286 +477,4 @@ mod tests {
        assert_eq!(get_field_range(/*offset=*/ 5, /*width=*/ 4), (0..2));
        assert_eq!(get_field_range(/*offset=*/ 5, /*width=*/ 20), (0..4));
    }

    // Assert that two token streams generate the same code.
    //
    // Both token streams are wrapped in a `main` function (so we can
    // format them with `rustfmt`) and a diff is shown if they
    // differ.
    #[track_caller]
    fn assert_expr_eq(left: proc_macro2::TokenStream, right: proc_macro2::TokenStream) {
        let left = quote! {
            fn main() { #left }
        };
        let right = quote! {
            fn main() { #right }
        };
        assert_eq_with_diff(
            "left",
            &rustfmt(&left.to_string()),
            "right",
            &rustfmt(&right.to_string()),
        );
    }

    #[test]
    fn test_generate_chunk_read_8bit() {
        let fields = [Field::Scalar(ScalarField { id: String::from("a"), width: 8 })];
        let chunk = Chunk::new(&fields);
        assert_expr_eq(
            generate_chunk_read("Foo", ast::EndiannessValue::BigEndian, 80, &chunk),
            quote! {
                if bytes.len() < 11 {
                    return Err(Error::InvalidLengthError {
                        obj: "Foo".to_string(),
                        field: "a".to_string(),
                        wanted: 11,
                        got: bytes.len(),
                    });
                }
                let a = u8::from_be_bytes([bytes[10]]);
            },
        );
    }

    #[test]
    fn test_generate_chunk_read_16bit_le() {
        let fields = [Field::Scalar(ScalarField { id: String::from("a"), width: 16 })];
        let chunk = Chunk::new(&fields);
        assert_expr_eq(
            generate_chunk_read("Foo", ast::EndiannessValue::LittleEndian, 80, &chunk),
            quote! {
                if bytes.len() < 12 {
                    return Err(Error::InvalidLengthError {
                        obj: "Foo".to_string(),
                        field: "a".to_string(),
                        wanted: 12,
                        got: bytes.len(),
                    });
                }
                let a = u16::from_le_bytes([bytes[10], bytes[11]]);
            },
        );
    }

    #[test]
    fn test_generate_chunk_read_16bit_be() {
        let fields = [Field::Scalar(ScalarField { id: String::from("a"), width: 16 })];
        let chunk = Chunk::new(&fields);
        assert_expr_eq(
            generate_chunk_read("Foo", ast::EndiannessValue::BigEndian, 80, &chunk),
            quote! {
                if bytes.len() < 12 {
                    return Err(Error::InvalidLengthError {
                        obj: "Foo".to_string(),
                        field: "a".to_string(),
                        wanted: 12,
                        got: bytes.len(),
                    });
                }
                let a = u16::from_be_bytes([bytes[10], bytes[11]]);
            },
        );
    }

    #[test]
    fn test_generate_chunk_read_24bit_le() {
        let fields = [Field::Scalar(ScalarField { id: String::from("a"), width: 24 })];
        let chunk = Chunk::new(&fields);
        assert_expr_eq(
            generate_chunk_read("Foo", ast::EndiannessValue::LittleEndian, 80, &chunk),
            quote! {
                if bytes.len() < 13 {
                    return Err(Error::InvalidLengthError {
                        obj: "Foo".to_string(),
                        field: "a".to_string(),
                        wanted: 13,
                        got: bytes.len(),
                    });
                }
                let a = u32::from_le_bytes([bytes[10], bytes[11], bytes[12], 0]);
            },
        );
    }

    #[test]
    fn test_generate_chunk_read_24bit_be() {
        let fields = [Field::Scalar(ScalarField { id: String::from("a"), width: 24 })];
        let chunk = Chunk::new(&fields);
        assert_expr_eq(
            generate_chunk_read("Foo", ast::EndiannessValue::BigEndian, 80, &chunk),
            quote! {
                if bytes.len() < 13 {
                    return Err(Error::InvalidLengthError {
                        obj: "Foo".to_string(),
                        field: "a".to_string(),
                        wanted: 13,
                        got: bytes.len(),
                    });
                }
                let a = u32::from_be_bytes([0, bytes[10], bytes[11], bytes[12]]);
            },
        );
    }

    #[test]
    fn test_generate_chunk_read_multiple_fields() {
        let fields = [
            Field::Scalar(ScalarField { id: String::from("a"), width: 16 }),
            Field::Scalar(ScalarField { id: String::from("b"), width: 24 }),
        ];
        let chunk = Chunk::new(&fields);
        assert_expr_eq(
            generate_chunk_read("Foo", ast::EndiannessValue::BigEndian, 80, &chunk),
            quote! {
                if bytes.len() < 12 {
                    return Err(Error::InvalidLengthError {
                        obj: "Foo".to_string(),
                        field: "a".to_string(),
                        wanted: 12,
                        got: bytes.len(),
                    });
                }
                if bytes.len() < 15 {
                    return Err(Error::InvalidLengthError {
                        obj: "Foo".to_string(),
                        field: "b".to_string(),
                        wanted: 15,
                        got: bytes.len(),
                    });
                }
                let chunk =
                    u64::from_be_bytes([0, 0, 0, bytes[10], bytes[11], bytes[12], bytes[13], bytes[14]]);
            },
        );
    }

    #[test]
    fn test_generate_chunk_read_field_adjustments_8bit() {
        let loc = ast::SourceRange::default();
        let fields = vec![
            ast::Field::Scalar { loc, id: String::from("a"), width: 3 },
            ast::Field::Scalar { loc, id: String::from("b"), width: 5 },
        ];
        assert_expr_eq(
            generate_chunk_read_field_adjustments(&fields),
            quote! {
                let a = (chunk & 0x7);
                let b = ((chunk >> 3) & 0x1f);
            },
        );
    }

    #[test]
    fn test_generate_chunk_read_field_adjustments_48bit() {
        let loc = ast::SourceRange::default();
        let fields = vec![
            ast::Field::Scalar { loc, id: String::from("a"), width: 3 },
            ast::Field::Scalar { loc, id: String::from("b"), width: 8 },
            ast::Field::Scalar { loc, id: String::from("c"), width: 10 },
            ast::Field::Scalar { loc, id: String::from("d"), width: 18 },
            ast::Field::Scalar { loc, id: String::from("e"), width: 9 },
        ];
        assert_expr_eq(
            generate_chunk_read_field_adjustments(&fields),
            quote! {
                let a = (chunk & 0x7) as u8;
                let b = (chunk >> 3) as u8;
                let c = ((chunk >> 11) & 0x3ff) as u16;
                let d = ((chunk >> 21) & 0x3ffff) as u32;
                let e = ((chunk >> 39) & 0x1ff) as u16;
            },
        );
    }

    #[test]
    fn test_generate_chunk_write_8bit() {
        let loc = ast::SourceRange::default();
        let fields = &[ast::Field::Scalar { loc, id: String::from("a"), width: 8 }];
        assert_expr_eq(
            generate_chunk_write(ast::EndiannessValue::BigEndian, 80, fields),
            quote! {
                buffer[10..11].copy_from_slice(&a.to_be_bytes()[0..1]);
            },
        );
    }

    #[test]
    fn test_generate_chunk_write_16bit() {
        let loc = ast::SourceRange::default();
        let fields = &[ast::Field::Scalar { loc, id: String::from("a"), width: 16 }];
        assert_expr_eq(
            generate_chunk_write(ast::EndiannessValue::BigEndian, 80, fields),
            quote! {
                buffer[10..12].copy_from_slice(&a.to_be_bytes()[0..2]);
            },
        );
    }

    #[test]
    fn test_generate_chunk_write_24bit() {
        let loc = ast::SourceRange::default();
        let fields = &[ast::Field::Scalar { loc, id: String::from("a"), width: 24 }];
        assert_expr_eq(
            generate_chunk_write(ast::EndiannessValue::BigEndian, 80, fields),
            quote! {
                buffer[10..13].copy_from_slice(&a.to_be_bytes()[0..3]);
            },
        );
    }

    #[test]
    fn test_generate_chunk_write_multiple_fields() {
        let loc = ast::SourceRange::default();
        let fields = &[
            ast::Field::Scalar { loc, id: String::from("a"), width: 16 },
            ast::Field::Scalar { loc, id: String::from("b"), width: 24 },
        ];
        assert_expr_eq(
            generate_chunk_write(ast::EndiannessValue::BigEndian, 80, fields),
            quote! {
                buffer[10..15].copy_from_slice(&chunk.to_be_bytes()[0..5]);
            },
        );
    }

    #[test]
    fn test_generate_chunk_write_field_adjustments_8bit() {
        let loc = ast::SourceRange::default();
        let fields = vec![
            ast::Field::Scalar { loc, id: String::from("a"), width: 3 },
            ast::Field::Scalar { loc, id: String::from("b"), width: 5 },
        ];
        assert_expr_eq(
            generate_chunk_write_field_adjustments(&fields),
            quote! {
                let chunk = 0;
                let chunk = chunk | (self.a & 0x7);
                let chunk = chunk | ((self.b & 0x1f) << 3);
            },
        );
    }

    #[test]
    fn test_generate_chunk_write_field_adjustments_48bit() {
        let loc = ast::SourceRange::default();
        let fields = vec![
            ast::Field::Scalar { loc, id: String::from("a"), width: 3 },
            ast::Field::Scalar { loc, id: String::from("b"), width: 8 },
            ast::Field::Scalar { loc, id: String::from("c"), width: 10 },
            ast::Field::Scalar { loc, id: String::from("d"), width: 18 },
            ast::Field::Scalar { loc, id: String::from("e"), width: 9 },
        ];
        assert_expr_eq(
            generate_chunk_write_field_adjustments(&fields),
            quote! {
                let chunk = 0;
                let chunk = chunk | ((self.a as u64) & 0x7);
                let chunk = chunk | ((self.b as u64) << 3);
                let chunk = chunk | (((self.c as u64) & 0x3ff) << 11);
                let chunk = chunk | (((self.d as u64) & 0x3ffff) << 21);
                let chunk = chunk | (((self.e as u64) & 0x1ff) << 39);
            },
        );
    }
}
+404 −0

File changed; diff collapsed (preview size limit exceeded).
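
The collapsed file's contents are not shown, but its surface can be inferred from the call sites elsewhere in this commit (`Chunk::new`, `get_name`, `get_width`, `generate_length_checks`, `generate_read`, `generate_write`). A purely hypothetical sketch of that interface, with stand-in types so it compiles on its own; the real signatures may differ:

    // Stand-ins for the real crate types (ast::EndiannessValue,
    // proc_macro2::TokenStream, rust::field::Field); for shape only.
    pub enum EndiannessValue { BigEndian, LittleEndian }
    pub struct TokenStream;
    pub struct Field;

    /// A run of fields that is read or written as one byte-aligned integer.
    pub struct Chunk<'a> {
        fields: &'a [Field],
    }

    impl<'a> Chunk<'a> {
        pub fn new(fields: &'a [Field]) -> Chunk<'a> {
            Chunk { fields }
        }

        /// Total width in bits; a multiple of 8 for a well-formed chunk.
        pub fn get_width(&self) -> usize {
            todo!()
        }

        pub fn generate_read(
            &self,
            packet_name: &str,
            endianness: EndiannessValue,
            offset: usize,
        ) -> TokenStream {
            todo!()
        }

        pub fn generate_write(&self, endianness: EndiannessValue, offset: usize) -> TokenStream {
            todo!()
        }
    }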

+101 −2
use quote::{format_ident, quote};

use crate::ast;
use crate::backends::rust::mask_bits;
use crate::backends::rust::types;

/// Like [`ast::Field::Scalar`].
@@ -27,9 +28,13 @@ impl ScalarField {
        format_ident!("{}", self.id)
    }

+    fn get_type(&self) -> types::Integer {
+        types::Integer::new(self.width)
+    }

    fn generate_decl(&self, visibility: syn::Visibility) -> proc_macro2::TokenStream {
        let field_name = self.get_ident();
-        let field_type = types::Integer::new(self.width);
+        let field_type = self.get_type();
        quote! {
            #visibility #field_name: #field_type
        }
@@ -38,13 +43,87 @@ impl ScalarField {
    fn generate_getter(&self, packet_name: &syn::Ident) -> proc_macro2::TokenStream {
        let field_name = self.get_ident();
        let getter_name = format_ident!("get_{}", self.id);
-        let field_type = types::Integer::new(self.width);
+        let field_type = self.get_type();
        quote! {
            pub fn #getter_name(&self) -> #field_type {
                self.#packet_name.as_ref().#field_name
            }
        }
    }

    fn generate_read_adjustment(
        &self,
        offset: usize,
        chunk_type: types::Integer,
    ) -> proc_macro2::TokenStream {
        let field_name = self.get_ident();
        let field_type = self.get_type();
        let mut field = quote! {
            chunk
        };
        if offset > 0 {
            let offset = syn::Index::from(offset);
            let op = syn::parse_str::<syn::BinOp>(">>").unwrap();
            field = quote! {
                (#field #op #offset)
            };
        }

        if self.width < field_type.width {
            let bit_mask = mask_bits(self.width);
            field = quote! {
                (#field & #bit_mask)
            };
        }

        if field_type.width < chunk_type.width {
            field = quote! {
                #field as #field_type
            };
        }

        quote! {
            let #field_name = #field;
        }
    }

    fn generate_write_adjustment(
        &self,
        offset: usize,
        chunk_type: types::Integer,
    ) -> proc_macro2::TokenStream {
        let field_name = self.get_ident();
        let field_type = self.get_type();

        let mut field = quote! {
            self.#field_name
        };

        if field_type.width < chunk_type.width {
            field = quote! {
                (#field as #chunk_type)
            };
        }

        if self.width < field_type.width {
            let bit_mask = mask_bits(self.width);
            field = quote! {
                (#field & #bit_mask)
            };
        }

        if offset > 0 {
            let field_offset = syn::Index::from(offset);
            let op = syn::parse_str::<syn::BinOp>("<<").unwrap();
            field = quote! {
                (#field #op #field_offset)
            };
        }

        quote! {
            let chunk = chunk | #field;
        }
    }
}
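
For readers unfamiliar with `quote`, the pattern above builds the expression inside-out, wrapping `chunk` in one layer per adjustment. A self-contained illustration (assuming the `quote`, `syn`, and `proc-macro2` crates) for a 5-bit field at bit offset 3:

    use quote::quote;

    fn main() {
        // Start from the chunk, then layer on shift and mask, as
        // generate_read_adjustment does.
        let mut field = quote! { chunk };
        let offset = syn::Index::from(3);
        field = quote! { (#field >> #offset) };
        let bit_mask = syn::parse_str::<syn::LitInt>("0x1f").unwrap();
        field = quote! { (#field & #bit_mask) };
        // Prints `((chunk >> 3) & 0x1f)`, modulo token spacing.
        println!("{}", field);
    }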

/// Projection of [`ast::Field`] with the bits needed for the Rust
@@ -93,4 +172,24 @@ impl Field {
            Field::Scalar(field) => field.generate_getter(packet_name),
        }
    }

    pub fn generate_read_adjustment(
        &self,
        offset: usize,
        chunk_type: types::Integer,
    ) -> proc_macro2::TokenStream {
        match self {
            Field::Scalar(field) => field.generate_read_adjustment(offset, chunk_type),
        }
    }

    pub fn generate_write_adjustment(
        &self,
        offset: usize,
        chunk_type: types::Integer,
    ) -> proc_macro2::TokenStream {
        match self {
            Field::Scalar(field) => field.generate_write_adjustment(offset, chunk_type),
        }
    }
}
+1 −0
//! Utility functions for dealing with Rust integer types.

/// A Rust integer type such as `u8`.
#[derive(Copy, Clone)]
pub struct Integer {
    pub width: usize,
}
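
The derive is needed because the new `generate_read_adjustment` and `generate_write_adjustment` methods take `chunk_type: types::Integer` by value, and the caller passes the same chunk type once per field in the chunk. A minimal stand-alone illustration (hypothetical names):

    #[derive(Copy, Clone)]
    struct Integer {
        width: usize,
    }

    fn use_by_value(ty: Integer) -> usize {
        ty.width
    }

    fn main() {
        let chunk_type = Integer { width: 64 };
        // Without Copy, the first call would move `chunk_type` and the
        // second call would not compile.
        assert_eq!(use_by_value(chunk_type) + use_by_value(chunk_type), 128);
    }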