Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 4a659ad6 authored by Martin Geisler's avatar Martin Geisler
Browse files

pdl: add ‘Chunk::generate_read_adjustments’

Test: atest pdl_tests pdl_inline_tests
Change-Id: I2532bf2b90b8bd8107070cac5782b6d15c15ee54
parent 6dab87f1
Loading
Loading
Loading
Loading
+1 −97
Original line number Original line Diff line number Diff line
@@ -43,63 +43,6 @@ pub fn get_field_range(offset: usize, width: usize) -> std::ops::Range<usize> {
    start..end
    start..end
}
}


/// Generate shift/mask/cast statements that unpack each field of a
/// chunk from the combined `chunk` integer read off the wire.
///
/// Returns an empty token stream when the chunk holds a single field,
/// since the chunk value can then be used directly without adjustment.
fn generate_chunk_read_field_adjustments(fields: &[ast::Field]) -> proc_macro2::TokenStream {
    // If there is a single field in the chunk, then we don't have to
    // shift, mask, or cast.
    if fields.len() == 1 {
        return quote! {};
    }

    let chunk_fields = fields.iter().map(Field::from).collect::<Vec<_>>();
    let chunk_width = Chunk::new(&chunk_fields).get_width();
    let chunk_type = types::Integer::new(chunk_width);

    let mut field_parsers = Vec::new();
    let mut field_offset = 0;
    for field in fields {
        let field_name = Field::from(field).get_ident();
        match field {
            ast::Field::Scalar { width, .. } => {
                let field_type = types::Integer::new(*width);

                // Start from the whole chunk and progressively wrap the
                // expression with shift, mask, and cast as needed.
                let mut expr = quote! {
                    chunk
                };
                if field_offset > 0 {
                    // `syn::Index` renders the offset without a type
                    // suffix (`3` rather than `3usize`).
                    let field_offset = syn::Index::from(field_offset);
                    expr = quote! {
                        (#expr >> #field_offset)
                    };
                }

                if *width < field_type.width {
                    let bit_mask = mask_bits(*width);
                    expr = quote! {
                        (#expr & #bit_mask)
                    };
                }

                if field_type.width < chunk_type.width {
                    // Bug fix: no trailing semicolon inside the quote —
                    // the expression is embedded in the `let …;` statement
                    // below, and a stray `;` would leak into the output
                    // (the 48-bit test expects a single semicolon).
                    expr = quote! {
                        #expr as #field_type
                    };
                }

                field_offset += width;
                field_parsers.push(quote! {
                    let #field_name = #expr;
                });
            }
            _ => todo!("unsupported field: {:?}", field),
        }
    }

    quote! {
        #(#field_parsers)*
    }
}

fn generate_chunk_write_field_adjustments(chunk: &[ast::Field]) -> proc_macro2::TokenStream {
fn generate_chunk_write_field_adjustments(chunk: &[ast::Field]) -> proc_macro2::TokenStream {
    // Work directly with the field name if we are writing a single
    // Work directly with the field name if we are writing a single
    // field. This generates simpler code.
    // field. This generates simpler code.
@@ -165,7 +108,7 @@ fn generate_chunk_write_field_adjustments(chunk: &[ast::Field]) -> proc_macro2::
}
}


/// Generate a bit-mask which masks out `n` least significant bits.
///
/// Works for the full range `0..=64`; the previous `(1u64 << n) - 1`
/// expression overflowed the shift for `n == 64` (a panic in debug
/// builds and a silently-wrong mask in release builds).
pub fn mask_bits(n: usize) -> syn::LitInt {
    let mask = if n >= 64 { u64::MAX } else { (1u64 << n) - 1 };
    syn::parse_str::<syn::LitInt>(&format!("{:#x}", mask)).unwrap()
}


@@ -301,7 +244,6 @@ fn generate_packet_decl(
            file.endianness.value,
            file.endianness.value,
            offset,
            offset,
        ));
        ));
        field_parsers.push(generate_chunk_read_field_adjustments(chunk));


        field_writers.push(generate_chunk_write_field_adjustments(chunk));
        field_writers.push(generate_chunk_write_field_adjustments(chunk));
        field_writers.push(generate_chunk_write(file.endianness.value, offset, chunk));
        field_writers.push(generate_chunk_write(file.endianness.value, offset, chunk));
@@ -631,44 +573,6 @@ mod tests {
        assert_eq!(get_field_range(/*offset=*/ 5, /*width=*/ 20), (0..4));
        assert_eq!(get_field_range(/*offset=*/ 5, /*width=*/ 20), (0..4));
    }
    }


    #[test]
    fn test_generate_chunk_read_field_adjustments_8bit() {
        // Two scalar fields packed into one 8-bit chunk: `a` occupies
        // bits 0..3 and `b` bits 3..8, so no casts are needed.
        let loc = ast::SourceRange::default();
        let field = |id: &str, width| ast::Field::Scalar { loc, id: String::from(id), width };
        let fields = [field("a", 3), field("b", 5)];
        let actual = generate_chunk_read_field_adjustments(&fields);
        assert_expr_eq(
            actual,
            quote! {
                let a = (chunk & 0x7);
                let b = ((chunk >> 3) & 0x1f);
            },
        );
    }

    #[test]
    fn test_generate_chunk_read_field_adjustments_48bit() {
        // Five fields of mixed widths packed into a 48-bit chunk; each
        // extracted value is cast down from the wider chunk type.
        let loc = ast::SourceRange::default();
        let field = |id: &str, width| ast::Field::Scalar { loc, id: String::from(id), width };
        let fields =
            [field("a", 3), field("b", 8), field("c", 10), field("d", 18), field("e", 9)];
        let actual = generate_chunk_read_field_adjustments(&fields);
        assert_expr_eq(
            actual,
            quote! {
                let a = (chunk & 0x7) as u8;
                let b = (chunk >> 3) as u8;
                let c = ((chunk >> 11) & 0x3ff) as u16;
                let d = ((chunk >> 21) & 0x3ffff) as u32;
                let e = ((chunk >> 39) & 0x1ff) as u16;
            },
        );
    }

    #[test]
    #[test]
    fn test_generate_chunk_write_8bit() {
    fn test_generate_chunk_write_8bit() {
        let loc = ast::SourceRange::default();
        let loc = ast::SourceRange::default();
+65 −0
Original line number Original line Diff line number Diff line
@@ -109,11 +109,36 @@ impl Chunk<'_> {
            }
            }
        };
        };


        let read_adjustments = self.generate_read_adjustments();

        quote! {
        quote! {
            #(#length_checks)*
            #(#length_checks)*
            let #chunk_name = #chunk_type::#getter([
            let #chunk_name = #chunk_type::#getter([
                #(#zero_padding_before,)* #(bytes[#indices]),* #(, #zero_padding_after)*
                #(#zero_padding_before,)* #(bytes[#indices]),* #(, #zero_padding_after)*
            ]);
            ]);
            #read_adjustments
        }
    }

    fn generate_read_adjustments(&self) -> proc_macro2::TokenStream {
        // If there is a single field in the chunk, then we don't have to
        // shift, mask, or cast.
        if self.fields.len() == 1 {
            return quote! {};
        }

        let chunk_width = self.get_width();
        let chunk_type = Integer::new(chunk_width);

        let mut field_parsers = Vec::new();
        let mut field_offset = 0;
        for field in self.fields {
            field_parsers.push(field.generate_read_adjustment(field_offset, chunk_type));
            field_offset += field.get_width();
        }

        quote! {
            #(#field_parsers)*
        }
        }
    }
    }
}
}
@@ -252,6 +277,46 @@ mod tests {
                }
                }
                let chunk =
                let chunk =
                    u64::from_be_bytes([0, 0, 0, bytes[10], bytes[11], bytes[12], bytes[13], bytes[14]]);
                    u64::from_be_bytes([0, 0, 0, bytes[10], bytes[11], bytes[12], bytes[13], bytes[14]]);
                let a = chunk as u16;
                let b = ((chunk >> 16) & 0xffffff) as u32;
            },
        );
    }

    #[test]
    fn test_generate_read_adjustments_8bit() {
        // Two fields sharing one 8-bit chunk: `a` in bits 0..3, `b` in
        // bits 3..8; no casts since the chunk type equals the field types.
        let scalar =
            |id: &str, width| Field::Scalar(ScalarField { id: String::from(id), width });
        let fields = vec![scalar("a", 3), scalar("b", 5)];
        let chunk = Chunk::new(&fields);
        assert_expr_eq(
            chunk.generate_read_adjustments(),
            quote! {
                let a = (chunk & 0x7);
                let b = ((chunk >> 3) & 0x1f);
            },
        );
    }

    #[test]
    fn test_generate_read_adjustments_48bit() {
        // Five fields of mixed widths in one 48-bit chunk; each value is
        // cast down from the chunk's integer type.
        let scalar =
            |id: &str, width| Field::Scalar(ScalarField { id: String::from(id), width });
        let fields =
            vec![scalar("a", 3), scalar("b", 8), scalar("c", 10), scalar("d", 18), scalar("e", 9)];
        let chunk = Chunk::new(&fields);
        assert_expr_eq(
            chunk.generate_read_adjustments(),
            quote! {
                let a = (chunk & 0x7) as u8;
                let b = (chunk >> 3) as u8;
                let c = ((chunk >> 11) & 0x3ff) as u16;
                let d = ((chunk >> 21) & 0x3ffff) as u32;
                let e = ((chunk >> 39) & 0x1ff) as u16;
            },
        );
    }
+53 −2
Original line number Original line Diff line number Diff line
use quote::{format_ident, quote};
use quote::{format_ident, quote};


use crate::ast;
use crate::ast;
use crate::backends::rust::mask_bits;
use crate::backends::rust::types;
use crate::backends::rust::types;


/// Like [`ast::Field::Scalar`].
/// Like [`ast::Field::Scalar`].
@@ -27,9 +28,13 @@ impl ScalarField {
        format_ident!("{}", self.id)
        format_ident!("{}", self.id)
    }
    }


    /// Returns the Rust integer type used to hold this field's value,
    /// derived from the field's bit width.
    fn get_type(&self) -> types::Integer {
        types::Integer::new(self.width)
    }

    fn generate_decl(&self, visibility: syn::Visibility) -> proc_macro2::TokenStream {
    fn generate_decl(&self, visibility: syn::Visibility) -> proc_macro2::TokenStream {
        let field_name = self.get_ident();
        let field_name = self.get_ident();
        let field_type = types::Integer::new(self.width);
        let field_type = self.get_type();
        quote! {
        quote! {
            #visibility #field_name: #field_type
            #visibility #field_name: #field_type
        }
        }
@@ -38,13 +43,49 @@ impl ScalarField {
    fn generate_getter(&self, packet_name: &syn::Ident) -> proc_macro2::TokenStream {
    fn generate_getter(&self, packet_name: &syn::Ident) -> proc_macro2::TokenStream {
        let field_name = self.get_ident();
        let field_name = self.get_ident();
        let getter_name = format_ident!("get_{}", self.id);
        let getter_name = format_ident!("get_{}", self.id);
        let field_type = types::Integer::new(self.width);
        let field_type = self.get_type();
        quote! {
        quote! {
            pub fn #getter_name(&self) -> #field_type {
            pub fn #getter_name(&self) -> #field_type {
                self.#packet_name.as_ref().#field_name
                self.#packet_name.as_ref().#field_name
            }
            }
        }
        }
    }
    }

    /// Generate a `let <name> = …;` statement extracting this field from
    /// the combined `chunk` value: shifted right by `offset` bits, masked
    /// to the field's width, and cast down when narrower than the chunk.
    fn generate_read_adjustment(
        &self,
        offset: usize,
        chunk_type: types::Integer,
    ) -> proc_macro2::TokenStream {
        let field_name = self.get_ident();
        let field_type = self.get_type();
        // Start from the whole chunk and progressively wrap the
        // expression with shift, mask, and cast as needed.
        let mut field = quote! {
            chunk
        };
        if offset > 0 {
            // `syn::Index` renders without a type suffix (`3`, not `3usize`).
            let offset = syn::Index::from(offset);
            field = quote! {
                (#field >> #offset)
            };
        }

        if self.width < field_type.width {
            let bit_mask = mask_bits(self.width);
            field = quote! {
                (#field & #bit_mask)
            };
        }

        if field_type.width < chunk_type.width {
            // Bug fix: no trailing semicolon inside the quote — the
            // expression is embedded in the `let …;` statement below, and
            // a stray `;` would leak into the generated code (the tests
            // expect a single semicolon per statement).
            field = quote! {
                #field as #field_type
            };
        }

        quote! {
            let #field_name = #field;
        }
    }
}
}


/// Projection of [`ast::Field`] with the bits needed for the Rust
/// Projection of [`ast::Field`] with the bits needed for the Rust
@@ -93,4 +134,14 @@ impl Field {
            Field::Scalar(field) => field.generate_getter(packet_name),
            Field::Scalar(field) => field.generate_getter(packet_name),
        }
        }
    }
    }

    /// Generate a statement that extracts this field's value from the
    /// shared `chunk` integer, shifting by `offset` bits and applying the
    /// mask/cast adjustments the concrete field kind requires.
    pub fn generate_read_adjustment(
        &self,
        offset: usize,
        chunk_type: types::Integer,
    ) -> proc_macro2::TokenStream {
        match self {
            Field::Scalar(field) => field.generate_read_adjustment(offset, chunk_type),
        }
    }
}
}
+1 −0
Original line number Original line Diff line number Diff line
//! Utility functions for dealing with Rust integer types.
//! Utility functions for dealing with Rust integer types.


/// A Rust integer type such as `u8`.
/// A Rust integer type such as `u8`.
#[derive(Copy, Clone)]
pub struct Integer {
pub struct Integer {
    pub width: usize,
    pub width: usize,
}
}