Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 116632e7 authored by Martin Geisler's avatar Martin Geisler Committed by Automerger Merge Worker
Browse files

Merge changes If47c2f03,Ifa6f822d,Ia6f39472,I3d124501 am: cec70ff5

parents 623e7218 cec70ff5
Loading
Loading
Loading
Loading
+5 −1
Original line number Diff line number Diff line
@@ -37,6 +37,9 @@ rust_test_host {
    name: "pdl_tests",
    defaults: ["pdl_defaults"],
    srcs: ["src/main.rs"],
    proc_macros: [
        "libpaste",
    ],
    test_suites: ["general-tests"],
    enabled: false, // rustfmt is only available on x86.
    arch: {
@@ -55,7 +58,8 @@ rust_test_host {
        "tests/generated/generate_chunk_read_multiple_fields.rs",
        "tests/generated/packet_decl_complex_big_endian.rs",
        "tests/generated/packet_decl_complex_little_endian.rs",
        "tests/generated/packet_decl_empty.rs",
        "tests/generated/packet_decl_empty_big_endian.rs",
        "tests/generated/packet_decl_empty_little_endian.rs",
        "tests/generated/packet_decl_simple_big_endian.rs",
        "tests/generated/packet_decl_simple_little_endian.rs",
        "tests/generated/preamble.rs",
+1 −0
Original line number Diff line number Diff line
@@ -23,3 +23,4 @@ bytes = "1.2.1"
num-derive = "0.3.3"
num-traits = "0.2.15"
thiserror = "1.0.37"
paste = "1.0.6"
+75 −154
Original line number Diff line number Diff line
@@ -32,16 +32,6 @@ macro_rules! quote_block {
    }
}

/// Find byte indices covering `offset..offset+width` bits.
pub fn get_field_range(offset: usize, width: usize) -> std::ops::Range<usize> {
    // First byte touched by the bit range.
    let first = offset / 8;
    // One past the last byte touched: round the bit end up to the
    // next byte boundary (ceiling division by 8).
    let last = (offset + width + 7) / 8;
    first..last
}

/// Generate a bit-mask covering the `n` least significant bits.
pub fn mask_bits(n: usize) -> syn::LitInt {
    syn::parse_str::<syn::LitInt>(&format!("{:#x}", (1u64 << n) - 1)).unwrap()
@@ -147,22 +137,20 @@ fn generate_packet_decl(

    let mut chunk_width = 0;
    let chunks = fields.split_inclusive(|field| {
        chunk_width += field.get_width();
        chunk_width += field.width();
        chunk_width % 8 == 0
    });
    let mut field_parsers = Vec::new();
    let mut field_writers = Vec::new();
    let mut offset = 0;
    for fields in chunks {
        let chunk = Chunk::new(fields);
        field_parsers.push(chunk.generate_read(id, file.endianness.value, offset));
        field_writers.push(chunk.generate_write(file.endianness.value, offset));
        offset += chunk.get_width();
        field_parsers.push(chunk.generate_read(id, file.endianness.value));
        field_writers.push(chunk.generate_write(file.endianness.value));
    }

    let field_names = fields.iter().map(Field::get_ident).collect::<Vec<_>>();
    let field_names = fields.iter().map(Field::ident).collect::<Vec<_>>();

    let packet_size_bits = Chunk::new(fields).get_width();
    let packet_size_bits = Chunk::new(fields).width();
    if packet_size_bits % 8 != 0 {
        panic!("packet {id} does not end on a byte boundary, size: {packet_size_bits} bits",);
    }
@@ -180,7 +168,7 @@ fn generate_packet_decl(
                #conforms
            }

            fn parse(bytes: &[u8]) -> Result<Self> {
            fn parse(mut bytes: &[u8]) -> Result<Self> {
                #(#field_parsers)*
                Ok(Self { #(#field_names),* })
            }
@@ -202,8 +190,7 @@ fn generate_packet_decl(
    code.push_str(&quote_block! {
        impl Packet for #packet_name {
            fn to_bytes(self) -> Bytes {
                let mut buffer = BytesMut::new();
                buffer.resize(self.#ident.get_total_size(), 0);
                let mut buffer = BytesMut::with_capacity(self.#ident.get_total_size());
                self.#ident.write_to(&mut buffer);
                buffer.freeze()
            }
@@ -238,7 +225,7 @@ fn generate_packet_decl(
    let field_getters = fields.iter().map(|field| field.generate_getter(&ident));
    code.push_str(&quote_block! {
        impl #packet_name {
            pub fn parse(bytes: &[u8]) -> Result<Self> {
            pub fn parse(mut bytes: &[u8]) -> Result<Self> {
                Ok(Self::new(Arc::new(#data_name::parse(bytes)?)).unwrap())
            }

@@ -308,106 +295,70 @@ mod tests {
    use crate::ast;
    use crate::parser::parse_inline;
    use crate::test_utils::{assert_snapshot_eq, rustfmt};
    use paste::paste;

    /// Parse a string fragment as a PDL file.
    /// Create a unit test for the given PDL `code`.
    ///
    /// # Panics
    /// The unit test will compare the generated Rust code for all
    /// declarations with previously saved snapshots. The snapshots
    /// are read from `"tests/generated/{name}_{endianness}_{id}.rs"`
    /// where `id` is taken from the declaration.
    ///
    /// Panics on parse errors.
    pub fn parse_str(text: &str) -> ast::File {
        let mut db = ast::SourceDatabase::new();
        parse_inline(&mut db, String::from("stdin"), String::from(text)).expect("parse error")
    }

    #[test]
    fn test_generate_packet_decl_empty() {
        let file = parse_str(
            r#"
              big_endian_packets
              packet Foo {}
            "#,
        );
        let scope = lint::Scope::new(&file).unwrap();
        let decl = &file.declarations[0];
        let actual_code = generate_decl(&scope, &file, decl);
        assert_snapshot_eq("tests/generated/packet_decl_empty.rs", &rustfmt(&actual_code));
    }

    /// When adding new tests or modifying existing ones, use
    /// `UPDATE_SNAPSHOTS=1 cargo test` to automatically populate the
    /// snapshots with the expected output.
    ///
    /// The `code` cannot have an endianness declaration, instead you
    /// must supply either `little_endian` or `big_endian` as
    /// `endianness`.
    macro_rules! make_pdl_test {
        ($name:ident, $code:expr, $endianness:ident) => {
            paste! {
                #[test]
    fn test_generate_packet_decl_simple_little_endian() {
        let file = parse_str(
            r#"
              little_endian_packets

              packet Foo {
                x: 8,
                y: 16,
                z: 24,
              }
            "#,
        );
        let scope = lint::Scope::new(&file).unwrap();
        let decl = &file.declarations[0];
        let actual_code = generate_decl(&scope, &file, decl);
                fn [< test_ $name _ $endianness >]() {
                    let name = stringify!($name);
                    let endianness = stringify!($endianness);
                    let code = format!("{endianness}_packets\n{}", $code);
                    let mut db = ast::SourceDatabase::new();
                    let file = parse_inline(&mut db, String::from("test"), code).unwrap();
                    let actual_code = generate(&db, &file);
                    assert_snapshot_eq(
            "tests/generated/packet_decl_simple_little_endian.rs",
                        &format!("tests/generated/{name}_{endianness}.rs"),
                        &rustfmt(&actual_code),
                    );
                }
            }
        };
    }

    #[test]
    fn test_generate_packet_decl_simple_big_endian() {
        let file = parse_str(
            r#"
              big_endian_packets
    /// Create little- and big-endian tests for the given PDL `code`.
    ///
    /// The `code` cannot have an endianness declaration: we will
    /// automatically generate unit tests for both
    /// "little_endian_packets" and "big_endian_packets".
    macro_rules! test_pdl {
        ($name:ident, $code:expr $(,)?) => {
            make_pdl_test!($name, $code, little_endian);
            make_pdl_test!($name, $code, big_endian);
        };
    }

    test_pdl!(packet_decl_empty, "packet Foo {}");

    test_pdl!(
        packet_decl_simple,
        r#"
          packet Foo {
            x: 8,
            y: 16,
            z: 24,
          }
            "#,
        );
        let scope = lint::Scope::new(&file).unwrap();
        let decl = &file.declarations[0];
        let actual_code = generate_decl(&scope, &file, decl);
        assert_snapshot_eq(
            "tests/generated/packet_decl_simple_big_endian.rs",
            &rustfmt(&actual_code),
        );
    }

    #[test]
    fn test_generate_packet_decl_complex_little_endian() {
        let file = parse_str(
            r#"
              little_endian_packets

              packet Foo {
                a: 3,
                b: 8,
                c: 5,
                d: 24,
                e: 12,
                f: 4,
              }
            "#,
        );
        let scope = lint::Scope::new(&file).unwrap();
        let decl = &file.declarations[0];
        let actual_code = generate_decl(&scope, &file, decl);
        assert_snapshot_eq(
            "tests/generated/packet_decl_complex_little_endian.rs",
            &rustfmt(&actual_code),
        "#
    );
    }

    #[test]
    fn test_generate_packet_decl_complex_big_endian() {
        let file = parse_str(
    test_pdl!(
        packet_decl_complex,
        r#"
              big_endian_packets

          packet Foo {
            a: 3,
            b: 8,
@@ -418,34 +369,4 @@ mod tests {
          }
        "#,
    );
        let scope = lint::Scope::new(&file).unwrap();
        let decl = &file.declarations[0];
        let actual_code = generate_decl(&scope, &file, decl);
        assert_snapshot_eq(
            "tests/generated/packet_decl_complex_big_endian.rs",
            &rustfmt(&actual_code),
        );
    }

    #[test]
    fn test_get_field_range() {
        // Zero widths will give you an empty slice iff the offset is
        // byte aligned. In both cases, the slice covers the empty
        // width. In practice, PDL doesn't allow zero-width fields.
        assert_eq!(get_field_range(/*offset=*/ 0, /*width=*/ 0), (0..0));
        assert_eq!(get_field_range(/*offset=*/ 5, /*width=*/ 0), (0..1));
        assert_eq!(get_field_range(/*offset=*/ 8, /*width=*/ 0), (1..1));
        assert_eq!(get_field_range(/*offset=*/ 9, /*width=*/ 0), (1..2));

        // Non-zero widths work as expected.
        assert_eq!(get_field_range(/*offset=*/ 0, /*width=*/ 1), (0..1));
        assert_eq!(get_field_range(/*offset=*/ 0, /*width=*/ 5), (0..1));
        assert_eq!(get_field_range(/*offset=*/ 0, /*width=*/ 8), (0..1));
        assert_eq!(get_field_range(/*offset=*/ 0, /*width=*/ 20), (0..3));

        assert_eq!(get_field_range(/*offset=*/ 5, /*width=*/ 1), (0..1));
        assert_eq!(get_field_range(/*offset=*/ 5, /*width=*/ 3), (0..1));
        assert_eq!(get_field_range(/*offset=*/ 5, /*width=*/ 4), (0..2));
        assert_eq!(get_field_range(/*offset=*/ 5, /*width=*/ 20), (0..4));
    }
}
+101 −79
Original line number Diff line number Diff line
use crate::ast;
use crate::backends::rust::field::Field;
use crate::backends::rust::get_field_range;
use crate::backends::rust::types::Integer;
use quote::{format_ident, quote};

/// Return the method-name suffix (`"_le"` or `""`) selecting the
/// right `bytes::Buf`/`BufMut` accessor for this width and endianness.
///
/// Single-byte accesses have no endianness, so widths of 8 bits or
/// less never get a suffix.
fn endianness_suffix(width: usize, endianness_value: ast::EndiannessValue) -> &'static str {
    match endianness_value {
        ast::EndiannessValue::LittleEndian if width > 8 => "_le",
        _ => "",
    }
}

/// Parse an unsigned integer from `buffer`.
///
/// The generated code requires that `buffer` is a mutable
/// `bytes::Buf` value.
fn get_uint(
    endianness: ast::EndiannessValue,
    buffer: proc_macro2::Ident,
    width: usize,
) -> proc_macro2::TokenStream {
    let suffix = endianness_suffix(width, endianness);
    match width {
        // Widths with a native accessor: Buf::get_uNN / get_uNN_le.
        8 | 16 | 32 | 64 => {
            let get_u = format_ident!("get_u{}{}", width, suffix);
            quote! {
                #buffer.#get_u()
            }
        }
        // Any other byte-multiple width: fall back to Buf::get_uint,
        // which returns a u64, and cast down to the field's type.
        _ => {
            let get_uint = format_ident!("get_uint{}", suffix);
            let value_type = Integer::new(width);
            let value_nbytes = proc_macro2::Literal::usize_unsuffixed(width / 8);
            quote! {
                #buffer.#get_uint(#value_nbytes) as #value_type
            }
        }
    }
}

/// Write an unsigned integer `value` to `buffer`.
///
/// The generated code requires that `buffer` is a mutable
/// `bytes::BufMut` value.
fn put_uint(
    endianness: ast::EndiannessValue,
    buffer: proc_macro2::Ident,
    value: proc_macro2::TokenStream,
    width: usize,
) -> proc_macro2::TokenStream {
    let suffix = endianness_suffix(width, endianness);
    match width {
        // Widths with a native accessor: BufMut::put_uNN / put_uNN_le.
        8 | 16 | 32 | 64 => {
            let put_u = format_ident!("put_u{}{}", width, suffix);
            quote! {
                #buffer.#put_u(#value)
            }
        }
        // Any other byte-multiple width: fall back to BufMut::put_uint,
        // which takes a u64 plus the number of bytes to write.
        _ => {
            let put_uint = format_ident!("put_uint{}", suffix);
            let value_nbytes = proc_macro2::Literal::usize_unsuffixed(width / 8);
            quote! {
                #buffer.#put_uint(#value as u64, #value_nbytes)
            }
        }
    }
}

/// A chunk of field.
///
/// While fields can have arbitrary widths, a chunk is always an
@@ -22,32 +85,27 @@ impl Chunk<'_> {
    /// Generate a name for this chunk.
    ///
    /// The name is `"chunk"` if there is more than one field.
    pub fn get_name(&self) -> proc_macro2::Ident {
    pub fn name(&self) -> proc_macro2::Ident {
        match self.fields {
            [field] => field.get_ident(),
            [field] => field.ident(),
            _ => format_ident!("chunk"),
        }
    }

    /// Return the width in bits.
    pub fn get_width(&self) -> usize {
        self.fields.iter().map(|field| field.get_width()).sum()
    pub fn width(&self) -> usize {
        self.fields.iter().map(|field| field.width()).sum()
    }

    /// Generate length checks for this chunk.
    pub fn generate_length_check(
        &self,
        packet_name: &str,
        offset: usize,
    ) -> proc_macro2::TokenStream {
        let range = get_field_range(offset, self.get_width());
        let wanted_length = syn::Index::from(range.end);
    pub fn generate_length_check(&self, packet_name: &str) -> proc_macro2::TokenStream {
        let wanted_length = proc_macro2::Literal::usize_unsuffixed(self.width() / 8);
        quote! {
            if bytes.len() < #wanted_length {
            if bytes.remaining() < #wanted_length {
                return Err(Error::InvalidLengthError {
                    obj: #packet_name.to_string(),
                    wanted: #wanted_length,
                    got: bytes.len(),
                    got: bytes.remaining(),
                });
            }
        }
@@ -58,46 +116,18 @@ impl Chunk<'_> {
        &self,
        packet_name: &str,
        endianness_value: ast::EndiannessValue,
        offset: usize,
    ) -> proc_macro2::TokenStream {
        assert!(offset % 8 == 0, "Chunks must be byte-aligned, got offset: {offset}");
        let getter = match endianness_value {
            ast::EndiannessValue::BigEndian => format_ident!("from_be_bytes"),
            ast::EndiannessValue::LittleEndian => format_ident!("from_le_bytes"),
        };

        let chunk_name = self.get_name();
        let chunk_width = self.get_width();
        let chunk_type = Integer::new(chunk_width);
        let chunk_name = self.name();
        let chunk_width = self.width();
        assert!(chunk_width % 8 == 0, "Chunks must have a byte size, got width: {chunk_width}");

        let range = get_field_range(offset, chunk_width);
        let indices = range.map(syn::Index::from).collect::<Vec<_>>();
        let length_check = self.generate_length_check(packet_name, offset);

        // When the chunk_type.width is larger than chunk_width (e.g.
        // chunk_width is 24 but chunk_type.width is 32), then we need
        // zero padding.
        let zero_padding_len = (chunk_type.width - chunk_width) / 8;
        // We need the padding on the MSB side of the payload, so for
        // big-endian, we need to padding on the left, for little-endian
        // we need it on the right.
        let (zero_padding_before, zero_padding_after) = match endianness_value {
            ast::EndiannessValue::BigEndian => {
                (vec![syn::Index::from(0); zero_padding_len], vec![])
            }
            ast::EndiannessValue::LittleEndian => {
                (vec![], vec![syn::Index::from(0); zero_padding_len])
            }
        };

        let length_check = self.generate_length_check(packet_name);
        let read = get_uint(endianness_value, format_ident!("bytes"), chunk_width);
        let read_adjustments = self.generate_read_adjustments();

        quote! {
            #length_check
            let #chunk_name = #chunk_type::#getter([
                #(#zero_padding_before,)* #(bytes[#indices]),* #(, #zero_padding_after)*
            ]);
            let #chunk_name = #read;
            #read_adjustments
        }
    }
@@ -109,14 +139,14 @@ impl Chunk<'_> {
            return quote! {};
        }

        let chunk_width = self.get_width();
        let chunk_width = self.width();
        let chunk_type = Integer::new(chunk_width);

        let mut field_parsers = Vec::new();
        let mut field_offset = 0;
        for field in self.fields {
            field_parsers.push(field.generate_read_adjustment(field_offset, chunk_type));
            field_offset += field.get_width();
            field_offset += field.width();
        }

        quote! {
@@ -127,26 +157,18 @@ impl Chunk<'_> {
    pub fn generate_write(
        &self,
        endianness_value: ast::EndiannessValue,
        offset: usize,
    ) -> proc_macro2::TokenStream {
        let writer = match endianness_value {
            ast::EndiannessValue::BigEndian => format_ident!("to_be_bytes"),
            ast::EndiannessValue::LittleEndian => format_ident!("to_le_bytes"),
        };

        let chunk_width = self.get_width();
        let chunk_name = self.get_name();
        let chunk_width = self.width();
        let chunk_name = self.name();
        assert!(chunk_width % 8 == 0, "Chunks must have a byte size, got width: {chunk_width}");

        let range = get_field_range(offset, chunk_width);
        let start = syn::Index::from(range.start);
        let end = syn::Index::from(range.end);
        // TODO(mgeisler): let slice = (chunk_type_width > chunk_width).then( ... )
        let chunk_byte_width = syn::Index::from(chunk_width / 8);
        let write_adjustments = self.generate_write_adjustments();
        let write =
            put_uint(endianness_value, format_ident!("buffer"), quote!(#chunk_name), chunk_width);
        quote! {
            #write_adjustments
            buffer[#start..#end].copy_from_slice(&#chunk_name.#writer()[0..#chunk_byte_width]);
            #write;
        }
    }

@@ -154,20 +176,20 @@ impl Chunk<'_> {
        if let [field] = self.fields {
            // If there is a single field in the chunk, then we don't have to
            // shift, mask, or cast.
            let field_name = field.get_ident();
            let field_name = field.ident();
            return quote! {
                let #field_name = self.#field_name;
            };
        }

        let chunk_width = self.get_width();
        let chunk_width = self.width();
        let chunk_type = Integer::new(chunk_width);

        let mut field_parsers = Vec::new();
        let mut field_offset = 0;
        for field in self.fields {
            field_parsers.push(field.generate_write_adjustment(field_offset, chunk_type));
            field_offset += field.get_width();
            field_offset += field.width();
        }

        quote! {
@@ -187,7 +209,7 @@ mod tests {
    fn test_generate_read_8bit() {
        let fields = [Field::Scalar(ScalarField { id: String::from("a"), width: 8 })];
        let chunk = Chunk::new(&fields);
        let chunk_read = chunk.generate_read("Foo", ast::EndiannessValue::BigEndian, 80);
        let chunk_read = chunk.generate_read("Foo", ast::EndiannessValue::BigEndian);
        let code = quote! { fn main() { #chunk_read } };
        assert_snapshot_eq(
            "tests/generated/generate_chunk_read_8bit.rs",
@@ -199,7 +221,7 @@ mod tests {
    fn test_generate_read_16bit_le() {
        let fields = [Field::Scalar(ScalarField { id: String::from("a"), width: 16 })];
        let chunk = Chunk::new(&fields);
        let chunk_read = chunk.generate_read("Foo", ast::EndiannessValue::LittleEndian, 80);
        let chunk_read = chunk.generate_read("Foo", ast::EndiannessValue::LittleEndian);
        let code = quote! { fn main() { #chunk_read } };
        assert_snapshot_eq(
            "tests/generated/generate_chunk_read_16bit_le.rs",
@@ -211,7 +233,7 @@ mod tests {
    fn test_generate_read_16bit_be() {
        let fields = [Field::Scalar(ScalarField { id: String::from("a"), width: 16 })];
        let chunk = Chunk::new(&fields);
        let chunk_read = chunk.generate_read("Foo", ast::EndiannessValue::BigEndian, 80);
        let chunk_read = chunk.generate_read("Foo", ast::EndiannessValue::BigEndian);
        let code = quote! { fn main() { #chunk_read } };
        assert_snapshot_eq(
            "tests/generated/generate_chunk_read_16bit_be.rs",
@@ -223,7 +245,7 @@ mod tests {
    fn test_generate_read_24bit_le() {
        let fields = [Field::Scalar(ScalarField { id: String::from("a"), width: 24 })];
        let chunk = Chunk::new(&fields);
        let chunk_read = chunk.generate_read("Foo", ast::EndiannessValue::LittleEndian, 80);
        let chunk_read = chunk.generate_read("Foo", ast::EndiannessValue::LittleEndian);
        let code = quote! { fn main() { #chunk_read } };
        assert_snapshot_eq(
            "tests/generated/generate_chunk_read_24bit_le.rs",
@@ -235,7 +257,7 @@ mod tests {
    fn test_generate_read_24bit_be() {
        let fields = [Field::Scalar(ScalarField { id: String::from("a"), width: 24 })];
        let chunk = Chunk::new(&fields);
        let chunk_read = chunk.generate_read("Foo", ast::EndiannessValue::BigEndian, 80);
        let chunk_read = chunk.generate_read("Foo", ast::EndiannessValue::BigEndian);
        let code = quote! { fn main() { #chunk_read } };
        assert_snapshot_eq(
            "tests/generated/generate_chunk_read_24bit_be.rs",
@@ -250,7 +272,7 @@ mod tests {
            Field::Scalar(ScalarField { id: String::from("b"), width: 24 }),
        ];
        let chunk = Chunk::new(&fields);
        let chunk_read = chunk.generate_read("Foo", ast::EndiannessValue::BigEndian, 80);
        let chunk_read = chunk.generate_read("Foo", ast::EndiannessValue::BigEndian);
        let code = quote! { fn main() { #chunk_read } };
        assert_snapshot_eq(
            "tests/generated/generate_chunk_read_multiple_fields.rs",
@@ -301,10 +323,10 @@ mod tests {
        let fields = [Field::Scalar(ScalarField { id: String::from("a"), width: 8 })];
        let chunk = Chunk::new(&fields);
        assert_expr_eq(
            chunk.generate_write(ast::EndiannessValue::BigEndian, 80),
            chunk.generate_write(ast::EndiannessValue::BigEndian),
            quote! {
                let a = self.a;
                buffer[10..11].copy_from_slice(&a.to_be_bytes()[0..1]);
                buffer.put_u8(a);
            },
        );
    }
@@ -314,10 +336,10 @@ mod tests {
        let fields = [Field::Scalar(ScalarField { id: String::from("a"), width: 16 })];
        let chunk = Chunk::new(&fields);
        assert_expr_eq(
            chunk.generate_write(ast::EndiannessValue::BigEndian, 80),
            chunk.generate_write(ast::EndiannessValue::BigEndian),
            quote! {
                let a = self.a;
                buffer[10..12].copy_from_slice(&a.to_be_bytes()[0..2]);
                buffer.put_u16(a);
            },
        );
    }
@@ -327,10 +349,10 @@ mod tests {
        let fields = [Field::Scalar(ScalarField { id: String::from("a"), width: 24 })];
        let chunk = Chunk::new(&fields);
        assert_expr_eq(
            chunk.generate_write(ast::EndiannessValue::BigEndian, 80),
            chunk.generate_write(ast::EndiannessValue::BigEndian),
            quote! {
                let a = self.a;
                buffer[10..13].copy_from_slice(&a.to_be_bytes()[0..3]);
                buffer.put_uint(a as u64, 3);
            },
        );
    }
@@ -343,12 +365,12 @@ mod tests {
        ];
        let chunk = Chunk::new(&fields);
        assert_expr_eq(
            chunk.generate_write(ast::EndiannessValue::BigEndian, 80),
            chunk.generate_write(ast::EndiannessValue::BigEndian),
            quote! {
                let chunk = 0;
                let chunk = chunk | (self.a as u64);
                let chunk = chunk | (((self.b as u64) & 0xffffff) << 16);
                buffer[10..15].copy_from_slice(&chunk.to_be_bytes()[0..5]);
                buffer.put_uint(chunk as u64, 5);
            },
        );
    }
+13 −13

File changed.

Preview size limit exceeded, changes collapsed.

Loading