From b01d57fab68a5dbce603e2c1fa38a4298496b7ce Mon Sep 17 00:00:00 2001 From: Konrad Kohbrok Date: Tue, 14 Nov 2023 13:23:01 +0100 Subject: [PATCH 1/5] tls_codec: Rename functions in DeserializeBytes trait (#1251) * rename functions in DeserializeBytes trait, using _bytes postfix * remove unnecessary qualifications * remove deserialize_bytes from Deserialize trait * add Changelog --- tls_codec/CHANGELOG.md | 16 ++++++ tls_codec/benches/quic_vec.rs | 6 +-- tls_codec/benches/tls_vec.rs | 12 ++--- tls_codec/derive/src/lib.rs | 10 ++-- tls_codec/derive/tests/decode.rs | 27 ++++------ tls_codec/derive/tests/decode_bytes.rs | 73 +++++++++++--------------- tls_codec/src/arrays.rs | 2 +- tls_codec/src/lib.rs | 17 ++---- tls_codec/src/primitives.rs | 26 ++++----- tls_codec/src/quic_vec.rs | 16 +++--- tls_codec/src/tls_vec.rs | 15 +++--- tls_codec/tests/decode.rs | 38 +++++++------- tls_codec/tests/decode_bytes.rs | 6 +-- 13 files changed, 123 insertions(+), 141 deletions(-) create mode 100644 tls_codec/CHANGELOG.md diff --git a/tls_codec/CHANGELOG.md b/tls_codec/CHANGELOG.md new file mode 100644 index 000000000..d1f2d4dd5 --- /dev/null +++ b/tls_codec/CHANGELOG.md @@ -0,0 +1,16 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + +## [Unreleased] + +### Changed + +- [#1251](https://github.com/RustCrypto/formats/pull/1251): Add `_bytes` suffix to function names in the `DeserializeBytes` trait to avoid collisions with function names in the `Deserialize` trait + +### Removed + +- [#1251](https://github.com/RustCrypto/formats/pull/1251): Remove the `tls_deserialize_bytes` function from the `Deserialize` trait diff --git a/tls_codec/benches/quic_vec.rs b/tls_codec/benches/quic_vec.rs index 7b7c5e546..ece392d22 100644 --- a/tls_codec/benches/quic_vec.rs +++ b/tls_codec/benches/quic_vec.rs @@ -20,8 +20,7 @@ fn vector(c: &mut Criterion) { }, |serialized_long_vec| { let _deserialized_long_vec = - as Deserialize>::tls_deserialize(&mut serialized_long_vec.as_slice()) - .unwrap(); + Vec::::tls_deserialize(&mut serialized_long_vec.as_slice()).unwrap(); }, BatchSize::SmallInput, ) @@ -47,8 +46,7 @@ fn byte_vector(c: &mut Criterion) { }, |serialized_long_vec| { let _deserialized_long_vec = - ::tls_deserialize(&mut serialized_long_vec.as_slice()) - .unwrap(); + VLBytes::tls_deserialize(&mut serialized_long_vec.as_slice()).unwrap(); }, BatchSize::SmallInput, ) diff --git a/tls_codec/benches/tls_vec.rs b/tls_codec/benches/tls_vec.rs index fc4244e05..996a72c76 100644 --- a/tls_codec/benches/tls_vec.rs +++ b/tls_codec/benches/tls_vec.rs @@ -19,10 +19,8 @@ fn vector(c: &mut Criterion) { TlsSliceU32(&long_vector).tls_serialize_detached().unwrap() }, |serialized_long_vec| { - let _deserialized_long_vec = as Deserialize>::tls_deserialize( - &mut serialized_long_vec.as_slice(), - ) - .unwrap(); + let _deserialized_long_vec = + TlsVecU32::::tls_deserialize(&mut serialized_long_vec.as_slice()).unwrap(); }, BatchSize::SmallInput, ) @@ -49,10 +47,8 @@ fn byte_vector(c: &mut Criterion) { .unwrap() }, |serialized_long_vec| { - let _deserialized_long_vec = as Deserialize>::tls_deserialize( - &mut serialized_long_vec.as_slice(), - ) - .unwrap(); + let _deserialized_long_vec = + TlsVecU32::::tls_deserialize(&mut serialized_long_vec.as_slice()).unwrap(); }, BatchSize::SmallInput, ) diff --git a/tls_codec/derive/src/lib.rs 
b/tls_codec/derive/src/lib.rs index dc5c9d417..07abfad98 100644 --- a/tls_codec/derive/src/lib.rs +++ b/tls_codec/derive/src/lib.rs @@ -1005,8 +1005,8 @@ fn impl_deserialize_bytes(parsed_ast: TlsStruct) -> TokenStream2 { let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); quote! { impl #impl_generics tls_codec::DeserializeBytes for #ident #ty_generics #where_clause { - fn tls_deserialize(bytes: &[u8]) -> core::result::Result<(Self, &[u8]), tls_codec::Error> { - #(let (#members_values, bytes) = #prefixes::tls_deserialize(bytes)?;)* + fn tls_deserialize_bytes(bytes: &[u8]) -> core::result::Result<(Self, &[u8]), tls_codec::Error> { + #(let (#members_values, bytes) = #prefixes::tls_deserialize_bytes(bytes)?;)* Ok((Self { #(#members: #members_values,)* #(#members_default: Default::default(),)* @@ -1049,7 +1049,7 @@ fn impl_deserialize_bytes(parsed_ast: TlsStruct) -> TokenStream2 { .collect::>(); quote! { #discriminant => { - #(let (#member_values, remainder) = #prefixes::tls_deserialize(remainder)?;)* + #(let (#member_values, remainder) = #prefixes::tls_deserialize_bytes(remainder)?;)* let result = #ident::#variant_id { #(#members: #member_values,)* }; Ok((result, remainder)) }, @@ -1059,9 +1059,9 @@ fn impl_deserialize_bytes(parsed_ast: TlsStruct) -> TokenStream2 { let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); quote! { impl #impl_generics tls_codec::DeserializeBytes for #ident #ty_generics #where_clause { - fn tls_deserialize(bytes: &[u8]) -> core::result::Result<(Self, &[u8]), tls_codec::Error> { + fn tls_deserialize_bytes(bytes: &[u8]) -> core::result::Result<(Self, &[u8]), tls_codec::Error> { #discriminant_constants - let (discriminant, remainder) = <#repr as tls_codec::DeserializeBytes>::tls_deserialize(bytes)?; + let (discriminant, remainder) = #repr::tls_deserialize_bytes(bytes)?; match discriminant { #(#arms)* _ => { diff --git a/tls_codec/derive/tests/decode.rs b/tls_codec/derive/tests/decode.rs index 107624e23..93d5fa430 100644 --- a/tls_codec/derive/tests/decode.rs +++ b/tls_codec/derive/tests/decode.rs @@ -1,6 +1,7 @@ #![cfg(feature = "std")] use tls_codec::{ - Deserialize, Error, Serialize, Size, TlsSliceU16, TlsVecU16, TlsVecU32, TlsVecU8, VLBytes, + Deserialize, DeserializeBytes, Error, Serialize, Size, TlsSliceU16, TlsVecU16, TlsVecU32, + TlsVecU8, VLBytes, }; use tls_codec_derive::{TlsDeserialize, TlsDeserializeBytes, TlsSerialize, TlsSize}; @@ -58,8 +59,7 @@ fn tuple_struct() { let serialized_t1 = t1.tls_serialize_detached().unwrap(); let deserialized_t1 = TupleStruct1::tls_deserialize(&mut serialized_t1.as_slice()).unwrap(); let (deserialized_bytes_t1, _remainder) = - ::tls_deserialize(serialized_t1.as_slice()) - .unwrap(); + TupleStruct1::tls_deserialize_bytes(serialized_t1.as_slice()).unwrap(); assert_eq!(t1, deserialized_t1); assert_eq!(t1, deserialized_bytes_t1); assert_eq!( @@ -75,8 +75,7 @@ fn tuple_struct() { let serialized_t2 = t2.tls_serialize_detached().unwrap(); let deserialized_t2 = TupleStruct::tls_deserialize(&mut serialized_t2.as_slice()).unwrap(); let (deserialized_bytes_t2, _remainder) = - ::tls_deserialize(serialized_t2.as_slice()) - .unwrap(); + TupleStruct::tls_deserialize_bytes(serialized_t2.as_slice()).unwrap(); assert_eq!(t2, deserialized_t2); assert_eq!(t2, deserialized_bytes_t2); assert_eq!( @@ -94,8 +93,7 @@ fn simple_enum() { let b = &[0u8, 5] as &[u8]; let mut b_reader = b; let deserialized = ExtensionType::tls_deserialize(&mut b_reader).unwrap(); - let (deserialized_bytes, _remainder) = - 
::tls_deserialize(b).unwrap(); + let (deserialized_bytes, _remainder) = ExtensionType::tls_deserialize_bytes(b).unwrap(); assert_eq!(ExtensionType::RatchetTree, deserialized); assert_eq!(ExtensionType::RatchetTree, deserialized_bytes); @@ -109,8 +107,7 @@ fn simple_enum() { for variant in variants.iter() { let deserialized = ExtensionType::tls_deserialize(&mut b_reader).unwrap(); assert_eq!(variant, &deserialized); - let (deserialized_bytes, remainder) = - ::tls_deserialize(b).unwrap(); + let (deserialized_bytes, remainder) = ExtensionType::tls_deserialize_bytes(b).unwrap(); b = remainder; assert_eq!(variant, &deserialized_bytes); } @@ -129,11 +126,9 @@ fn deserialize_tls_vec() { assert_eq!(long_vector.len(), deserialized_long_vec.len()); assert_eq!(long_vector, deserialized_long_vec); let (deserialized_long_vec_bytes, _remainder): (Vec, &[u8]) = - as tls_codec::DeserializeBytes>::tls_deserialize( - serialized_long_vec.as_slice(), - ) - .map(|(v, r)| (v.into(), r)) - .unwrap(); + TlsVecU16::::tls_deserialize_bytes(serialized_long_vec.as_slice()) + .map(|(v, r)| (v.into(), r)) + .unwrap(); assert_eq!(long_vector.len(), deserialized_long_vec_bytes.len()); assert_eq!(long_vector, deserialized_long_vec_bytes); } @@ -304,8 +299,8 @@ mod custom_bytes { TlsByteSliceU32(v).tls_serialize(writer) } - pub fn tls_deserialize(bytes: &[u8]) -> Result<(Vec, &[u8]), tls_codec::Error> { - let (vec, remainder) = TlsByteVecU32::tls_deserialize(bytes)?; + pub fn tls_deserialize_bytes(bytes: &[u8]) -> Result<(Vec, &[u8]), tls_codec::Error> { + let (vec, remainder) = TlsByteVecU32::tls_deserialize_bytes(bytes)?; Ok((vec.into_vec(), remainder)) } } diff --git a/tls_codec/derive/tests/decode_bytes.rs b/tls_codec/derive/tests/decode_bytes.rs index 4a3267f01..c08ce3be8 100644 --- a/tls_codec/derive/tests/decode_bytes.rs +++ b/tls_codec/derive/tests/decode_bytes.rs @@ -31,15 +31,13 @@ struct SomeValue { #[test] fn simple_enum() { let serialized = ExtensionType::KeyId.tls_serialize().unwrap(); - let (deserialized, rest) = - ::tls_deserialize(&serialized).unwrap(); + let (deserialized, rest) = ExtensionType::tls_deserialize_bytes(&serialized).unwrap(); assert_eq!(deserialized, ExtensionType::KeyId); - assert_eq!(rest, []); + assert!(rest.is_empty()); let serialized = ExtensionType::SomethingElse.tls_serialize().unwrap(); - let (deserialized, rest) = - ::tls_deserialize(&serialized).unwrap(); + let (deserialized, rest) = ExtensionType::tls_deserialize_bytes(&serialized).unwrap(); assert_eq!(deserialized, ExtensionType::SomethingElse); - assert_eq!(rest, []); + assert!(rest.is_empty()); } #[test] @@ -50,10 +48,9 @@ fn simple_struct() { additional_data: None, }; let serialized = extension.tls_serialize().unwrap(); - let (deserialized, rest) = - ::tls_deserialize(&serialized).unwrap(); + let (deserialized, rest) = ExtensionStruct::tls_deserialize_bytes(&serialized).unwrap(); assert_eq!(deserialized, extension); - assert_eq!(rest, []); + assert!(rest.is_empty()); } #[test] @@ -65,19 +62,18 @@ fn tuple_struct() { }; let x = TupleStruct(ext, 6); let serialized = x.tls_serialize().unwrap(); - let (deserialized, rest) = - ::tls_deserialize(&serialized).unwrap(); + let (deserialized, rest) = TupleStruct::tls_deserialize_bytes(&serialized).unwrap(); assert_eq!(deserialized, x); - assert_eq!(rest, []); + assert!(rest.is_empty()); } #[test] fn byte_arrays() { let x = [0u8, 1, 2, 3]; let serialized = x.tls_serialize().unwrap(); - let (deserialized, rest) = <[u8; 4] as DeserializeBytes>::tls_deserialize(&serialized).unwrap(); + let 
(deserialized, rest) = <[u8; 4]>::tls_deserialize_bytes(&serialized).unwrap(); assert_eq!(deserialized, x); - assert_eq!(rest, []); + assert!(rest.is_empty()); } #[derive(TlsSerializeBytes, TlsDeserializeBytes, TlsSize, Debug, PartialEq)] @@ -98,10 +94,10 @@ mod custom { v.tls_serialize() } - pub fn tls_deserialize( + pub fn tls_deserialize_bytes( bytes: &[u8], ) -> Result<(T, &[u8]), tls_codec::Error> { - ::tls_deserialize(bytes) + T::tls_deserialize_bytes(bytes) } } @@ -113,9 +109,9 @@ fn custom() { }; let serialized = x.tls_serialize().unwrap(); assert_eq!(vec![3, 0, 1, 2, 3], serialized); - let (deserialized, rest) = ::tls_deserialize(&serialized).unwrap(); + let (deserialized, rest) = Custom::tls_deserialize_bytes(&serialized).unwrap(); assert_eq!(deserialized, x); - assert_eq!(rest, []); + assert!(rest.is_empty()); } #[derive(TlsSerializeBytes, TlsDeserializeBytes, TlsSize, Debug, PartialEq)] @@ -128,10 +124,9 @@ enum EnumWithTupleVariant { fn enum_with_tuple_variant() { let x = EnumWithTupleVariant::A(3, 4); let serialized = x.tls_serialize().unwrap(); - let (deserialized, rest) = - ::tls_deserialize(&serialized).unwrap(); + let (deserialized, rest) = EnumWithTupleVariant::tls_deserialize_bytes(&serialized).unwrap(); assert_eq!(deserialized, x); - assert_eq!(rest, []); + assert!(rest.is_empty()); } #[derive(TlsSerializeBytes, TlsDeserializeBytes, TlsSize, Debug, PartialEq)] @@ -144,10 +139,9 @@ enum EnumWithStructVariant { fn enum_with_struct_variant() { let x = EnumWithStructVariant::A { foo: 3, bar: 4 }; let serialized = x.tls_serialize().unwrap(); - let (deserialized, rest) = - ::tls_deserialize(&serialized).unwrap(); + let (deserialized, rest) = EnumWithStructVariant::tls_deserialize_bytes(&serialized).unwrap(); assert_eq!(deserialized, x); - assert_eq!(rest, []); + assert!(rest.is_empty()); } #[derive(TlsSerializeBytes, TlsDeserializeBytes, TlsSize, Debug, PartialEq)] @@ -164,9 +158,9 @@ fn enum_with_data_and_discriminant() { let serialized = x.tls_serialize().unwrap(); let (deserialized, rest) = - ::tls_deserialize(&serialized).unwrap(); + EnumWithDataAndDiscriminant::tls_deserialize_bytes(&serialized).unwrap(); assert_eq!(deserialized, x); - assert_eq!(rest, []); + assert!(rest.is_empty()); } #[test] @@ -174,9 +168,9 @@ fn discriminant_is_incremented_implicitly() { let x = EnumWithDataAndDiscriminant::B; let serialized = x.tls_serialize().unwrap(); let (deserialized, rest) = - ::tls_deserialize(&serialized).unwrap(); + EnumWithDataAndDiscriminant::tls_deserialize_bytes(&serialized).unwrap(); assert_eq!(deserialized, x); - assert_eq!(rest, []); + assert!(rest.is_empty()); } mod discriminant { @@ -209,26 +203,23 @@ fn enum_with_data_and_const_discriminant() { let serialized = x.tls_serialize().unwrap(); assert_eq!(vec![0, 3, 4], serialized); let (deserialized, rest) = - ::tls_deserialize(&serialized) - .unwrap(); + EnumWithDataAndConstDiscriminant::tls_deserialize_bytes(&serialized).unwrap(); assert_eq!(deserialized, x); - assert_eq!(rest, []); + assert!(rest.is_empty()); let x = EnumWithDataAndConstDiscriminant::B; let serialized = x.tls_serialize().unwrap(); let (deserialized, rest) = - ::tls_deserialize(&serialized) - .unwrap(); + EnumWithDataAndConstDiscriminant::tls_deserialize_bytes(&serialized).unwrap(); assert_eq!(deserialized, x); - assert_eq!(rest, []); + assert!(rest.is_empty()); let x = EnumWithDataAndConstDiscriminant::C; let serialized = x.tls_serialize().unwrap(); let (deserialized, rest) = - ::tls_deserialize(&serialized) - .unwrap(); + 
EnumWithDataAndConstDiscriminant::tls_deserialize_bytes(&serialized).unwrap(); assert_eq!(deserialized, x); - assert_eq!(rest, []); + assert!(rest.is_empty()); } #[derive(TlsSerializeBytes, TlsDeserializeBytes, TlsSize, Debug, PartialEq)] @@ -242,9 +233,9 @@ fn enum_with_custom_serialized_field() { let x = EnumWithCustomSerializedField::A(vec![1, 2, 3]); let serialized = x.tls_serialize().unwrap(); let (deserialized, rest) = - ::tls_deserialize(&serialized).unwrap(); + EnumWithCustomSerializedField::tls_deserialize_bytes(&serialized).unwrap(); assert_eq!(deserialized, x); - assert_eq!(rest, []); + assert!(rest.is_empty()); } #[test] @@ -254,9 +245,9 @@ fn that_skip_attribute_on_struct_works() { T: std::fmt::Debug + PartialEq + SerializeBytes + Size, { let serialized = test.tls_serialize().unwrap(); - let (deserialized, rest) = ::tls_deserialize(&serialized).unwrap(); + let (deserialized, rest) = T::tls_deserialize_bytes(&serialized).unwrap(); assert_eq!(deserialized, expected); - assert_eq!(rest, []); + assert!(rest.is_empty()); } #[derive(Debug, PartialEq, TlsSerializeBytes, TlsDeserializeBytes, TlsSize)] diff --git a/tls_codec/src/arrays.rs b/tls_codec/src/arrays.rs index 451972a8a..3e6200896 100644 --- a/tls_codec/src/arrays.rs +++ b/tls_codec/src/arrays.rs @@ -34,7 +34,7 @@ impl Deserialize for [u8; LEN] { impl DeserializeBytes for [u8; LEN] { #[inline] - fn tls_deserialize(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { + fn tls_deserialize_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { let out = bytes .get(..LEN) .ok_or(Error::EndOfStream)? diff --git a/tls_codec/src/lib.rs b/tls_codec/src/lib.rs index d47c1ee4e..7693f193b 100644 --- a/tls_codec/src/lib.rs +++ b/tls_codec/src/lib.rs @@ -192,17 +192,6 @@ pub trait Deserialize: Size { Ok(out) } - /// This function deserializes the provided `bytes` and returns the populated - /// struct. - /// - /// Returns an error if one occurs during deserialization. - #[cfg(feature = "std")] - fn tls_deserialize_bytes(bytes: impl AsRef<[u8]>) -> Result - where - Self: Sized, - { - Self::tls_deserialize(&mut bytes.as_ref()) - } } pub trait DeserializeBytes: Size { @@ -212,7 +201,7 @@ pub trait DeserializeBytes: Size { /// In order to get the amount of bytes read, use [`Size::tls_serialized_len`]. /// /// Returns an error if one occurs during deserialization. - fn tls_deserialize(bytes: &[u8]) -> Result<(Self, &[u8]), Error> + fn tls_deserialize_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> where Self: Sized; @@ -221,11 +210,11 @@ pub trait DeserializeBytes: Size { /// /// Returns an error if not all bytes are read from the input, or if an error /// occurs during deserialization. 
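
A minimal sketch of how the renamed `DeserializeBytes` methods from this patch are called, assuming some type `T` that implements `DeserializeBytes` (the helper `parse` is purely illustrative and not part of the diff):

```rust
use tls_codec::{DeserializeBytes, Error};

// Sketch only: `T` is any type implementing `DeserializeBytes`,
// e.g. one of the derive-macro test structs above.
fn parse<T: DeserializeBytes>(bytes: &[u8]) -> Result<T, Error> {
    // `tls_deserialize_bytes` returns the value plus the unread remainder...
    let (_value, _remainder) = T::tls_deserialize_bytes(bytes)?;
    // ...while `tls_deserialize_exact_bytes` additionally rejects trailing data.
    T::tls_deserialize_exact_bytes(bytes)
}
```
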
- fn tls_deserialize_exact(bytes: &[u8]) -> Result + fn tls_deserialize_exact_bytes(bytes: &[u8]) -> Result where Self: Sized, { - let (out, remainder) = Self::tls_deserialize(bytes)?; + let (out, remainder) = Self::tls_deserialize_bytes(bytes)?; if !remainder.is_empty() { return Err(Error::TrailingData); diff --git a/tls_codec/src/primitives.rs b/tls_codec/src/primitives.rs index ae2d2d18a..f788f6398 100644 --- a/tls_codec/src/primitives.rs +++ b/tls_codec/src/primitives.rs @@ -94,14 +94,14 @@ impl Deserialize for Option { impl DeserializeBytes for Option { #[inline] - fn tls_deserialize(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (some_or_none, remainder) = ::tls_deserialize(bytes)?; + fn tls_deserialize_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { + let (some_or_none, remainder) = ::tls_deserialize_bytes(bytes)?; match some_or_none { 0 => { Ok((None, remainder)) }, 1 => { - let (element, remainder) = T::tls_deserialize(remainder)?; + let (element, remainder) = T::tls_deserialize_bytes(remainder)?; Ok((Some(element), remainder)) }, _ => Err(Error::DecodingError(alloc::format!("Trying to decode Option with {} for option. It must be 0 for None and 1 for Some.", some_or_none))) @@ -123,7 +123,7 @@ macro_rules! impl_unsigned { impl DeserializeBytes for $t { #[inline] - fn tls_deserialize(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { + fn tls_deserialize_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { let len = core::mem::size_of::<$t>(); let out = bytes .get(..len) @@ -215,9 +215,9 @@ where U: DeserializeBytes, { #[inline(always)] - fn tls_deserialize(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (first_element, remainder) = T::tls_deserialize(bytes)?; - let (second_element, remainder) = U::tls_deserialize(remainder)?; + fn tls_deserialize_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { + let (first_element, remainder) = T::tls_deserialize_bytes(bytes)?; + let (second_element, remainder) = U::tls_deserialize_bytes(remainder)?; Ok(((first_element, second_element), remainder)) } } @@ -270,10 +270,10 @@ where V: DeserializeBytes, { #[inline(always)] - fn tls_deserialize(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (first_element, remainder) = T::tls_deserialize(bytes)?; - let (second_element, remainder) = U::tls_deserialize(remainder)?; - let (third_element, remainder) = V::tls_deserialize(remainder)?; + fn tls_deserialize_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { + let (first_element, remainder) = T::tls_deserialize_bytes(bytes)?; + let (second_element, remainder) = U::tls_deserialize_bytes(remainder)?; + let (third_element, remainder) = V::tls_deserialize_bytes(remainder)?; Ok(((first_element, second_element, third_element), remainder)) } } @@ -322,7 +322,7 @@ impl Deserialize for () { impl DeserializeBytes for () { #[inline(always)] - fn tls_deserialize(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { + fn tls_deserialize_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { Ok(((), bytes)) } } @@ -351,7 +351,7 @@ impl Deserialize for PhantomData { impl DeserializeBytes for PhantomData { #[inline(always)] - fn tls_deserialize(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { + fn tls_deserialize_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { Ok((PhantomData, bytes)) } } diff --git a/tls_codec/src/quic_vec.rs b/tls_codec/src/quic_vec.rs index 395ba19f6..6209e03a2 100644 --- a/tls_codec/src/quic_vec.rs +++ b/tls_codec/src/quic_vec.rs @@ -69,12 +69,12 @@ fn calculate_length(len_len_byte: u8) -> Result<(usize, usize), Error> { fn 
read_variable_length_bytes(bytes: &[u8]) -> Result<((usize, usize), &[u8]), Error> { // The length is encoded in the first two bits of the first byte. - let (len_len_byte, mut remainder) = ::tls_deserialize(bytes)?; + let (len_len_byte, mut remainder) = u8::tls_deserialize_bytes(bytes)?; let (mut length, len_len) = calculate_length(len_len_byte)?; for _ in 1..len_len { - let (next, next_remainder) = ::tls_deserialize(remainder)?; + let (next, next_remainder) = u8::tls_deserialize_bytes(remainder)?; remainder = next_remainder; length = (length << 8) + usize::from(next); } @@ -151,7 +151,7 @@ impl Size for &Vec { impl DeserializeBytes for Vec { #[inline(always)] - fn tls_deserialize(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { + fn tls_deserialize_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { let ((length, len_len), mut remainder) = read_variable_length_bytes(bytes)?; if length == 0 { @@ -162,7 +162,7 @@ impl DeserializeBytes for Vec { let mut result = Vec::new(); let mut read = len_len; while (read - len_len) < length { - let (element, next_remainder) = T::tls_deserialize(remainder)?; + let (element, next_remainder) = T::tls_deserialize_bytes(remainder)?; remainder = next_remainder; read += element.tls_serialized_len(); result.push(element); @@ -343,7 +343,7 @@ impl Size for VLBytes { impl DeserializeBytes for VLBytes { #[inline(always)] - fn tls_deserialize(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { + fn tls_deserialize_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { let ((length, _), remainder) = read_variable_length_bytes(bytes)?; if length == 0 { return Ok((Self::new(vec![]), remainder)); @@ -676,11 +676,11 @@ mod secret_bytes { } impl DeserializeBytes for SecretVLBytes { - fn tls_deserialize(bytes: &[u8]) -> Result<(Self, &[u8]), Error> + fn tls_deserialize_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> where Self: Sized, { - let (bytes, remainder) = ::tls_deserialize(bytes)?; + let (bytes, remainder) = VLBytes::tls_deserialize_bytes(bytes)?; Ok((Self(bytes), remainder)) } } @@ -696,7 +696,7 @@ mod secret_bytes { where Self: Sized, { - Ok(Self(::tls_deserialize(bytes)?)) + Ok(Self(VLBytes::tls_deserialize(bytes)?)) } } } diff --git a/tls_codec/src/tls_vec.rs b/tls_codec/src/tls_vec.rs index f24b2a4a6..7e91bd0b6 100644 --- a/tls_codec/src/tls_vec.rs +++ b/tls_codec/src/tls_vec.rs @@ -42,7 +42,7 @@ macro_rules! impl_byte_deserialize { #[cfg(feature = "std")] #[inline(always)] fn deserialize_bytes(bytes: &mut R) -> Result { - let len = <$size as Deserialize>::tls_deserialize(bytes)? as usize; + let len = <$size>::tls_deserialize(bytes)? as usize; // When fuzzing we limit the maximum size to allocate. // XXX: We should think about a configurable limit for the allocation // here. @@ -62,7 +62,7 @@ macro_rules! impl_byte_deserialize { #[inline(always)] fn deserialize_bytes_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { - let (type_len, remainder) = <$size as DeserializeBytes>::tls_deserialize(bytes)?; + let (type_len, remainder) = <$size>::tls_deserialize_bytes(bytes)?; let len = type_len as usize; // When fuzzing we limit the maximum size to allocate. // XXX: We should think about a configurable limit for the allocation @@ -89,7 +89,7 @@ macro_rules! 
impl_deserialize { #[inline(always)] fn deserialize(bytes: &mut R) -> Result { let mut result = Self { vec: Vec::new() }; - let len = <$size as Deserialize>::tls_deserialize(bytes)?; + let len = <$size>::tls_deserialize(bytes)?; let mut read = len.tls_serialized_len(); let len_len = read; while (read - len_len) < len as usize { @@ -107,12 +107,11 @@ macro_rules! impl_deserialize_bytes { #[inline(always)] fn deserialize_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { let mut result = Self { vec: Vec::new() }; - let (len, mut remainder) = <$size as DeserializeBytes>::tls_deserialize(bytes)?; + let (len, mut remainder) = <$size>::tls_deserialize_bytes(bytes)?; let mut read = len.tls_serialized_len(); let len_len = read; while (read - len_len) < len as usize { - let (element, next_remainder) = - ::tls_deserialize(remainder)?; + let (element, next_remainder) = T::tls_deserialize_bytes(remainder)?; remainder = next_remainder; read += element.tls_serialized_len(); result.push(element); @@ -261,7 +260,7 @@ macro_rules! impl_tls_vec_codec_generic { } impl DeserializeBytes for $name { - fn tls_deserialize(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { + fn tls_deserialize_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { Self::deserialize_bytes(bytes) } } @@ -306,7 +305,7 @@ macro_rules! impl_tls_vec_codec_bytes { } impl DeserializeBytes for $name { - fn tls_deserialize(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { + fn tls_deserialize_bytes(bytes: &[u8]) -> Result<(Self, &[u8]), Error> { Self::deserialize_bytes_bytes(bytes) } } diff --git a/tls_codec/tests/decode.rs b/tls_codec/tests/decode.rs index 2f4c6f0ad..38cfca02b 100644 --- a/tls_codec/tests/decode.rs +++ b/tls_codec/tests/decode.rs @@ -29,8 +29,8 @@ fn deserialize_option_bytes() { use tls_codec::DeserializeBytes; for b in [Some(0u8), None] { let b_encoded = b.tls_serialize_detached().expect("Unable to tls_serialize"); - let (b_decoded, remainder) = - Option::::tls_deserialize(b_encoded.as_slice()).expect("Unable to tls_deserialize"); + let (b_decoded, remainder) = Option::::tls_deserialize_bytes(b_encoded.as_slice()) + .expect("Unable to tls_deserialize"); assert!(remainder.is_empty()); @@ -43,19 +43,19 @@ fn deserialize_bytes_primitives() { use tls_codec::DeserializeBytes; let b = &[77u8, 88, 1, 99] as &[u8]; - let (a, remainder) = u8::tls_deserialize(b).expect("Unable to tls_deserialize"); + let (a, remainder) = u8::tls_deserialize_bytes(b).expect("Unable to tls_deserialize"); assert_eq!(1, a.tls_serialized_len()); assert_eq!(77, a); - let (a, remainder) = u8::tls_deserialize(remainder).expect("Unable to tls_deserialize"); + let (a, remainder) = u8::tls_deserialize_bytes(remainder).expect("Unable to tls_deserialize"); assert_eq!(1, a.tls_serialized_len()); assert_eq!(88, a); - let (a, remainder) = u16::tls_deserialize(remainder).expect("Unable to tls_deserialize"); + let (a, remainder) = u16::tls_deserialize_bytes(remainder).expect("Unable to tls_deserialize"); assert_eq!(2, a.tls_serialized_len()); assert_eq!(355, a); // It's empty now. 
assert!(remainder.is_empty()); - assert!(u8::tls_deserialize(remainder).is_err()) + assert!(u8::tls_deserialize_bytes(remainder).is_err()) } #[test] @@ -91,22 +91,22 @@ fn deserialize_bytes_tls_vec() { use tls_codec::DeserializeBytes; let b = &[1u8, 4, 77, 88, 1, 99] as &[u8]; - let (a, remainder) = u8::tls_deserialize(b).expect("Unable to tls_deserialize"); + let (a, remainder) = u8::tls_deserialize_bytes(b).expect("Unable to tls_deserialize"); assert_eq!(1, a); assert_eq!(1, a.tls_serialized_len()); println!("b: {b:?}"); let (v, remainder) = - TlsVecU8::::tls_deserialize(remainder).expect("Unable to tls_deserialize"); + TlsVecU8::::tls_deserialize_bytes(remainder).expect("Unable to tls_deserialize"); assert_eq!(5, v.tls_serialized_len()); assert_eq!(&[77, 88, 1, 99], v.as_slice()); // It's empty now. - assert!(u8::tls_deserialize(remainder).is_err()); + assert!(u8::tls_deserialize_bytes(remainder).is_err()); let long_vector = vec![77u8; 65535]; let serialized_long_vec = TlsSliceU16(&long_vector).tls_serialize_detached().unwrap(); let (deserialized_long_vec, _remainder) = - TlsVecU16::::tls_deserialize(serialized_long_vec.as_slice()).unwrap(); + TlsVecU16::::tls_deserialize_bytes(serialized_long_vec.as_slice()).unwrap(); assert_eq!( deserialized_long_vec.tls_serialized_len(), long_vector.len() + 2 @@ -171,10 +171,8 @@ fn deserialize_tuples() { { use tls_codec::DeserializeBytes; let (deserialized_bytes, _remainder) = - <(TlsVecU16, TlsVecU32) as DeserializeBytes>::tls_deserialize( - bytes.as_slice(), - ) - .expect("Error deserializing tuple."); + <(TlsVecU16, TlsVecU32)>::tls_deserialize_bytes(bytes.as_slice()) + .expect("Error deserializing tuple."); assert_eq!(deserialized_bytes, t); } @@ -199,7 +197,7 @@ fn deserialize_var_len_vec() { use tls_codec::DeserializeBytes; let serialized = v.tls_serialize_detached().expect("Error encoding vector"); let (deserialized, _remainder): (Vec, &[u8]) = - as DeserializeBytes>::tls_deserialize(serialized.as_slice()) + Vec::::tls_deserialize_bytes(serialized.as_slice()) .expect("Error deserializing vector"); assert_eq!(deserialized, v); } @@ -258,7 +256,7 @@ fn deserialize_bytes_tls_vl_bytes() { use tls_codec::DeserializeBytes; let b = &[4u8, 77, 88, 1, 99]; - let (v, remainder) = VLBytes::tls_deserialize(b).expect("Unable to tls_deserialize"); + let (v, remainder) = VLBytes::tls_deserialize_bytes(b).expect("Unable to tls_deserialize"); assert_eq!(5, v.tls_serialized_len()); assert_eq!(&[77, 88, 1, 99], v.as_slice()); @@ -269,7 +267,7 @@ fn deserialize_bytes_tls_vl_bytes() { let serialized_long_vec = VLByteSlice(&long_vector).tls_serialize_detached().unwrap(); std::println!("bytes: {:x?}", &serialized_long_vec[0..5]); let (deserialized_long_vec, remainder) = - VLBytes::tls_deserialize(serialized_long_vec.as_slice()).unwrap(); + VLBytes::tls_deserialize_bytes(serialized_long_vec.as_slice()).unwrap(); assert_eq!( deserialized_long_vec.tls_serialized_len(), long_vector.len() + 4 @@ -296,7 +294,7 @@ fn deserialize_tls_vl_invalid_length() { fn deserialize_bytes_tls_vl_invalid_length() { use tls_codec::DeserializeBytes; let b = &[0x40u8, 3, 10, 20, 30] as &[u8]; - let result = VLBytes::tls_deserialize(b); + let result = VLBytes::tls_deserialize_bytes(b); if cfg!(feature = "mls") { assert_eq!(result, Err(Error::InvalidVectorLength)); } else { @@ -319,8 +317,8 @@ fn deserialize_empty_vl_bytes() { fn deserialize_bytes_empty_vl_bytes() { use tls_codec::DeserializeBytes; let b: &[u8] = &[0x00]; - VLBytes::tls_deserialize(b).expect("Error parsing empty bytes"); + 
VLBytes::tls_deserialize_bytes(b).expect("Error parsing empty bytes"); let b: &[u8] = &[]; - VLBytes::tls_deserialize(b).expect_err("Empty bytes were parsed successfully"); + VLBytes::tls_deserialize_bytes(b).expect_err("Empty bytes were parsed successfully"); } diff --git a/tls_codec/tests/decode_bytes.rs b/tls_codec/tests/decode_bytes.rs index 06e341517..ff1484cca 100644 --- a/tls_codec/tests/decode_bytes.rs +++ b/tls_codec/tests/decode_bytes.rs @@ -3,7 +3,7 @@ use tls_codec::{DeserializeBytes, TlsByteVecU16, TlsByteVecU32, TlsByteVecU8}; #[test] fn deserialize_tls_byte_vec_u8() { let bytes = [3, 2, 1, 0]; - let (result, rest) = TlsByteVecU8::tls_deserialize(&bytes).unwrap(); + let (result, rest) = TlsByteVecU8::tls_deserialize_bytes(&bytes).unwrap(); let expected_result = [2, 1, 0]; assert_eq!(result.as_slice(), expected_result); assert_eq!(rest, []); @@ -12,7 +12,7 @@ fn deserialize_tls_byte_vec_u8() { #[test] fn deserialize_tls_byte_vec_u16() { let bytes = [0, 3, 2, 1, 0]; - let (result, rest) = TlsByteVecU16::tls_deserialize(&bytes).unwrap(); + let (result, rest) = TlsByteVecU16::tls_deserialize_bytes(&bytes).unwrap(); let expected_result = [2, 1, 0]; assert_eq!(result.as_slice(), expected_result); assert_eq!(rest, []); @@ -21,7 +21,7 @@ fn deserialize_tls_byte_vec_u16() { #[test] fn deserialize_tls_byte_vec_u32() { let bytes = [0, 0, 0, 3, 2, 1, 0]; - let (result, rest) = TlsByteVecU32::tls_deserialize(&bytes).unwrap(); + let (result, rest) = TlsByteVecU32::tls_deserialize_bytes(&bytes).unwrap(); let expected_result = [2, 1, 0]; assert_eq!(result.as_slice(), expected_result); assert_eq!(rest, []); From 1caeb97bcab0a2bdf34dbc92718d1cb3b25a4503 Mon Sep 17 00:00:00 2001 From: Konrad Kohbrok Date: Wed, 15 Nov 2023 10:07:40 +0100 Subject: [PATCH 2/5] tls_codec: feature for conditional deserialization derivation (#1214) --- .github/workflows/tls_codec.yml | 1 + tls_codec/Cargo.toml | 14 +- tls_codec/derive/Cargo.toml | 3 +- tls_codec/derive/src/lib.rs | 256 ++++++++++++++++++++++++++----- tls_codec/derive/tests/decode.rs | 46 ++++++ 5 files changed, 279 insertions(+), 41 deletions(-) diff --git a/.github/workflows/tls_codec.yml b/.github/workflows/tls_codec.yml index d4142bcef..12c259b3e 100644 --- a/.github/workflows/tls_codec.yml +++ b/.github/workflows/tls_codec.yml @@ -71,3 +71,4 @@ jobs: - uses: RustCrypto/actions/cargo-hack-install@master - run: cargo hack test --feature-powerset - run: cargo hack test -p tls_codec_derive --feature-powerset --test encode\* --test decode\* + - run: cargo hack test -p tls_codec_derive --feature-powerset --doc diff --git a/tls_codec/Cargo.toml b/tls_codec/Cargo.toml index a94c289ab..863309d99 100644 --- a/tls_codec/Cargo.toml +++ b/tls_codec/Cargo.toml @@ -27,12 +27,16 @@ criterion = { version = "0.5", default-features = false } regex = "1.8" [features] -default = [ "std" ] -arbitrary = [ "std", "dep:arbitrary" ] -derive = [ "tls_codec_derive" ] -serde = [ "std", "dep:serde" ] +default = ["std"] +arbitrary = ["std", "dep:arbitrary"] +derive = ["tls_codec_derive"] +serde = ["std", "dep:serde"] mls = [] # In MLS variable length vectors are limited compared to QUIC. 
-std = [ "tls_codec_derive?/std" ] +std = ["tls_codec_derive?/std"] +conditional_deserialization = [ + "derive", + "tls_codec_derive/conditional_deserialization", +] [[bench]] name = "tls_vec" diff --git a/tls_codec/derive/Cargo.toml b/tls_codec/derive/Cargo.toml index 6e3c10396..834096147 100644 --- a/tls_codec/derive/Cargo.toml +++ b/tls_codec/derive/Cargo.toml @@ -23,5 +23,6 @@ tls_codec = { path = "../" } trybuild = "1" [features] -default = [ "std" ] +default = ["std"] +conditional_deserialization = ["syn/full"] std = [] diff --git a/tls_codec/derive/src/lib.rs b/tls_codec/derive/src/lib.rs index 07abfad98..5966a44f2 100644 --- a/tls_codec/derive/src/lib.rs +++ b/tls_codec/derive/src/lib.rs @@ -1,21 +1,35 @@ //! # Derive macros for traits in `tls_codec` //! +//! Derive macros can be used to automatically implement the +//! [`Serialize`](../tls_codec::Serialize), +//! [`SerializeBytes`](../tls_codec::SerializeBytes), +//! [`Deserialize`](../tls_codec::Deserialize), +//! [`DeserializeBytes`](../tls_codec::DeserializeBytes), and +//! [`Size`](../tls_codec::Size) traits for structs and enums. Note that the +//! functions of the [`Serialize`](../tls_codec::Serialize) and +//! [`Deserialize`](../tls_codec::Deserialize) traits (and thus the +//! corresponding derive macros) require the `"std"` feature to work. +//! //! ## Warning //! -//! The derive macros support deriving the `tls_codec` traits for enumerations and the resulting -//! serialized format complies with [the "variants" section of the TLS RFC](https://datatracker.ietf.org/doc/html/rfc8446#section-3.8). -//! However support is limited to enumerations that are serialized with their discriminant -//! immediately followed by the variant data. If this is not appropriate (e.g. the format requires -//! other fields between the discriminant and variant data), the `tls_codec` traits can be -//! implemented manually. +//! The derive macros support deriving the `tls_codec` traits for enumerations +//! and the resulting serialized format complies with [the "variants" section of +//! the TLS RFC](https://datatracker.ietf.org/doc/html/rfc8446#section-3.8). +//! However support is limited to enumerations that are serialized with their +//! discriminant immediately followed by the variant data. If this is not +//! appropriate (e.g. the format requires other fields between the discriminant +//! and variant data), the `tls_codec` traits can be implemented manually. //! //! ## Parsing unknown values -//! In many cases it is necessary to deserialize structs with unknown values, e.g. -//! when receiving unknown TLS extensions. -//! In this case the deserialize function returns an `Error::UnknownValue` with -//! a `u64` value of the unknown type. +//! +//! In many cases it is necessary to deserialize structs with unknown values, +//! e.g. when receiving unknown TLS extensions. In this case the deserialize +//! function returns an `Error::UnknownValue` with a `u64` value of the unknown +//! type. //! //! ``` +//! # #[cfg(feature = "std")] +//! # { //! use tls_codec_derive::{TlsDeserialize, TlsSerialize, TlsSize}; //! //! #[derive(TlsDeserialize, TlsSerialize, TlsSize)] @@ -31,27 +45,35 @@ //! let deserialized = TypeWithUnknowns::tls_deserialize_exact(incoming); //! assert!(matches!(deserialized, Err(Error::UnknownValue(3)))); //! } +//! # } //! ``` //! //! ## Available attributes //! +//! Attributes can be used to control serialization and deserialization on a +//! per-field basis. +//! //! ### with //! //! ```text //! #[tls_codec(with = "prefix")] //! 
``` //! -//! This attribute may be applied to a struct field. It indicates that deriving any of the -//! `tls_codec` traits for the containing struct calls the following functions: +//! This attribute may be applied to a struct field. It indicates that deriving +//! any of the `tls_codec` traits for the containing struct calls the following +//! functions: //! - `prefix::tls_deserialize` when deriving `Deserialize` //! - `prefix::tls_serialize` when deriving `Serialize` //! - `prefix::tls_serialized_len` when deriving `Size` //! -//! `prefix` can be a path to a module, type or trait where the functions are defined. +//! `prefix` can be a path to a module, type or trait where the functions are +//! defined. //! //! Their expected signatures match the corresponding methods in the traits. //! //! ``` +//! # #[cfg(feature = "std")] +//! # { //! use tls_codec_derive::{TlsSerialize, TlsSize}; //! //! #[derive(TlsSerialize, TlsSize)] @@ -72,6 +94,7 @@ //! TlsByteSliceU32(v).tls_serialize(writer) //! } //! } +//! # } //! ``` //! //! ### discriminant @@ -81,27 +104,34 @@ //! #[tls_codec(discriminant = "path::to::const::or::enum::Variant")] //! ``` //! -//! This attribute may be applied to an enum variant to specify the discriminant to use when -//! serializing it. If all variants are units (e.g. they do not have any data), this attribute -//! must not be used and the desired discriminants should be assigned to the variants using -//! standard Rust syntax (`Variant = Discriminant`). +//! This attribute may be applied to an enum variant to specify the discriminant +//! to use when serializing it. If all variants are units (e.g. they do not have +//! any data), this attribute must not be used and the desired discriminants +//! should be assigned to the variants using standard Rust syntax (`Variant = +//! Discriminant`). //! -//! For enumerations with non-unit variants, if no variant has this attribute, the serialization -//! discriminants will start from zero. If this attribute is used on a variant and the following -//! variant does not have it, its discriminant will be equal to the previous variant discriminant -//! plus 1. This behavior is referred to as "implicit discriminants". +//! For enumerations with non-unit variants, if no variant has this attribute, +//! the serialization discriminants will start from zero. If this attribute is +//! used on a variant and the following variant does not have it, its +//! discriminant will be equal to the previous variant discriminant plus 1. This +//! behavior is referred to as "implicit discriminants". //! -//! You can also provide paths that lead to `const` definitions or enum Variants. The important -//! thing is that any of those path expressions must resolve to something that can be coerced to -//! the `#[repr(enum_repr)]` of the enum. Please note that there are checks performed at compile -//! time to check if the provided value fits within the bounds of the `enum_repr` to avoid misuse. +//! You can also provide paths that lead to `const` definitions or enum +//! Variants. The important thing is that any of those path expressions must +//! resolve to something that can be coerced to the `#[repr(enum_repr)]` of the +//! enum. Please note that there are checks performed at compile time to check +//! if the provided value fits within the bounds of the `enum_repr` to avoid +//! misuse. //! -//! Note: When using paths *once* in your enum discriminants, as we do not have enough information -//! 
to deduce the next implicit discriminant (the constant expressions those paths resolve is only -//! evaluated at a later compilation stage than macros), you will be forced to use explicit -//! discriminants for all the other Variants of your enum. +//! Note: When using paths *once* in your enum discriminants, as we do not have +//! enough information to deduce the next implicit discriminant (the constant +//! expressions those paths resolve is only evaluated at a later compilation +//! stage than macros), you will be forced to use explicit discriminants for all +//! the other Variants of your enum. //! //! ``` +//! # #[cfg(feature = "std")] +//! # { //! use tls_codec_derive::{TlsSerialize, TlsSize}; //! //! const CONST_DISCRIMINANT: u8 = 5; @@ -130,7 +160,7 @@ //! #[tls_codec(discriminant = "CONST_DISCRIMINANT")] //! StaticConstant(u8), //! } -//! +//! # } //! ``` //! //! ### skip @@ -139,13 +169,16 @@ //! #[tls_codec(skip)] //! ``` //! -//! This attribute may be applied to a struct field to specify that it should be skipped. Skipping -//! means that the field at hand will neither be serialized into TLS bytes nor deserialized from TLS -//! bytes. For deserialization, it is required to populate the field with a known value. Thus, when -//! `skip` is used, the field type needs to implement the [Default] trait so it can be populated -//! with a default value. +//! This attribute may be applied to a struct field to specify that it should be +//! skipped. Skipping means that the field at hand will neither be serialized +//! into TLS bytes nor deserialized from TLS bytes. For deserialization, it is +//! required to populate the field with a known value. Thus, when `skip` is +//! used, the field type needs to implement the [Default] trait so it can be +//! populated with a default value. //! //! ``` +//! # #[cfg(feature = "std")] +//! # { //! use tls_codec_derive::{TlsSerialize, TlsDeserialize, TlsSize}; //! //! struct CustomStruct; @@ -163,6 +196,43 @@ //! b: CustomStruct, //! c: u8, //! } +//! # } +//! ``` +//! +//! ## Conditional deserialization via the `conditionally_deserializable` attribute macro +//! +//! In some cases, it can be useful to have two variants of a struct, where one +//! is deserializable and one isn't. For example, the deserializable variant of +//! the struct could represent an unverified message, where only verification +//! produces the verified variant. Further processing could then be restricted +//! to the undeserializable struct variant. +//! +//! A pattern like this can be created via the `conditionally_deserializable` +//! attribute macro (requires the `conditional_deserialization` feature flag). +//! +//! The macro adds a boolean const generic to the struct and creates two +//! aliases, one for the deserializable variant (with a "`Deserializable`" +//! prefix) and one for the undeserializable one (with an "`Undeserializable`" +//! prefix). +//! +//! ``` +//! # #[cfg(all(feature = "conditional_deserialization", feature = "std"))] +//! # { +//! use tls_codec::{Serialize, Deserialize}; +//! use tls_codec_derive::{TlsSerialize, TlsSize, conditionally_deserializable}; +//! +//! #[conditionally_deserializable] +//! #[derive(TlsSize, TlsSerialize, PartialEq, Debug)] +//! struct ExampleStruct { +//! a: u8, +//! b: u16, +//! } +//! +//! let undeserializable_struct = UndeserializableExampleStruct { a: 1, b: 2 }; +//! let serialized = undeserializable_struct.tls_serialize_detached().unwrap(); +//! let deserializable_struct = +//! 
DeserializableExampleStruct::tls_deserialize(&mut serialized.as_slice()).unwrap(); +//! # } //! ``` extern crate proc_macro; @@ -176,6 +246,9 @@ use syn::{ Expr, ExprLit, ExprPath, Field, Generics, Ident, Lit, Member, Meta, Result, Token, Type, }; +#[cfg(feature = "conditional_deserialization")] +use syn::{parse_quote, ConstParam, ImplGenerics, ItemStruct, TypeGenerics}; + /// Attribute name to identify attributes to be processed by derive-macros in this crate. const ATTR_IDENT: &str = "tls_codec"; @@ -895,6 +968,27 @@ fn impl_serialize(parsed_ast: TlsStruct, svariant: SerializeVariant) -> TokenStr } } +#[cfg(feature = "conditional_deserialization")] +fn restrict_conditional_generic( + impl_generics: ImplGenerics, + ty_generics: TypeGenerics, + deserializable: bool, +) -> (TokenStream2, TokenStream2) { + let impl_generics = quote! { #impl_generics } + .to_string() + .replace(" const IS_DESERIALIZABLE : bool ", "") + .replace("<>", "") + .parse() + .unwrap(); + let deserializable_string = if deserializable { "true" } else { "false" }; + let ty_generics = quote! { #ty_generics } + .to_string() + .replace("IS_DESERIALIZABLE", deserializable_string) + .parse() + .unwrap(); + (impl_generics, ty_generics) +} + #[allow(unused_variables)] fn impl_deserialize(parsed_ast: TlsStruct) -> TokenStream2 { match parsed_ast { @@ -914,6 +1008,9 @@ fn impl_deserialize(parsed_ast: TlsStruct) -> TokenStream2 { .map(|p| p.for_trait("Deserialize")) .collect::>(); let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); + #[cfg(feature = "conditional_deserialization")] + let (impl_generics, ty_generics) = + restrict_conditional_generic(impl_generics, ty_generics, true); quote! { impl #impl_generics tls_codec::Deserialize for #ident #ty_generics #where_clause { #[cfg(feature = "std")] @@ -1003,6 +1100,9 @@ fn impl_deserialize_bytes(parsed_ast: TlsStruct) -> TokenStream2 { .map(|p| p.for_trait("DeserializeBytes")) .collect::>(); let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); + #[cfg(feature = "conditional_deserialization")] + let (impl_generics, ty_generics) = + restrict_conditional_generic(impl_generics, ty_generics, true); quote! { impl #impl_generics tls_codec::DeserializeBytes for #ident #ty_generics #where_clause { fn tls_deserialize_bytes(bytes: &[u8]) -> core::result::Result<(Self, &[u8]), tls_codec::Error> { @@ -1102,3 +1202,89 @@ fn partition_skipped( (members_skip, member_prefixes_skip), ) } + +/// The `conditionally_deserializable` attribute macro takes as input either +/// `Bytes` or `Reader` and does the following: +/// * Add a boolean const generic to the struct indicating if the variant of the +/// struct is deserializable or not. +/// * Depending on the input derive either the `TlsDeserialize` or +/// `TlsDeserializeBytes` trait for the deserializable variant +/// * Create type aliases for the deserializable and undeserializable variant of +/// the struct, where the alias is the name of the struct prefixed with +/// `Deserializable` or `Undeserializable` respectively. +/// +/// The `conditionally_deserializable` attribute macro is only available if the +/// `conditional_deserialization` feature is enabled. 
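
To make the const-generic and alias mechanism described above concrete, here is a rough, hand-written approximation of the shape the attribute produces for the `ExampleStruct` from the example further up — not the literal macro expansion:

```rust
// Hand-written approximation, not the macro's actual output.
struct ExampleStruct<const IS_DESERIALIZABLE: bool> {
    a: u8,
    b: u16,
}

// The generated aliases: only the `true` variant gets the derived
// `Deserialize`/`DeserializeBytes` implementations.
type DeserializableExampleStruct = ExampleStruct<true>;
type UndeserializableExampleStruct = ExampleStruct<false>;
```
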
+/// +#[cfg_attr( + feature = "conditional_deserialization", + doc = r##" +```compile_fail +use tls_codec_derive::{TlsSerialize, TlsDeserialize, TlsSize, conditionally_deserializable}; + +#[conditionally_deserializable(Bytes)] +#[derive(TlsDeserialize, TlsSerialize, TlsSize)] +struct ExampleStruct { + pub a: u16, +} + +impl UndeserializableExampleStruct { + #[cfg(feature = "conditional_deserialization")] + fn deserialize(bytes: &[u8]) -> Result { + Self::tls_deserialize_exact(bytes) + } +} +``` +"## +)] +#[cfg(feature = "conditional_deserialization")] +#[proc_macro_attribute] +pub fn conditionally_deserializable( + _input: TokenStream, + annotated_item: TokenStream, +) -> TokenStream { + let annotated_item = parse_macro_input!(annotated_item as ItemStruct); + impl_conditionally_deserializable(annotated_item).into() +} + +#[cfg(feature = "conditional_deserialization")] +fn impl_conditionally_deserializable(mut annotated_item: ItemStruct) -> TokenStream2 { + let deserializable_const_generic: ConstParam = parse_quote! {const IS_DESERIALIZABLE: bool}; + // Add the DESERIALIZABLE const generic to the struct + annotated_item + .generics + .params + .push(deserializable_const_generic.into()); + // Derive both TlsDeserialize and TlsDeserializeBytes + let deserialize_bytes_implementation = + impl_deserialize_bytes(parse_ast(annotated_item.clone().into()).unwrap()); + let deserialize_implementation = + impl_deserialize(parse_ast(annotated_item.clone().into()).unwrap()); + let (impl_generics, ty_generics, _) = annotated_item.generics.split_for_impl(); + // Patch generics for use by the type aliases + let (_deserializable_impl_generics, deserializable_ty_generics) = + restrict_conditional_generic(impl_generics.clone(), ty_generics.clone(), true); + let (_undeserializable_impl_generics, undeserializable_ty_generics) = + restrict_conditional_generic(impl_generics.clone(), ty_generics.clone(), false); + let annotated_item_ident = annotated_item.ident.clone(); + // Create Alias Idents by adding prefixes + let deserializable_ident = Ident::new( + &format!("Deserializable{}", annotated_item_ident), + Span::call_site(), + ); + let undeserializable_ident = Ident::new( + &format!("Undeserializable{}", annotated_item_ident), + Span::call_site(), + ); + let annotated_item_visibility = annotated_item.vis.clone(); + quote! 
{ + #annotated_item + + #annotated_item_visibility type #undeserializable_ident = #annotated_item_ident #undeserializable_ty_generics; + #annotated_item_visibility type #deserializable_ident = #annotated_item_ident #deserializable_ty_generics; + + #deserialize_implementation + + #deserialize_bytes_implementation + } +} diff --git a/tls_codec/derive/tests/decode.rs b/tls_codec/derive/tests/decode.rs index 93d5fa430..d0907b25c 100644 --- a/tls_codec/derive/tests/decode.rs +++ b/tls_codec/derive/tests/decode.rs @@ -527,3 +527,49 @@ fn type_with_unknowns() { let deserialized = TypeWithUnknowns::tls_deserialize_exact(incoming); assert!(matches!(deserialized, Err(Error::UnknownValue(3)))); } + +#[cfg(feature = "conditional_deserialization")] +mod conditional_deserialization { + use tls_codec::{Deserialize, Serialize}; + use tls_codec_derive::{conditionally_deserializable, TlsSerialize, TlsSize}; + + #[test] + fn conditionally_deserializable_struct() { + #[conditionally_deserializable] + #[derive(TlsSize, TlsSerialize, PartialEq, Debug)] + struct ExampleStruct { + a: u8, + b: u16, + } + + let undeserializable_struct = UndeserializableExampleStruct { a: 1, b: 2 }; + let serialized = undeserializable_struct.tls_serialize_detached().unwrap(); + let deserializable_struct = + DeserializableExampleStruct::tls_deserialize(&mut serialized.as_slice()).unwrap(); + assert_eq!(deserializable_struct.a, undeserializable_struct.a); + assert_eq!(deserializable_struct.b, undeserializable_struct.b); + + #[conditionally_deserializable] + #[derive(TlsSize, TlsSerialize, PartialEq, Debug)] + struct SecondExampleStruct { + a: u8, + b: u16, + } + } + + #[test] + fn conditional_deserializable_struct_bytes() { + #[conditionally_deserializable] + #[derive(TlsSize, TlsSerialize, PartialEq, Debug)] + struct ExampleStruct { + a: u8, + b: u16, + } + let undeserializable_struct = UndeserializableExampleStruct { a: 1, b: 2 }; + let serialized = undeserializable_struct.tls_serialize_detached().unwrap(); + let deserializable_struct = + DeserializableExampleStruct::tls_deserialize_exact(&mut &*serialized).unwrap(); + assert_eq!(deserializable_struct.a, undeserializable_struct.a); + assert_eq!(deserializable_struct.b, undeserializable_struct.b); + } +} From 03ace3937bdacbbb1a27202bd11662c46dd305e1 Mon Sep 17 00:00:00 2001 From: Artyom Pavlov Date: Wed, 15 Nov 2023 17:47:03 +0300 Subject: [PATCH 3/5] Fix minimal-versions CI failure (#1255) --- .github/workflows/cms.yml | 3 +++ .github/workflows/pkcs12.yml | 3 +++ .github/workflows/pkcs7.yml | 3 +++ .github/workflows/x509-ocsp.yml | 3 +++ .github/workflows/x509-tsp.yml | 3 +++ 5 files changed, 15 insertions(+) diff --git a/.github/workflows/cms.yml b/.github/workflows/cms.yml index b1942ee38..1ba8a6498 100644 --- a/.github/workflows/cms.yml +++ b/.github/workflows/cms.yml @@ -45,6 +45,9 @@ jobs: uses: RustCrypto/actions/.github/workflows/minimal-versions.yml@master with: working-directory: ${{ github.workflow }} + # TODO: re-enable benches build when min version of proc-macro2 + # will be updated to 1.0.60+ + nightly_cmd: test: runs-on: ubuntu-latest diff --git a/.github/workflows/pkcs12.yml b/.github/workflows/pkcs12.yml index 999b0140d..0fb8427df 100644 --- a/.github/workflows/pkcs12.yml +++ b/.github/workflows/pkcs12.yml @@ -46,6 +46,9 @@ jobs: uses: RustCrypto/actions/.github/workflows/minimal-versions.yml@master with: working-directory: ${{ github.workflow }} + # TODO: re-enable benches build when min version of proc-macro2 + # will be updated to 1.0.60+ + nightly_cmd: test: 
runs-on: ubuntu-latest diff --git a/.github/workflows/pkcs7.yml b/.github/workflows/pkcs7.yml index b51604064..408ef48b0 100644 --- a/.github/workflows/pkcs7.yml +++ b/.github/workflows/pkcs7.yml @@ -44,6 +44,9 @@ jobs: uses: RustCrypto/actions/.github/workflows/minimal-versions.yml@master with: working-directory: ${{ github.workflow }} + # TODO: re-enable benches build when min version of proc-macro2 + # will be updated to 1.0.60+ + nightly_cmd: test: runs-on: ubuntu-latest diff --git a/.github/workflows/x509-ocsp.yml b/.github/workflows/x509-ocsp.yml index b2d83507a..c95aff5a9 100644 --- a/.github/workflows/x509-ocsp.yml +++ b/.github/workflows/x509-ocsp.yml @@ -44,6 +44,9 @@ jobs: uses: RustCrypto/actions/.github/workflows/minimal-versions.yml@master with: working-directory: ${{ github.workflow }} + # TODO: re-enable benches build when min version of proc-macro2 + # will be updated to 1.0.60+ + nightly_cmd: test: runs-on: ubuntu-latest diff --git a/.github/workflows/x509-tsp.yml b/.github/workflows/x509-tsp.yml index f4380ae7a..d15e9a088 100644 --- a/.github/workflows/x509-tsp.yml +++ b/.github/workflows/x509-tsp.yml @@ -47,6 +47,9 @@ jobs: uses: RustCrypto/actions/.github/workflows/minimal-versions.yml@master with: working-directory: ${{ github.workflow }} + # TODO: re-enable benches build when min version of proc-macro2 + # will be updated to 1.0.60+ + nightly_cmd: test: runs-on: ubuntu-latest From 7ea12d36ea26746af9e80de8795aab692f0b3426 Mon Sep 17 00:00:00 2001 From: Artyom Pavlov Date: Wed, 15 Nov 2023 19:06:23 +0300 Subject: [PATCH 4/5] Apply Clippy to tests and fix some lints (#1257) --- .github/workflows/workspace.yml | 4 ++-- cms/tests/builder.rs | 2 +- cms/tests/compressed_data.rs | 5 +---- cms/tests/digested_data.rs | 5 +---- cms/tests/encrypted_data.rs | 5 +---- cms/tests/enveloped_data.rs | 25 +++++-------------------- cms/tests/signed_data.rs | 6 ++---- cms/tests/tests_from_pkcs7_crate.rs | 9 +++------ pkcs12/tests/cert_tests.rs | 8 ++++---- pkcs5/src/pbes2/kdf.rs | 4 ++-- tls_codec/derive/tests/decode.rs | 2 +- x509-cert/tests/pkix_extensions.rs | 8 ++++---- 12 files changed, 27 insertions(+), 56 deletions(-) diff --git a/.github/workflows/workspace.yml b/.github/workflows/workspace.yml index 78608f233..39f59a4a1 100644 --- a/.github/workflows/workspace.yml +++ b/.github/workflows/workspace.yml @@ -21,9 +21,9 @@ jobs: - uses: actions/checkout@v4 - uses: dtolnay/rust-toolchain@master with: - toolchain: 1.71.0 + toolchain: 1.73.0 components: clippy - - run: cargo clippy --all-features + - run: cargo clippy --all --all-features --tests doc: runs-on: ubuntu-latest diff --git a/cms/tests/builder.rs b/cms/tests/builder.rs index 10e0bf8f8..3273829d0 100644 --- a/cms/tests/builder.rs +++ b/cms/tests/builder.rs @@ -344,7 +344,7 @@ fn test_build_pkcs7_scep_pkcsreq() { values: message_type_value, }; let mut sender_nonce_value: SetOfVec = Default::default(); - let nonce = OctetString::new(*&[42; 32]).unwrap(); + let nonce = OctetString::new([42; 32]).unwrap(); sender_nonce_value .insert(Any::new(Tag::OctetString, nonce.as_bytes()).unwrap()) .unwrap(); diff --git a/cms/tests/compressed_data.rs b/cms/tests/compressed_data.rs index 5d569fea8..28fe016bc 100644 --- a/cms/tests/compressed_data.rs +++ b/cms/tests/compressed_data.rs @@ -40,10 +40,7 @@ fn reencode_compressed_data_test() { // assemble a new ContentInfo and encode it let ci2 = ContentInfo { content_type: ci.content_type, - content: AnyRef::try_from(reencoded_data.as_slice()) - .unwrap() - .try_into() - .unwrap(), + content: 
AnyRef::try_from(reencoded_data.as_slice()).unwrap().into(), }; let reencoded_data_inci = ci2.to_der().unwrap(); diff --git a/cms/tests/digested_data.rs b/cms/tests/digested_data.rs index e4187d7f0..76338beb7 100644 --- a/cms/tests/digested_data.rs +++ b/cms/tests/digested_data.rs @@ -44,10 +44,7 @@ fn reencode_digested_data_test() { // assemble a new ContentInfo and encode it let ci2 = ContentInfo { content_type: ci.content_type, - content: AnyRef::try_from(reencoded_data.as_slice()) - .unwrap() - .try_into() - .unwrap(), + content: AnyRef::try_from(reencoded_data.as_slice()).unwrap().into(), }; let reencoded_data_inci = ci2.to_der().unwrap(); diff --git a/cms/tests/encrypted_data.rs b/cms/tests/encrypted_data.rs index c710d3426..27d1d56b7 100644 --- a/cms/tests/encrypted_data.rs +++ b/cms/tests/encrypted_data.rs @@ -46,10 +46,7 @@ fn reencode_encrypted_data_test() { // assemble a new ContentInfo and encode it let ci2 = ContentInfo { content_type: ci.content_type, - content: AnyRef::try_from(reencoded_data.as_slice()) - .unwrap() - .try_into() - .unwrap(), + content: AnyRef::try_from(reencoded_data.as_slice()).unwrap().into(), }; let reencoded_data_inci = ci2.to_der().unwrap(); diff --git a/cms/tests/enveloped_data.rs b/cms/tests/enveloped_data.rs index 8d2886213..9cd7019e8 100644 --- a/cms/tests/enveloped_data.rs +++ b/cms/tests/enveloped_data.rs @@ -79,10 +79,7 @@ fn reencode_enveloped_data_ktri_test() { // assemble a new ContentInfo and encode it let ci2 = ContentInfo { content_type: ci.content_type, - content: AnyRef::try_from(reencoded_data.as_slice()) - .unwrap() - .try_into() - .unwrap(), + content: AnyRef::try_from(reencoded_data.as_slice()).unwrap().into(), }; let reencoded_data_in_ci = ci2.to_der().unwrap(); @@ -180,10 +177,7 @@ fn reencode_enveloped_data_kari_test() { // assemble a new ContentInfo and encode it let ci2 = ContentInfo { content_type: ci.content_type, - content: AnyRef::try_from(reencoded_data.as_slice()) - .unwrap() - .try_into() - .unwrap(), + content: AnyRef::try_from(reencoded_data.as_slice()).unwrap().into(), }; let reencoded_data_in_ci = ci2.to_der().unwrap(); @@ -270,10 +264,7 @@ fn reencode_enveloped_data_pwri_test() { // assemble a new ContentInfo and encode it let ci2 = ContentInfo { content_type: ci.content_type, - content: AnyRef::try_from(reencoded_data.as_slice()) - .unwrap() - .try_into() - .unwrap(), + content: AnyRef::try_from(reencoded_data.as_slice()).unwrap().into(), }; let reencoded_data_in_ci = ci2.to_der().unwrap(); @@ -336,10 +327,7 @@ fn reencode_enveloped_data_kek_test() { // assemble a new ContentInfo and encode it let ci2 = ContentInfo { content_type: ci.content_type, - content: AnyRef::try_from(reencoded_data.as_slice()) - .unwrap() - .try_into() - .unwrap(), + content: AnyRef::try_from(reencoded_data.as_slice()).unwrap().into(), }; let reencoded_data_in_ci = ci2.to_der().unwrap(); @@ -498,10 +486,7 @@ fn reencode_enveloped_data_multi_test() { // assemble a new ContentInfo and encode it let ci2 = ContentInfo { content_type: ci.content_type, - content: AnyRef::try_from(reencoded_data.as_slice()) - .unwrap() - .try_into() - .unwrap(), + content: AnyRef::try_from(reencoded_data.as_slice()).unwrap().into(), }; let reencoded_data_in_ci = ci2.to_der().unwrap(); diff --git a/cms/tests/signed_data.rs b/cms/tests/signed_data.rs index efe0dae4a..35c97e3ec 100644 --- a/cms/tests/signed_data.rs +++ b/cms/tests/signed_data.rs @@ -32,8 +32,7 @@ fn trust_list_test() { content_type: ci.content_type, content: 
AnyRef::try_from(reencoded_signed_data.as_slice()) .unwrap() - .try_into() - .unwrap(), + .into(), }; let reencoded_der_signed_data_in_ci = ci2.to_der().unwrap(); @@ -62,8 +61,7 @@ fn reencode_signed_data_test() { content_type: ci.content_type, content: AnyRef::try_from(reencoded_signed_data.as_slice()) .unwrap() - .try_into() - .unwrap(), + .into(), }; let reencoded_der_signed_data_in_ci = ci2.to_der().unwrap(); diff --git a/cms/tests/tests_from_pkcs7_crate.rs b/cms/tests/tests_from_pkcs7_crate.rs index 8c75adf05..af5c34dc3 100644 --- a/cms/tests/tests_from_pkcs7_crate.rs +++ b/cms/tests/tests_from_pkcs7_crate.rs @@ -71,8 +71,7 @@ fn cms_decode_signed_mdm_example() { content_type: ci.content_type, content: AnyRef::try_from(reencoded_signed_data.as_slice()) .unwrap() - .try_into() - .unwrap(), + .into(), }; let reencoded_der_signed_data_in_ci = ci2.to_der().unwrap(); @@ -99,8 +98,7 @@ fn cms_decode_signed_scep_example() { content_type: ci.content_type, content: AnyRef::try_from(reencoded_signed_data.as_slice()) .unwrap() - .try_into() - .unwrap(), + .into(), }; let reencoded_der_signed_data_in_ci = ci2.to_der().unwrap(); @@ -137,8 +135,7 @@ fn cms_decode_signed_der() { content_type: ci.content_type, content: AnyRef::try_from(reencoded_signed_data.as_slice()) .unwrap() - .try_into() - .unwrap(), + .into(), }; let reencoded_der_signed_data_in_ci = ci2.to_der().unwrap(); diff --git a/pkcs12/tests/cert_tests.rs b/pkcs12/tests/cert_tests.rs index c5d744714..0f3b0b4bd 100644 --- a/pkcs12/tests/cert_tests.rs +++ b/pkcs12/tests/cert_tests.rs @@ -166,7 +166,7 @@ fn decode_sample_pfx() { let auth_safes = AuthenticatedSafe::from_der(auth_safes_os.as_bytes()).unwrap(); // Process first auth safe (from offset 34) - let auth_safe0 = auth_safes.get(0).unwrap(); + let auth_safe0 = auth_safes.first().unwrap(); assert_eq!(ID_ENCRYPTED_DATA, auth_safe0.content_type); let enc_data_os = &auth_safe0.content.to_der().unwrap(); let enc_data = EncryptedData::from_der(enc_data_os.as_slice()).unwrap(); @@ -183,11 +183,11 @@ fn decode_sample_pfx() { let params = pkcs8::pkcs5::pbes2::Parameters::from_der(&enc_params).unwrap(); - let scheme = pkcs5::EncryptionScheme::try_from(params.clone()).unwrap(); + let scheme = pkcs5::EncryptionScheme::from(params.clone()); let ciphertext_os = enc_data.enc_content_info.encrypted_content.clone().unwrap(); let mut ciphertext = ciphertext_os.as_bytes().to_vec(); let plaintext = scheme.decrypt_in_place("", &mut ciphertext).unwrap(); - let cert_bags = SafeContents::from_der(&plaintext).unwrap(); + let cert_bags = SafeContents::from_der(plaintext).unwrap(); for cert_bag in cert_bags { match cert_bag.bag_id { pkcs12::PKCS_12_CERT_BAG_OID => { @@ -589,7 +589,7 @@ fn decode_sample_pfx2() { let auth_safes = AuthenticatedSafe::from_der(auth_safes_os.as_bytes()).unwrap(); // Process first auth safe (from offset 34) - let auth_safe0 = auth_safes.get(0).unwrap(); + let auth_safe0 = auth_safes.first().unwrap(); assert_eq!(ID_DATA, auth_safe0.content_type); let auth_safe0_auth_safes_os = diff --git a/pkcs5/src/pbes2/kdf.rs b/pkcs5/src/pbes2/kdf.rs index 63378cbcb..4c666b658 100644 --- a/pkcs5/src/pbes2/kdf.rs +++ b/pkcs5/src/pbes2/kdf.rs @@ -355,11 +355,11 @@ impl<'a> From for AlgorithmIdentifierRef<'a> { impl Encode for Pbkdf2Prf { fn encoded_len(&self) -> der::Result { - AlgorithmIdentifierRef::try_from(*self)?.encoded_len() + AlgorithmIdentifierRef::from(*self).encoded_len() } fn encode(&self, writer: &mut impl Writer) -> der::Result<()> { - 
AlgorithmIdentifierRef::try_from(*self)?.encode(writer) + AlgorithmIdentifierRef::from(*self).encode(writer) } } diff --git a/tls_codec/derive/tests/decode.rs b/tls_codec/derive/tests/decode.rs index d0907b25c..feb487341 100644 --- a/tls_codec/derive/tests/decode.rs +++ b/tls_codec/derive/tests/decode.rs @@ -568,7 +568,7 @@ mod conditional_deserialization { let undeserializable_struct = UndeserializableExampleStruct { a: 1, b: 2 }; let serialized = undeserializable_struct.tls_serialize_detached().unwrap(); let deserializable_struct = - DeserializableExampleStruct::tls_deserialize_exact(&mut &*serialized).unwrap(); + DeserializableExampleStruct::tls_deserialize_exact(&*serialized).unwrap(); assert_eq!(deserializable_struct.a, undeserializable_struct.a); assert_eq!(deserializable_struct.b, undeserializable_struct.b); } diff --git a/x509-cert/tests/pkix_extensions.rs b/x509-cert/tests/pkix_extensions.rs index a20e9394d..9d9973a0a 100644 --- a/x509-cert/tests/pkix_extensions.rs +++ b/x509-cert/tests/pkix_extensions.rs @@ -402,7 +402,7 @@ fn decode_cert() { 0 => match dpn { DistributionPointName::FullName(gns) => { assert_eq!(1, gns.len()); - let gn = gns.get(0).unwrap(); + let gn = gns.first().unwrap(); match gn { GeneralName::UniformResourceIdentifier(uri) => { assert_eq!( @@ -422,7 +422,7 @@ fn decode_cert() { 1 => match dpn { DistributionPointName::FullName(gns) => { assert_eq!(1, gns.len()); - let gn = gns.get(0).unwrap(); + let gn = gns.first().unwrap(); match gn { GeneralName::UniformResourceIdentifier(uri) => { assert_eq!("ldap://ldap-pte.identrust.com.test/cn%3DIGC%20Root%20CA1%2Co%3DIdenTrust%2Cc%3DUS%3FcertificateRevocationList%3Bbinary", uri.to_string()); @@ -869,7 +869,7 @@ fn decode_idp() { let gns = GeneralNames::from_der(&hex!("305EA45C305A310B3009060355040613025553311F301D060355040A131654657374204365727469666963617465732032303137311C301A060355040B13136F6E6C79536F6D65526561736F6E7320434133310C300A0603550403130343524C")).unwrap(); assert_eq!(1, gns.len()); - if let GeneralName::DirectoryName(gn) = gns.get(0).unwrap() { + if let GeneralName::DirectoryName(gn) = gns.first().unwrap() { assert_eq!(4, gn.0.len()); } @@ -894,7 +894,7 @@ fn decode_idp() { DistributionPoint::from_der(&hex!("3062A060A05EA45C305A310B3009060355040613025553311F301D060355040A131654657374204365727469666963617465732032303137311C301A060355040B13136F6E6C79536F6D65526561736F6E7320434133310C300A0603550403130343524C")).unwrap(); if let DistributionPointName::FullName(dpn) = dp.distribution_point.unwrap() { assert_eq!(1, dpn.len()); - if let GeneralName::DirectoryName(gn) = dpn.get(0).unwrap() { + if let GeneralName::DirectoryName(gn) = dpn.first().unwrap() { assert_eq!(4, gn.0.len()); } } From c61f06635d0f85a8cf84734a5572a49710f3798b Mon Sep 17 00:00:00 2001 From: Artyom Pavlov Date: Thu, 16 Nov 2023 02:51:21 +0300 Subject: [PATCH 5/5] Update Cargo.lock (#1256) --- Cargo.lock | 63 +++++++++++++++++++++++--------------------- base32ct/Cargo.toml | 3 ++- base64ct/Cargo.toml | 3 ++- tls_codec/Cargo.toml | 3 +++ 4 files changed, 40 insertions(+), 32 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9acfb18b9..2fc71cdc3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -401,9 +401,9 @@ dependencies = [ [[package]] name = "crypto-bigint" -version = "0.5.3" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "740fe28e594155f10cfc383984cbefd529d7396050557148f79cb0f621204124" +checksum = "28f85c3514d2a6e64160359b45a3918c3b4178bcbf4ae5d03ab2d02e521c479a" dependencies = [ 
"generic-array", "rand_core", @@ -536,9 +536,9 @@ checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "errno" -version = "0.3.5" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac3e13f66a2f95e32a39eaa81f6b95d42878ca0e1db0c7543723dfe12557e860" +checksum = "7c18ee0ed65a5f1f81cac6b1d213b69c35fa47d4252ad41f1486dbd8226fe36e" dependencies = [ "libc", "windows-sys", @@ -680,9 +680,9 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.10" +version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" +checksum = "fe9006bed769170c11f845cf00c7c1e9092aeb3f268e007c3e760ac68008070f" dependencies = [ "cfg-if", "libc", @@ -821,9 +821,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.149" +version = "0.2.150" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a08173bc88b7955d1b3145aa561539096c421ac8debde8cbc3612ec635fee29b" +checksum = "89d92a4743f9a61002fae18374ed11e7973f530cb3a3255fb354818118b2203c" [[package]] name = "libm" @@ -833,9 +833,9 @@ checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" [[package]] name = "linux-raw-sys" -version = "0.4.10" +version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da2479e8c062e40bf0066ffa0bc823de0a9368974af99c9f6df941d2c231e03f" +checksum = "969488b55f8ac402214f3f5fd243ebb7206cf82de60d3172994707a4bcc2b829" [[package]] name = "memchr" @@ -1034,9 +1034,9 @@ checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" [[package]] name = "primeorder" -version = "0.13.2" +version = "0.13.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c2fcef82c0ec6eefcc179b978446c399b3cdf73c392c35604e399eee6df1ee3" +checksum = "c7dbe9ed3b56368bd99483eb32fe9c17fdd3730aebadc906918ce78d54c7eeb4" dependencies = [ "elliptic-curve", ] @@ -1267,9 +1267,9 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.21" +version = "0.38.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b426b0506e5d50a7d8dafcf2e81471400deb602392c7dd110815afb4eaf02a3" +checksum = "9ad981d6c340a49cdc40a1028d9c6084ec7e9fa33fcb839cab656a267071e234" dependencies = [ "bitflags 2.4.1", "errno", @@ -1348,9 +1348,9 @@ checksum = "836fa6a3e1e547f9a2c4040802ec865b5d85f4014efe00555d7090a3dcaa1090" [[package]] name = "serde" -version = "1.0.190" +version = "1.0.192" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91d3c334ca1ee894a2c6f6ad698fe8c435b76d504b13d436f0685d648d6d96f7" +checksum = "bca2a08484b285dcb282d0f67b26cadc0df8b19f8c12502c13d966bf9482f001" dependencies = [ "serde_derive", ] @@ -1367,9 +1367,9 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.190" +version = "1.0.192" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67c5609f394e5c2bd7fc51efda478004ea80ef42fee983d5c67a65e34f32c0e3" +checksum = "d6c7207fbec9faa48073f3e3074cbe553af6ea512d7c21ba46e434e70ea9fbc1" dependencies = [ "proc-macro2", "quote", @@ -1476,9 +1476,9 @@ dependencies = [ [[package]] name = "smallvec" -version = "1.11.1" +version = "1.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "942b4a808e05215192e39f4ab80813e599068285906cc91aa64f923db842bd5a" +checksum = "4dccd0940a2dcdf68d092b8cbab7dc0ad8fa938bf95787e1b916b0e3d0e8e970" 
[[package]] name = "spin" @@ -1521,9 +1521,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.38" +version = "2.0.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e96b79aaa137db8f61e26363a0c9b47d8b4ec75da28b7d1d614c2303e232408b" +checksum = "23e78b90f2fcf45d3e842032ce32e3f2d1545ba6636271dcbf24fa306d87be7a" dependencies = [ "proc-macro2", "quote", @@ -1553,9 +1553,9 @@ dependencies = [ [[package]] name = "termcolor" -version = "1.3.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6093bad37da69aab9d123a8091e4be0aa4a03e4d601ec641c327398315f62b64" +checksum = "ff1bc3d3f05aff0403e8ac0d92ced918ec05b666a43f83297ccef5bea8a3d449" dependencies = [ "winapi-util", ] @@ -1591,8 +1591,11 @@ dependencies = [ name = "tls_codec" version = "0.3.0" dependencies = [ + "anstyle", + "anstyle-parse", "arbitrary", "clap", + "clap_lex", "criterion", "regex", "serde", @@ -1613,9 +1616,9 @@ dependencies = [ [[package]] name = "toml" -version = "0.8.6" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ff9e3abce27ee2c9a37f9ad37238c1bdd4e789c84ba37df76aa4d528f5072cc" +checksum = "a1a195ec8c9da26928f773888e0742ca3ca1040c6cd859c919c9f59c1954ab35" dependencies = [ "serde", "serde_spanned", @@ -1634,9 +1637,9 @@ dependencies = [ [[package]] name = "toml_edit" -version = "0.20.7" +version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70f427fce4d84c72b5b732388bf4a9f4531b53f74e2887e3ecb2481f68f66d81" +checksum = "d34d383cd00a163b4a5b85053df514d45bc330f6de7737edfe0a93311d1eaa03" dependencies = [ "indexmap", "serde", @@ -1823,9 +1826,9 @@ checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" [[package]] name = "winnow" -version = "0.5.18" +version = "0.5.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "176b6138793677221d420fd2f0aeeced263f197688b36484660da767bca2fa32" +checksum = "829846f3e3db426d4cee4510841b71a8e58aa2a76b1132579487ae430ccd9c7b" dependencies = [ "memchr", ] diff --git a/base32ct/Cargo.toml b/base32ct/Cargo.toml index b71a629ab..c045f6369 100644 --- a/base32ct/Cargo.toml +++ b/base32ct/Cargo.toml @@ -18,7 +18,8 @@ rust-version = "1.60" [dev-dependencies] base32 = "0.4" -proptest = "1" +# pinned to preserve MSRV +proptest = { version = "=1.2.0", default-features = false, features = ["std"] } [features] alloc = [] diff --git a/base64ct/Cargo.toml b/base64ct/Cargo.toml index faea9ec19..dbb4ef0c5 100644 --- a/base64ct/Cargo.toml +++ b/base64ct/Cargo.toml @@ -18,7 +18,8 @@ rust-version = "1.60" [dev-dependencies] base64 = "0.21" -proptest = { version = "1", default-features = false, features = ["std"] } +# pinned to preserve MSRV +proptest = { version = "=1.2.0", default-features = false, features = ["std"] } [features] alloc = [] diff --git a/tls_codec/Cargo.toml b/tls_codec/Cargo.toml index 863309d99..c8b860d9a 100644 --- a/tls_codec/Cargo.toml +++ b/tls_codec/Cargo.toml @@ -23,6 +23,9 @@ serde = { version = "1.0.184", features = ["derive"], optional = true } [dev-dependencies] clap = "=4.3.23" # pinned to preserve MSRV +clap_lex = "=0.5.0" # pinned to preserve MSRV +anstyle = "=1.0.2" # pinned to preserve MSRV +anstyle-parse = "=0.2.1" # pinned to preserve MSRV criterion = { version = "0.5", default-features = false } regex = "1.8"
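
The Clippy pass in PATCH 4/5 (run as `cargo clippy --all --all-features --tests` on the 1.73.0 toolchain) is mostly mechanical. The cms/tests/builder.rs hunk is typical: `*&[42; 32]` takes a reference to a temporary and immediately dereferences it again, which Clippy flags (most likely as `deref_addrof`; the patch itself does not name the lint). A minimal stand-alone sketch of the same fix, using an invented helper rather than the real `OctetString` call:

```rust
// Sketch only: `nonce_bytes` is a made-up helper, not code from the patch.
// `*&expr` borrows a value and immediately dereferences it, so the pair can be dropped.
fn nonce_bytes() -> [u8; 32] {
    // Before: OctetString::new(*&[42; 32])  -- flagged by Clippy
    // After:  OctetString::new([42; 32])    -- identical value, no lint
    [42; 32]
}

fn main() {
    assert_eq!(nonce_bytes()[0], 42);
}
```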
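
Several of the cms test files repeat one change: `AnyRef::try_from(bytes).unwrap().try_into().unwrap()` becomes `AnyRef::try_from(bytes).unwrap().into()`. The parse from raw bytes stays fallible, but the second step has an infallible `From` conversion available, so routing it through `TryInto` only to unwrap an error that cannot occur is noise. A toy illustration with invented types standing in for the der crate's `AnyRef`/`Any`:

```rust
// Invented types; the point is the conversion pattern, not the der API.
struct BorrowedAny<'a>(&'a [u8]);
struct OwnedAny(Vec<u8>);

// An infallible conversion also provides `TryFrom` via the blanket impl,
// which is why the old `.try_into().unwrap()` compiled but added nothing.
impl<'a> From<BorrowedAny<'a>> for OwnedAny {
    fn from(any: BorrowedAny<'a>) -> Self {
        OwnedAny(any.0.to_vec())
    }
}

fn main() {
    let der_bytes = [0x04u8, 0x02, 0xde, 0xad];
    let borrowed = BorrowedAny(&der_bytes);
    // Before: let owned: OwnedAny = borrowed.try_into().unwrap();
    // After (what the updated tests do):
    let owned: OwnedAny = borrowed.into();
    assert_eq!(owned.0, der_bytes.to_vec());
}
```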
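
The pkcs12 and x509-cert test hunks swap `.get(0)` for `.first()`, which is Clippy's `get_first` suggestion: ask for the first element directly rather than indexing with a literal 0. A small sketch with made-up data:

```rust
// Data here is invented; only the accessor change mirrors the patch.
fn main() {
    let auth_safes = ["encrypted-data", "data"];
    // Before: let auth_safe0 = auth_safes.get(0).unwrap();  // clippy::get_first
    let auth_safe0 = auth_safes.first().unwrap();
    assert_eq!(*auth_safe0, "encrypted-data");
}
```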
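
The pkcs5 pbes2/kdf.rs hunk follows the same theme as the pkcs12 `EncryptionScheme` change: a plain `From` impl already covers the conversion, so the `Encode` methods can call `from(*self)` instead of threading `?` through `try_from(*self)` on a conversion that cannot fail. A toy version with invented types (not the real pkcs5/spki API):

```rust
// Invented stand-ins for Pbkdf2Prf and AlgorithmIdentifierRef.
enum Prf {
    HmacWithSha256,
}

struct AlgorithmId(&'static str);

impl From<Prf> for AlgorithmId {
    fn from(prf: Prf) -> Self {
        match prf {
            Prf::HmacWithSha256 => AlgorithmId("hmacWithSHA256"),
        }
    }
}

fn encoded_len(prf: Prf) -> usize {
    // Before: AlgorithmId::try_from(prf)?.0.len()  -- `?` on an infallible conversion
    AlgorithmId::from(prf).0.len()
}

fn main() {
    assert_eq!(encoded_len(Prf::HmacWithSha256), "hmacWithSHA256".len());
}
```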