From ccced985cb16a7927cc58b1f5f4298eb82b0917b Mon Sep 17 00:00:00 2001 From: rlee287 Date: Mon, 9 Oct 2023 13:11:03 -0700 Subject: [PATCH 01/12] Initial committing AE code, with marker traits and padding AEAD TODO: bikeshedding of names and constructions that need changes to the underlying AEAD traits --- Cargo.lock | 1 + aead/Cargo.toml | 2 + aead/src/committing_aead.rs | 162 ++++++++++++++++++++++++++++++++++++ aead/src/lib.rs | 3 + 4 files changed, 168 insertions(+) create mode 100644 aead/src/committing_aead.rs diff --git a/Cargo.lock b/Cargo.lock index f878923e3..2033c0a1a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -12,6 +12,7 @@ dependencies = [ "crypto-common 0.1.6", "generic-array", "heapless", + "subtle", ] [[package]] diff --git a/aead/Cargo.toml b/aead/Cargo.toml index 29eb0bf26..a0ca896f9 100644 --- a/aead/Cargo.toml +++ b/aead/Cargo.toml @@ -24,6 +24,7 @@ arrayvec = { version = "0.7", optional = true, default-features = false } blobby = { version = "0.3", optional = true } bytes = { version = "1", optional = true, default-features = false } heapless = { version = "0.7", optional = true, default-features = false } +subtle = { version = "2.5", optional = true, default-features = false } [features] default = ["rand_core"] @@ -33,6 +34,7 @@ dev = ["blobby"] getrandom = ["crypto-common/getrandom", "rand_core"] rand_core = ["crypto-common/rand_core"] stream = [] +committing_ae = ["subtle"] [package.metadata.docs.rs] all-features = true diff --git a/aead/src/committing_aead.rs b/aead/src/committing_aead.rs new file mode 100644 index 000000000..101b5449b --- /dev/null +++ b/aead/src/committing_aead.rs @@ -0,0 +1,162 @@ +//! Committing AEAD support. +//! +//! Marker trait for Committing AEADs along with constructions that give +//! key-committing properties to normal AEADs. +//! +//! ## About +//! +//! While AEADs provide confidentiality and integrity, many of them do not +//! provide a committment for their inputs. One consequence is that, for +//! example, [it is possible to construct an AES-GCM ciphertext that correctly +//! decrypts under two different keys into two different plaintexts][1]. +//! Moreover, the lack of committment properties has lead to breaks in real +//! cryptographic protocols, e.g. the password-authenticated key exchange +//! [OPAQUE][2] and the Shadowsocks proxy, as described in [3]. +//! +//! This module provides the [`KeyCommittingAead`] marker trait to indicate that +//! an AEAD commits to its key, along with the [`CommittingAead`] marker trait +//! to incidate that an AEAD commits to all of its inputs. (This can +//! equivalently be thought of as collision resistance of an AEAD with respect +//! to its inputs.) When the `committing_ae` feature is enabled, it also +//! provides a [padding construction][4] that wraps an AEAD using ideal +//! primitives and makes it key-committing. +//! +//! [1]: https://eprint.iacr.org/2019/016.pdf +//! [2]: https://eprint.iacr.org/2018/163.pdf +//! [3]: https://www.usenix.org/system/files/sec21summer_len.pdf +//! [4]: https://eprint.iacr.org/2020/1456.pdf +//! + +use crate::AeadCore; + +#[cfg(feature = "committing_ae")] +#[cfg_attr(docsrs, doc(cfg(feature = "committing_ae")))] +pub use padded_aead::PaddedAead; + +/// Marker trait that signals that an AEAD commits to its key. +pub trait KeyCommittingAead: AeadCore {} +/// Marker trait that signals that an AEAD commits to all its inputs. 
+pub trait CommittingAead: AeadCore+KeyCommittingAead {} + +#[cfg(feature = "committing_ae")] +#[cfg_attr(docsrs, doc(cfg(feature = "committing_ae")))] +mod padded_aead { + use crate::{AeadCore, AeadInPlace}; + use crypto_common::{KeyInit, KeySizeUser}; + use core::ops::{Add, Mul}; + use generic_array::ArrayLength; + use generic_array::typenum::{U2, Unsigned}; + use subtle::{Choice, ConstantTimeEq}; + use super::KeyCommittingAead; + + #[cfg(feature = "committing_ae")] + #[cfg_attr(docsrs, doc(cfg(feature = "committing_ae")))] + #[derive(Debug, Clone)] + /// A wrapper around a non-committing AEAD that implements the + /// "padding fix" from of prepending + /// `2*key_len` zeros to the plaintext before encryption and verifying + /// their presence upon decryption. + /// + /// The linked paper proves that this construction is key-committing for + /// AES-GCM, ChaCha20Poly1305, and other AEADs that internally use + /// primitives that can be modelled as ideal. However, security is not + /// guaranteed with weak primitives. For example, e.g. HMAC-SHA-1 can be + /// used as a MAC in normal circumstances because HMAC does not require a + /// collision resistant hash, but an AEAD using HMAC-SHA-1 to provide + /// integrity cannot be made committing using this padding scheme. + pub struct PaddedAead { + inner_aead: Aead, + } + impl PaddedAead { + /// Extracts the inner Aead object. + #[inline] + pub fn into_inner(self) -> Aead { + self.inner_aead + } + } + + impl KeySizeUser for PaddedAead { + type KeySize = Aead::KeySize; + } + impl KeyInit for PaddedAead { + fn new(key: &crypto_common::Key) -> Self { + PaddedAead { + inner_aead: Aead::new(key), + } + } + } + impl AeadCore for PaddedAead + where + Aead::CiphertextOverhead: Add<>::Output>, + ::KeySize: Mul, + <::CiphertextOverhead as Add<<::KeySize as Mul>::Output>>::Output: ArrayLength + { + type NonceSize = Aead::NonceSize; + + type TagSize = Aead::TagSize; + + type CiphertextOverhead = >::Output>>::Output; + } + // TODO: don't see a way to provide impls for both AeadInPlace + // and AeadMutInPlace + impl AeadInPlace for PaddedAead + where + Self: AeadCore + { + fn encrypt_in_place_detached( + &self, + nonce: &crate::Nonce, + associated_data: &[u8], + buffer: &mut [u8], + ) -> crate::Result> { + let offset_amount = Aead::CiphertextOverhead::to_usize() + +2*Aead::KeySize::to_usize(); + buffer.copy_within(..buffer.len()-offset_amount, offset_amount); + buffer[..offset_amount].fill(0x00); + + // Compiler can't see that Self::NonceSize == Aead::NonceSize + let nonce_recast = crate::Nonce::::from_slice(nonce.as_slice()); + + let tag_inner = self.inner_aead.encrypt_in_place_detached(nonce_recast, associated_data, buffer)?; + + // Compiler can't see that Self::TagSize == Aead::TagSize + let tag_recast = crate::Tag::::clone_from_slice(tag_inner.as_slice()); + Ok(tag_recast) + } + + fn decrypt_in_place_detached( + &self, + nonce: &crate::Nonce, + associated_data: &[u8], + buffer: &mut [u8], + tag: &crate::Tag, + ) -> crate::Result<()> { + // Compiler can't see that Self::NonceSize == Aead::NonceSize + // Ditto for Self::TagSize == Aead::TagSize + let nonce_recast = crate::Nonce::::from_slice(nonce.as_slice()); + let tag_recast = crate::Tag::::from_slice(tag.as_slice()); + + let tag_is_ok = Choice::from(match self.inner_aead.decrypt_in_place_detached(nonce_recast, associated_data, buffer, tag_recast) { + Ok(_) => 1, + Err(_) => 0 + }); + + let offset_amount = Aead::CiphertextOverhead::to_usize() + +2*Aead::KeySize::to_usize(); + // Do the loop because the 
slice ct_eq requires constructing + // [0; offset_amount], which requires an allocation + let mut pad_is_ok = Choice::from(1); + for element in &buffer[..offset_amount] { + pad_is_ok = pad_is_ok & element.ct_eq(&0); + } + buffer.copy_within(offset_amount.., 0); + if (tag_is_ok & pad_is_ok).into() { + Ok(()) + } else { + Err(crate::Error) + } + } + } + impl KeyCommittingAead for PaddedAead + where Self: AeadCore {} +} \ No newline at end of file diff --git a/aead/src/lib.rs b/aead/src/lib.rs index 700667998..b5f239496 100644 --- a/aead/src/lib.rs +++ b/aead/src/lib.rs @@ -36,6 +36,9 @@ pub mod dev; #[cfg_attr(docsrs, doc(cfg(feature = "stream")))] pub mod stream; +// Include the module unconditionally because of marker traits +pub mod committing_aead; + pub use crypto_common::{Key, KeyInit, KeySizeUser}; pub use generic_array::{self, typenum::consts}; From 3cc50a8d29c586b028e60c814067c6d8d8d62379 Mon Sep 17 00:00:00 2001 From: rlee287 Date: Sun, 22 Oct 2023 14:01:16 -0700 Subject: [PATCH 02/12] Documentation revisions and increase padding fix len to 3*key_size --- aead/src/committing_aead.rs | 34 ++++++++++++++++++++-------------- 1 file changed, 20 insertions(+), 14 deletions(-) diff --git a/aead/src/committing_aead.rs b/aead/src/committing_aead.rs index 101b5449b..029f48ceb 100644 --- a/aead/src/committing_aead.rs +++ b/aead/src/committing_aead.rs @@ -11,15 +11,15 @@ //! decrypts under two different keys into two different plaintexts][1]. //! Moreover, the lack of committment properties has lead to breaks in real //! cryptographic protocols, e.g. the password-authenticated key exchange -//! [OPAQUE][2] and the Shadowsocks proxy, as described in [3]. +//! [OPAQUE][2] and the Shadowsocks proxy, as described in [this paper +//! introducing partitioning oracle attacks]. //! //! This module provides the [`KeyCommittingAead`] marker trait to indicate that //! an AEAD commits to its key, along with the [`CommittingAead`] marker trait //! to incidate that an AEAD commits to all of its inputs. (This can //! equivalently be thought of as collision resistance of an AEAD with respect //! to its inputs.) When the `committing_ae` feature is enabled, it also -//! provides a [padding construction][4] that wraps an AEAD using ideal -//! primitives and makes it key-committing. +//! provides constructions that wrap an AEAD and make it committing. //! //! [1]: https://eprint.iacr.org/2019/016.pdf //! [2]: https://eprint.iacr.org/2018/163.pdf @@ -45,7 +45,7 @@ mod padded_aead { use crypto_common::{KeyInit, KeySizeUser}; use core::ops::{Add, Mul}; use generic_array::ArrayLength; - use generic_array::typenum::{U2, Unsigned}; + use generic_array::typenum::{U3, Unsigned}; use subtle::{Choice, ConstantTimeEq}; use super::KeyCommittingAead; @@ -53,17 +53,21 @@ mod padded_aead { #[cfg_attr(docsrs, doc(cfg(feature = "committing_ae")))] #[derive(Debug, Clone)] /// A wrapper around a non-committing AEAD that implements the - /// "padding fix" from of prepending - /// `2*key_len` zeros to the plaintext before encryption and verifying - /// their presence upon decryption. + /// [padding fix][1] of prepending zeros to the plaintext before encryption + /// and verifying their presence upon decryption. Based on the formulas + /// of [2], we append `3*key_len` zeros to obtain `3/4*key_len` bits of + /// key committment security. 
/// - /// The linked paper proves that this construction is key-committing for - /// AES-GCM, ChaCha20Poly1305, and other AEADs that internally use + /// The padding fix paper proves that this construction is key-committing + /// for AES-GCM, ChaCha20Poly1305, and other AEADs that internally use /// primitives that can be modelled as ideal. However, security is not /// guaranteed with weak primitives. For example, e.g. HMAC-SHA-1 can be /// used as a MAC in normal circumstances because HMAC does not require a /// collision resistant hash, but an AEAD using HMAC-SHA-1 to provide /// integrity cannot be made committing using this padding scheme. + /// + /// [1]: https://eprint.iacr.org/2020/1456.pdf + /// [2]: https://csrc.nist.gov/csrc/media/Events/2023/third-workshop-on-block-cipher-modes-of-operation/documents/accepted-papers/The%20Landscape%20of%20Committing%20Authenticated%20Encryption.pdf pub struct PaddedAead { inner_aead: Aead, } @@ -87,18 +91,20 @@ mod padded_aead { } impl AeadCore for PaddedAead where - Aead::CiphertextOverhead: Add<>::Output>, - ::KeySize: Mul, - <::CiphertextOverhead as Add<<::KeySize as Mul>::Output>>::Output: ArrayLength + Aead::CiphertextOverhead: Add<>::Output>, + Aead::KeySize: Mul, + >::Output>>::Output: ArrayLength { type NonceSize = Aead::NonceSize; type TagSize = Aead::TagSize; - type CiphertextOverhead = >::Output>>::Output; + type CiphertextOverhead = >::Output>>::Output; } // TODO: don't see a way to provide impls for both AeadInPlace - // and AeadMutInPlace + // and AeadMutInPlace, as having both would conflict with the blanket impl + // Choose AeadInPlace because all the current rustcrypto/AEADs do not have + // a mutable state impl AeadInPlace for PaddedAead where Self: AeadCore From 6e578559523881407af774dc73455749a00fd562 Mon Sep 17 00:00:00 2001 From: rlee287 Date: Sun, 22 Oct 2023 14:01:47 -0700 Subject: [PATCH 03/12] Add Cargo.{toml, lock} changes for CTX-like constructions --- Cargo.lock | 2 ++ aead/Cargo.toml | 7 ++++++- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/Cargo.lock b/Cargo.lock index 2033c0a1a..151fc6a5a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10,8 +10,10 @@ dependencies = [ "blobby", "bytes", "crypto-common 0.1.6", + "digest 0.10.7", "generic-array", "heapless", + "hmac 0.12.1", "subtle", ] diff --git a/aead/Cargo.toml b/aead/Cargo.toml index a0ca896f9..5d58132a0 100644 --- a/aead/Cargo.toml +++ b/aead/Cargo.toml @@ -24,7 +24,12 @@ arrayvec = { version = "0.7", optional = true, default-features = false } blobby = { version = "0.3", optional = true } bytes = { version = "1", optional = true, default-features = false } heapless = { version = "0.7", optional = true, default-features = false } +# Dependencies needed for committing_ae subtle = { version = "2.5", optional = true, default-features = false } +# Keep digest at 0.10.x instead of 0.11.0-pre for now +# Avoids depending on multiple versions of crypto-common +digest = { version = "0.10.7", optional = true, features = ["mac"] } +hmac = { version = "0.12.1", optional = true } [features] default = ["rand_core"] @@ -34,7 +39,7 @@ dev = ["blobby"] getrandom = ["crypto-common/getrandom", "rand_core"] rand_core = ["crypto-common/rand_core"] stream = [] -committing_ae = ["subtle"] +committing_ae = ["subtle", "digest", "hmac"] [package.metadata.docs.rs] all-features = true From d27e0f910d4aed2bf44b72493a7de8aca7806cd8 Mon Sep 17 00:00:00 2001 From: rlee287 Date: Sun, 22 Oct 2023 14:13:19 -0700 Subject: [PATCH 04/12] Implementation of the encryption direction of CTX 
--- aead/src/committing_aead.rs | 114 ++++++++++++++++++++++++++++++++++++ 1 file changed, 114 insertions(+) diff --git a/aead/src/committing_aead.rs b/aead/src/committing_aead.rs index 029f48ceb..2e6bd22d8 100644 --- a/aead/src/committing_aead.rs +++ b/aead/src/committing_aead.rs @@ -33,6 +33,10 @@ use crate::AeadCore; #[cfg_attr(docsrs, doc(cfg(feature = "committing_ae")))] pub use padded_aead::PaddedAead; +#[cfg(feature = "committing_ae")] +#[cfg_attr(docsrs, doc(cfg(feature = "committing_ae")))] +pub use ctx::CtxAead; + /// Marker trait that signals that an AEAD commits to its key. pub trait KeyCommittingAead: AeadCore {} /// Marker trait that signals that an AEAD commits to all its inputs. @@ -165,4 +169,114 @@ mod padded_aead { } impl KeyCommittingAead for PaddedAead where Self: AeadCore {} +} + +#[cfg(feature = "committing_ae")] +#[cfg_attr(docsrs, doc(cfg(feature = "committing_ae")))] +mod ctx { + use crate::{AeadCore, AeadInPlace}; + use crypto_common::{KeyInit, KeySizeUser}; + use digest::Digest; + use super::{KeyCommittingAead, CommittingAead}; + + #[cfg(feature = "committing_ae")] + #[cfg_attr(docsrs, doc(cfg(feature = "committing_ae")))] + #[derive(Debug, Clone)] + /// Implementation of the encryption portion of the + /// [CTX scheme](https://eprint.iacr.org/2022/1260.pdf). + /// + /// CTX wraps an AEAD and replaces the tag with + /// `H(key || nonce || aad || orig_tag)`, which is shown in the paper to + /// commit to all AEAD inputs as long as the hash is collision resistant. + /// This provides `hash_output_len/2` bits of committment security. + /// + /// Unfortunately there is currently no way to get the expected tag of the + /// inner AEAD using the current trait interfaces, so this struct only + /// implements the encryption direction. This may still be useful for + /// interfacing with other programs that use the CTX committing AE scheme. + pub struct CtxAead { + inner_aead: Aead, + hasher: CrHash + } + impl CtxAead { + /// Extracts the inner Aead object. 
+ #[inline] + pub fn into_inner(self) -> Aead { + self.inner_aead + } + } + + impl KeySizeUser for CtxAead { + type KeySize = Aead::KeySize; + } + impl KeyInit for CtxAead { + fn new(key: &crypto_common::Key) -> Self { + CtxAead { + inner_aead: Aead::new(key), + hasher: Digest::new_with_prefix(key) + } + } + } + + impl AeadCore for CtxAead + { + type NonceSize = Aead::NonceSize; + + type TagSize = CrHash::OutputSize; + + type CiphertextOverhead = Aead::CiphertextOverhead; + } + + // TODO: don't see a way to provide impls for both AeadInPlace + // and AeadMutInPlace, as having both would conflict with the blanket impl + // Choose AeadInPlace because all the current rustcrypto/AEADs do not have + // a mutable state + impl AeadInPlace for CtxAead + where + Self: AeadCore + { + fn encrypt_in_place_detached( + &self, + nonce: &crate::Nonce, + associated_data: &[u8], + buffer: &mut [u8], + ) -> crate::Result> { + // Compiler can't see that Self::NonceSize == Aead::NonceSize + let nonce_recast = crate::Nonce::::from_slice(nonce.as_slice()); + + let tag_inner = self.inner_aead.encrypt_in_place_detached(nonce_recast, associated_data, buffer)?; + + let mut tag_computer = self.hasher.clone(); + tag_computer.update(nonce); + tag_computer.update(associated_data); + tag_computer.update(tag_inner); + let final_tag = tag_computer.finalize(); + + // Compiler can't see that Self::TagSize == Digest::OutputSize + let tag_recast = crate::Tag::::clone_from_slice(final_tag.as_slice()); + Ok(tag_recast) + } + + /// Unimplemented decryption of the message in-place, which panics if + /// called + #[allow(unused_variables)] + fn decrypt_in_place_detached( + &self, + nonce: &crate::Nonce, + associated_data: &[u8], + buffer: &mut [u8], + tag: &crate::Tag, + ) -> crate::Result<()> { + // Compiler can't see that Self::NonceSize == Aead::NonceSize + let _nonce_recast = crate::Nonce::::from_slice(nonce.as_slice()); + unimplemented!("Cannot get inner AEAD tag using current AEAD interfaces") + // Remaning steps: compute expected tag during decryption, repeat + // hasher steps analogously to encryption phase, and compare tag + } + } + + impl KeyCommittingAead for CtxAead + where Self: AeadCore {} + impl CommittingAead for CtxAead + where Self: AeadCore {} } \ No newline at end of file From 157f3431a4b2343d00f619350ef71684b1b1baf2 Mon Sep 17 00:00:00 2001 From: rlee287 Date: Sun, 22 Oct 2023 14:28:22 -0700 Subject: [PATCH 05/12] Add a CtxishHmacAead committing AEAD construction While this exact construction hasn't appeared in the literature, it is similar to CTX, with the original tag being exposed to allow it to be implemented with the current AEAD traits, and a follow-up substitution of HMAC instead of a raw hash function to ensure that exposing the original tag does not open the committment up to length-extension type vulnerabilities. --- aead/src/committing_aead.rs | 167 +++++++++++++++++++++++++++++++++++- 1 file changed, 165 insertions(+), 2 deletions(-) diff --git a/aead/src/committing_aead.rs b/aead/src/committing_aead.rs index 2e6bd22d8..518bc25d8 100644 --- a/aead/src/committing_aead.rs +++ b/aead/src/committing_aead.rs @@ -37,6 +37,10 @@ pub use padded_aead::PaddedAead; #[cfg_attr(docsrs, doc(cfg(feature = "committing_ae")))] pub use ctx::CtxAead; +#[cfg(feature = "committing_ae")] +#[cfg_attr(docsrs, doc(cfg(feature = "committing_ae")))] +pub use ctx::CtxishHmacAead; + /// Marker trait that signals that an AEAD commits to its key. 
pub trait KeyCommittingAead: AeadCore {} /// Marker trait that signals that an AEAD commits to all its inputs. @@ -175,8 +179,13 @@ mod padded_aead { #[cfg_attr(docsrs, doc(cfg(feature = "committing_ae")))] mod ctx { use crate::{AeadCore, AeadInPlace}; - use crypto_common::{KeyInit, KeySizeUser}; - use digest::Digest; + use crypto_common::{KeyInit, KeySizeUser, BlockSizeUser}; + use core::ops::Add; + use digest::{Digest, Mac, FixedOutput}; + use hmac::SimpleHmac; + use generic_array::ArrayLength; + use generic_array::typenum::Unsigned; + use subtle::Choice; use super::{KeyCommittingAead, CommittingAead}; #[cfg(feature = "committing_ae")] @@ -279,4 +288,158 @@ mod ctx { where Self: AeadCore {} impl CommittingAead for CtxAead where Self: AeadCore {} + + #[cfg(feature = "committing_ae")] + #[cfg_attr(docsrs, doc(cfg(feature = "committing_ae")))] + #[derive(Debug, Clone)] + /// Implementation of a modified version of the CTX scheme. + /// + /// Instead of returning tag `H(key || nonce || aad || orig_tag)`, we return + /// `orig_tag || HMAC_key(nonce || aad || orig_tag)`. The AEAD API requires + /// that we treat the underlying AEAD as a black box, without access to the + /// expected tag at decryption time, so we have to also send it along with + /// the committment to the other inputs to the AEAD. (Ideally, the need to + /// send `orig_tag` as well can be removed in a future version of the + /// crate.) At decryption time, we verify both `orig_tag` and the hash + /// commitment. + /// + /// ## Security analysis for the modified construction + /// + /// HMAC invokes the underlying hash function twice such that the inputs to + /// the hash functions are computed only by XOR, concatenation, and hashing. + /// Thus, if we trust the underlying hash function to serve as a committment + /// to its inputs, we can also trust HMAC-hash to commit to its inputs and + /// provide `hash_output_len/2` bits of committment security, as with CTX. + /// + /// If the underlying AEAD provides proper confidentiality and integrity + /// protections, we can assume that this new construction also provides + /// propert confidentiality and integrity, since it has the same ciphertext + /// and includes the original tag without exposing cryptographic secrets in + /// a recoverable form. Moreover, HMAC is supposed to be a secure keyed MAC, + /// so an attacker cannot forge a commitment without knowing the key, even + /// with full knowledge of the other input to the HMAC. + /// + /// We use `HMAC_key(nonce || aad || orig_tag)` instead of the original CTX + /// construction of `H(key || nonce || aad || orig_tag)` to mitigate length + /// extension attacks that may become possible when `orig_tag` is sent in + /// the clear (with the result that `H(key || nonce || aad || orig_tag)` + /// decomposes into `H(secret || public)`), even though returning + /// `orig_tag || H(key || nonce || aad || orig_tag)` as the tag would allow + /// for increased interoperability with other CTX implementations. (In fact, + /// revealing `orig_tag` would be fatal for the CTX+ construction which + /// omits `aad` from the `orig_tag` computation by allowing forgery of the + /// hash committment via length extension on `aad`.) + pub struct CtxishHmacAead { + inner_aead: Aead, + hasher: SimpleHmac + } + impl CtxishHmacAead { + /// Extracts the inner Aead object. 
+ #[inline] + pub fn into_inner(self) -> Aead { + self.inner_aead + } + } + + impl KeySizeUser for CtxishHmacAead { + type KeySize = Aead::KeySize; + } + impl KeyInit for CtxishHmacAead { + fn new(key: &crypto_common::Key) -> Self { + CtxishHmacAead { + inner_aead: Aead::new(key), + hasher: as KeyInit>::new_from_slice(key).unwrap() + } + } + } + impl AeadCore for CtxishHmacAead + where + Aead::TagSize: Add, + >::Output: ArrayLength + { + type NonceSize = Aead::NonceSize; + + type TagSize = >::Output; + + type CiphertextOverhead = Aead::CiphertextOverhead; + } + // TODO: don't see a way to provide impls for both AeadInPlace + // and AeadMutInPlace, as having both would conflict with the blanket impl + // Choose AeadInPlace because all the current rustcrypto/AEADs do not have + // a mutable state + impl AeadInPlace for CtxishHmacAead + where + Self: AeadCore + { + fn encrypt_in_place_detached( + &self, + nonce: &crate::Nonce, + associated_data: &[u8], + buffer: &mut [u8], + ) -> crate::Result> { + // Compiler can't see that Self::NonceSize == Aead::NonceSize + let nonce_recast = crate::Nonce::::from_slice(nonce.as_slice()); + + let tag_inner = self.inner_aead.encrypt_in_place_detached(nonce_recast, associated_data, buffer)?; + + let mut tag_computer = self.hasher.clone(); + tag_computer.update(nonce); + tag_computer.update(associated_data); + tag_computer.update(&tag_inner); + let hmac_tag = tag_computer.finalize_fixed(); + + let final_tag_iter = tag_inner.iter().copied().chain(hmac_tag); + + let final_tag = crate::Tag::::from_exact_iter(final_tag_iter).unwrap(); + Ok(final_tag) + } + + #[allow(unused_variables)] + fn decrypt_in_place_detached( + &self, + nonce: &crate::Nonce, + associated_data: &[u8], + buffer: &mut [u8], + tag: &crate::Tag, + ) -> crate::Result<()> { + // Compiler can't see that Self::NonceSize == Aead::NonceSize + let nonce_recast = crate::Nonce::::from_slice(nonce.as_slice()); + // Get the inner tag + let tag_inner = crate::Tag::::from_slice(&tag[..Aead::TagSize::to_usize()]); + + // Prevent timing side channels by not returning early on inner AEAD + // decryption failure + let tag_inner_is_ok = Choice::from(match self.inner_aead.decrypt_in_place_detached(nonce_recast, associated_data, buffer, tag_inner) { + Ok(_) => 1, + Err(_) => 0 + }); + + let mut tag_computer = self.hasher.clone(); + tag_computer.update(nonce); + tag_computer.update(associated_data); + // At this point we know whether tag_inner has the correct value + // If it doesn't then we'll likely get a mismatch here too + // Regardless, we require both `Choice`s to be OK + // So it doesn't matter if we ingest a potentially tainted tag here + tag_computer.update(&tag_inner); + + // Get the HMAC tag + let expected_hmac_tag = &tag[Aead::TagSize::to_usize()..]; + + let hmac_tag_is_ok = Choice::from(match tag_computer.verify_slice(expected_hmac_tag) { + Ok(_) => 1, + Err(_) => 0 + }); + + if (tag_inner_is_ok & hmac_tag_is_ok).into() { + Ok(()) + } else { + Err(crate::Error) + } + } + } + impl KeyCommittingAead for CtxishHmacAead + where Self: AeadCore {} + impl CommittingAead for CtxishHmacAead + where Self: AeadCore {} } \ No newline at end of file From e845ebc974106feacf5a53dd3293896a6192caa9 Mon Sep 17 00:00:00 2001 From: rlee287 Date: Sun, 22 Oct 2023 23:26:14 -0700 Subject: [PATCH 06/12] Fix padding AEAD impl to use the new offset of 3*key_size --- aead/src/committing_aead.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/aead/src/committing_aead.rs b/aead/src/committing_aead.rs index 
518bc25d8..748a38035 100644 --- a/aead/src/committing_aead.rs +++ b/aead/src/committing_aead.rs @@ -124,7 +124,7 @@ mod padded_aead { buffer: &mut [u8], ) -> crate::Result> { let offset_amount = Aead::CiphertextOverhead::to_usize() - +2*Aead::KeySize::to_usize(); + +3*Aead::KeySize::to_usize(); buffer.copy_within(..buffer.len()-offset_amount, offset_amount); buffer[..offset_amount].fill(0x00); @@ -156,7 +156,7 @@ mod padded_aead { }); let offset_amount = Aead::CiphertextOverhead::to_usize() - +2*Aead::KeySize::to_usize(); + +3*Aead::KeySize::to_usize(); // Do the loop because the slice ct_eq requires constructing // [0; offset_amount], which requires an allocation let mut pad_is_ok = Choice::from(1); From 86edabd6917cd4dccdf9d4bc046e5cb498877a2e Mon Sep 17 00:00:00 2001 From: rlee287 Date: Sun, 22 Oct 2023 23:27:44 -0700 Subject: [PATCH 07/12] Workshop the committing AEAD documentation text some more --- aead/src/committing_aead.rs | 53 ++++++++++++++++++++++--------------- 1 file changed, 31 insertions(+), 22 deletions(-) diff --git a/aead/src/committing_aead.rs b/aead/src/committing_aead.rs index 748a38035..e405ce65d 100644 --- a/aead/src/committing_aead.rs +++ b/aead/src/committing_aead.rs @@ -3,29 +3,38 @@ //! Marker trait for Committing AEADs along with constructions that give //! key-committing properties to normal AEADs. //! -//! ## About +//! ## Why Committing AEADs? //! //! While AEADs provide confidentiality and integrity, many of them do not -//! provide a committment for their inputs. One consequence is that, for -//! example, [it is possible to construct an AES-GCM ciphertext that correctly -//! decrypts under two different keys into two different plaintexts][1]. -//! Moreover, the lack of committment properties has lead to breaks in real -//! cryptographic protocols, e.g. the password-authenticated key exchange -//! [OPAQUE][2] and the Shadowsocks proxy, as described in [this paper -//! introducing partitioning oracle attacks]. +//! provide a commitment for their inputs (which can equivalently be thought +//! of as collision resistance of an AEAD with respect to its inputs). The +//! lack of commitment properties has lead to breaks in real cryptographic +//! protocols, e.g. improper implementations of the password-authenticated +//! key exchange [OPAQUE][2] and the Shadowsocks proxy, as described in +//! a paper describing [partitioning oracle attacks][3]. //! +//! Concrete examples of popular AEADs that lack commitment properties: +//! - AEADs using polynomial-based MACs (e.g. AES-GCM and ChaCha20Poly1305) +//! do not commit to their inputs. [1] describes how to construct an +//! AES-GCM ciphertext that decrypts correctly under two different keys to +//! two different, semantically meaningful plaintexts. +//! - AEADs where decryption can be separated into parallel always-successful +//! plaintext recovery and tag computation+equality checking steps cannot +//! provide commitment when the tag computation function is not preimage +//! resistant. [5] provides concrete attacks against EAX, GCM, SIV, CCM, +//! and OCB3 that demonstrate that they are not key-commiting. +//! +//! ## Module contents //! This module provides the [`KeyCommittingAead`] marker trait to indicate that //! an AEAD commits to its key, along with the [`CommittingAead`] marker trait -//! to incidate that an AEAD commits to all of its inputs. (This can -//! equivalently be thought of as collision resistance of an AEAD with respect -//! to its inputs.) When the `committing_ae` feature is enabled, it also +//! 
to indicate that an AEAD commits to all of its inputs. When the `committing_ae` feature is enabled, it also //! provides constructions that wrap an AEAD and make it committing. //! //! [1]: https://eprint.iacr.org/2019/016.pdf //! [2]: https://eprint.iacr.org/2018/163.pdf //! [3]: https://www.usenix.org/system/files/sec21summer_len.pdf //! [4]: https://eprint.iacr.org/2020/1456.pdf -//! +//! [5]: https://eprint.iacr.org/2023/526.pdf use crate::AeadCore; @@ -64,7 +73,7 @@ mod padded_aead { /// [padding fix][1] of prepending zeros to the plaintext before encryption /// and verifying their presence upon decryption. Based on the formulas /// of [2], we append `3*key_len` zeros to obtain `3/4*key_len` bits of - /// key committment security. + /// key commitment security. /// /// The padding fix paper proves that this construction is key-committing /// for AES-GCM, ChaCha20Poly1305, and other AEADs that internally use @@ -158,7 +167,7 @@ mod padded_aead { let offset_amount = Aead::CiphertextOverhead::to_usize() +3*Aead::KeySize::to_usize(); // Do the loop because the slice ct_eq requires constructing - // [0; offset_amount], which requires an allocation + // [0; offset_amount], which requires more memory let mut pad_is_ok = Choice::from(1); for element in &buffer[..offset_amount] { pad_is_ok = pad_is_ok & element.ct_eq(&0); @@ -197,9 +206,9 @@ mod ctx { /// CTX wraps an AEAD and replaces the tag with /// `H(key || nonce || aad || orig_tag)`, which is shown in the paper to /// commit to all AEAD inputs as long as the hash is collision resistant. - /// This provides `hash_output_len/2` bits of committment security. + /// This provides `hash_output_len/2` bits of commitment security. /// - /// Unfortunately there is currently no way to get the expected tag of the + /// Unfortunately, there is currently no way to get the expected tag of the /// inner AEAD using the current trait interfaces, so this struct only /// implements the encryption direction. This may still be useful for /// interfacing with other programs that use the CTX committing AE scheme. @@ -298,7 +307,7 @@ mod ctx { /// `orig_tag || HMAC_key(nonce || aad || orig_tag)`. The AEAD API requires /// that we treat the underlying AEAD as a black box, without access to the /// expected tag at decryption time, so we have to also send it along with - /// the committment to the other inputs to the AEAD. (Ideally, the need to + /// the commitment to the other inputs to the AEAD. (Ideally, the need to /// send `orig_tag` as well can be removed in a future version of the /// crate.) At decryption time, we verify both `orig_tag` and the hash /// commitment. @@ -307,13 +316,13 @@ mod ctx { /// /// HMAC invokes the underlying hash function twice such that the inputs to /// the hash functions are computed only by XOR, concatenation, and hashing. - /// Thus, if we trust the underlying hash function to serve as a committment + /// Thus, if we trust the underlying hash function to serve as a commitment /// to its inputs, we can also trust HMAC-hash to commit to its inputs and - /// provide `hash_output_len/2` bits of committment security, as with CTX. + /// provide `hash_output_len/2` bits of commitment security, as with CTX. 
/// /// If the underlying AEAD provides proper confidentiality and integrity /// protections, we can assume that this new construction also provides - /// propert confidentiality and integrity, since it has the same ciphertext + /// proper confidentiality and integrity, since it has the same ciphertext /// and includes the original tag without exposing cryptographic secrets in /// a recoverable form. Moreover, HMAC is supposed to be a secure keyed MAC, /// so an attacker cannot forge a commitment without knowing the key, even @@ -328,7 +337,7 @@ mod ctx { /// for increased interoperability with other CTX implementations. (In fact, /// revealing `orig_tag` would be fatal for the CTX+ construction which /// omits `aad` from the `orig_tag` computation by allowing forgery of the - /// hash committment via length extension on `aad`.) + /// hash commitment via length extension on `aad`.) pub struct CtxishHmacAead { inner_aead: Aead, hasher: SimpleHmac @@ -442,4 +451,4 @@ mod ctx { where Self: AeadCore {} impl CommittingAead for CtxishHmacAead where Self: AeadCore {} -} \ No newline at end of file +} From 1f269a36606ade0ca2130ef20fd4aadcc8c3aee1 Mon Sep 17 00:00:00 2001 From: rlee287 Date: Mon, 23 Oct 2023 17:52:46 -0700 Subject: [PATCH 08/12] Change PaddedAEAD from AeadInPlace to Aead In-place en/decryption doesn't quite make sense when the padding adds a ciphertext overhead --- aead/src/committing_aead.rs | 72 ++++++++++++++++++++----------------- 1 file changed, 39 insertions(+), 33 deletions(-) diff --git a/aead/src/committing_aead.rs b/aead/src/committing_aead.rs index e405ce65d..7e6a576a7 100644 --- a/aead/src/committing_aead.rs +++ b/aead/src/committing_aead.rs @@ -122,59 +122,65 @@ mod padded_aead { // and AeadMutInPlace, as having both would conflict with the blanket impl // Choose AeadInPlace because all the current rustcrypto/AEADs do not have // a mutable state - impl AeadInPlace for PaddedAead + impl crate::Aead for PaddedAead where Self: AeadCore { - fn encrypt_in_place_detached( + fn encrypt<'msg, 'aad>( &self, nonce: &crate::Nonce, - associated_data: &[u8], - buffer: &mut [u8], - ) -> crate::Result> { - let offset_amount = Aead::CiphertextOverhead::to_usize() - +3*Aead::KeySize::to_usize(); - buffer.copy_within(..buffer.len()-offset_amount, offset_amount); - buffer[..offset_amount].fill(0x00); + plaintext: impl Into>, + ) -> crate::Result> { + let padding_overhead = 3*Aead::KeySize::to_usize(); + assert_eq!(padding_overhead+Aead::CiphertextOverhead::to_usize(), Self::CiphertextOverhead::to_usize()); + + let payload = plaintext.into(); + let mut padded_msg = alloc::vec![0x00; payload.msg.len()+3*Aead::KeySize::to_usize()]; + padded_msg[padding_overhead..].copy_from_slice(payload.msg); // Compiler can't see that Self::NonceSize == Aead::NonceSize let nonce_recast = crate::Nonce::::from_slice(nonce.as_slice()); - let tag_inner = self.inner_aead.encrypt_in_place_detached(nonce_recast, associated_data, buffer)?; - - // Compiler can't see that Self::TagSize == Aead::TagSize - let tag_recast = crate::Tag::::clone_from_slice(tag_inner.as_slice()); - Ok(tag_recast) + let tag_inner = self.inner_aead.encrypt_in_place_detached(nonce_recast, payload.aad, &mut padded_msg)?; + // Append the tag to the end + padded_msg.extend(tag_inner); + Ok(padded_msg) } - fn decrypt_in_place_detached( + fn decrypt<'msg, 'aad>( &self, nonce: &crate::Nonce, - associated_data: &[u8], - buffer: &mut [u8], - tag: &crate::Tag, - ) -> crate::Result<()> { + ciphertext: impl Into>, + ) -> crate::Result> { + let 
padding_overhead = 3*Aead::KeySize::to_usize(); + let total_overhead = padding_overhead+Aead::CiphertextOverhead::to_usize(); + assert_eq!(total_overhead, Self::CiphertextOverhead::to_usize()); + + let payload = ciphertext.into(); + // Compiler can't see that Self::NonceSize == Aead::NonceSize - // Ditto for Self::TagSize == Aead::TagSize let nonce_recast = crate::Nonce::::from_slice(nonce.as_slice()); - let tag_recast = crate::Tag::::from_slice(tag.as_slice()); - let tag_is_ok = Choice::from(match self.inner_aead.decrypt_in_place_detached(nonce_recast, associated_data, buffer, tag_recast) { + let (ctxt, tag) = payload.msg.split_at(payload.msg.len()-Aead::TagSize::to_usize()); + let tag_recast = crate::Tag::::from_slice(tag); + + if ctxt.len() < total_overhead { + return Err(crate::Error); + } + + let mut ptxt_vec = alloc::vec::Vec::from(ctxt); + + // Avoid timing side channel by not returning early + let mut decryption_is_ok = Choice::from(match self.inner_aead.decrypt_in_place_detached(nonce_recast, payload.aad, &mut ptxt_vec, tag_recast) { Ok(_) => 1, Err(_) => 0 }); - - let offset_amount = Aead::CiphertextOverhead::to_usize() - +3*Aead::KeySize::to_usize(); - // Do the loop because the slice ct_eq requires constructing - // [0; offset_amount], which requires more memory - let mut pad_is_ok = Choice::from(1); - for element in &buffer[..offset_amount] { - pad_is_ok = pad_is_ok & element.ct_eq(&0); + // Check padding now + for byte in ptxt_vec.drain(..padding_overhead) { + decryption_is_ok = decryption_is_ok & byte.ct_eq(&0); } - buffer.copy_within(offset_amount.., 0); - if (tag_is_ok & pad_is_ok).into() { - Ok(()) + if decryption_is_ok.into() { + Ok(ptxt_vec) } else { Err(crate::Error) } From 6e1de2552d28594ddf8f9c040a880cf61053df78 Mon Sep 17 00:00:00 2001 From: rlee287 Date: Mon, 23 Oct 2023 17:54:38 -0700 Subject: [PATCH 09/12] Impl AeadMut for PaddedAead Turns out Aead and AeadMut do not clash with each other --- aead/src/committing_aead.rs | 63 +++++++++++++++++++++++++++++++++++++ 1 file changed, 63 insertions(+) diff --git a/aead/src/committing_aead.rs b/aead/src/committing_aead.rs index 7e6a576a7..9bc6dfedb 100644 --- a/aead/src/committing_aead.rs +++ b/aead/src/committing_aead.rs @@ -186,6 +186,69 @@ mod padded_aead { } } } + impl crate::AeadMut for PaddedAead + where + Self: AeadCore + { + fn encrypt<'msg, 'aad>( + &mut self, + nonce: &crate::Nonce, + plaintext: impl Into>, + ) -> crate::Result> { + let padding_overhead = 3*Aead::KeySize::to_usize(); + assert_eq!(padding_overhead+Aead::CiphertextOverhead::to_usize(), Self::CiphertextOverhead::to_usize()); + + let payload = plaintext.into(); + let mut padded_msg = alloc::vec![0x00; payload.msg.len()+3*Aead::KeySize::to_usize()]; + padded_msg[padding_overhead..].copy_from_slice(payload.msg); + + // Compiler can't see that Self::NonceSize == Aead::NonceSize + let nonce_recast = crate::Nonce::::from_slice(nonce.as_slice()); + + let tag_inner = self.inner_aead.encrypt_in_place_detached(nonce_recast, payload.aad, &mut padded_msg)?; + // Append the tag to the end + padded_msg.extend(tag_inner); + Ok(padded_msg) + } + fn decrypt<'msg, 'aad>( + &mut self, + nonce: &crate::Nonce, + ciphertext: impl Into>, + ) -> crate::Result> { + let padding_overhead = 3*Aead::KeySize::to_usize(); + let total_overhead = padding_overhead+Aead::CiphertextOverhead::to_usize(); + assert_eq!(total_overhead, Self::CiphertextOverhead::to_usize()); + + let payload = ciphertext.into(); + + // Compiler can't see that Self::NonceSize == Aead::NonceSize + let 
nonce_recast = crate::Nonce::::from_slice(nonce.as_slice()); + + let (ctxt, tag) = payload.msg.split_at(payload.msg.len()-Aead::TagSize::to_usize()); + let tag_recast = crate::Tag::::from_slice(tag); + + if ctxt.len() < total_overhead { + return Err(crate::Error); + } + + let mut ptxt_vec = alloc::vec::Vec::from(ctxt); + + // Avoid timing side channel by not returning early + let mut decryption_is_ok = Choice::from(match self.inner_aead.decrypt_in_place_detached(nonce_recast, payload.aad, &mut ptxt_vec, tag_recast) { + Ok(_) => 1, + Err(_) => 0 + }); + // Check padding now + for byte in ptxt_vec.drain(..padding_overhead) { + decryption_is_ok = decryption_is_ok & byte.ct_eq(&0); + } + if decryption_is_ok.into() { + Ok(ptxt_vec) + } else { + Err(crate::Error) + } + } + } impl KeyCommittingAead for PaddedAead where Self: AeadCore {} } From ea4de3d9e4dc8e8e18893d0ce2bae679a34a778f Mon Sep 17 00:00:00 2001 From: rlee287 Date: Tue, 24 Oct 2023 11:24:55 -0700 Subject: [PATCH 10/12] Minor documentation updates --- aead/src/committing_aead.rs | 30 ++++++++++++++---------------- 1 file changed, 14 insertions(+), 16 deletions(-) diff --git a/aead/src/committing_aead.rs b/aead/src/committing_aead.rs index 9bc6dfedb..64897c2ba 100644 --- a/aead/src/committing_aead.rs +++ b/aead/src/committing_aead.rs @@ -1,4 +1,4 @@ -//! Committing AEAD support. +//! Committing AEAD marker traits and generic constructions. //! //! Marker trait for Committing AEADs along with constructions that give //! key-committing properties to normal AEADs. @@ -9,20 +9,21 @@ //! provide a commitment for their inputs (which can equivalently be thought //! of as collision resistance of an AEAD with respect to its inputs). The //! lack of commitment properties has lead to breaks in real cryptographic -//! protocols, e.g. improper implementations of the password-authenticated -//! key exchange [OPAQUE][2] and the Shadowsocks proxy, as described in -//! a paper describing [partitioning oracle attacks][3]. +//! protocols, e.g. the Shadowsocks proxy ans improper implementations of the +//! password-authenticated key exchange [OPAQUE][2], as described in the +//! [partitioning oracle attacks][3] paper. //! //! Concrete examples of popular AEADs that lack commitment properties: -//! - AEADs using polynomial-based MACs (e.g. AES-GCM and ChaCha20Poly1305) -//! do not commit to their inputs. [1] describes how to construct an -//! AES-GCM ciphertext that decrypts correctly under two different keys to -//! two different, semantically meaningful plaintexts. +//! - AEADs using polynomial-based MACs (e.g. AES-GCM, AES-GCM-SIV, +//! and ChaCha20Poly1305) do not commit to their inputs. [This paper][1] +//! describes how to construct an AES-GCM ciphertext that decrypts correctly +//! under two different keys to two different, semantically meaningful +//! plaintexts. //! - AEADs where decryption can be separated into parallel always-successful //! plaintext recovery and tag computation+equality checking steps cannot //! provide commitment when the tag computation function is not preimage -//! resistant. [5] provides concrete attacks against EAX, GCM, SIV, CCM, -//! and OCB3 that demonstrate that they are not key-commiting. +//! resistant. [This paper][5] provides concrete attacks against EAX, GCM, +//! SIV, CCM, and OCB3 that demonstrate that they are not key-commiting. //! //! ## Module contents //! 
This module provides the [`KeyCommittingAead`] marker trait to indicate that @@ -72,8 +73,8 @@ mod padded_aead { /// A wrapper around a non-committing AEAD that implements the /// [padding fix][1] of prepending zeros to the plaintext before encryption /// and verifying their presence upon decryption. Based on the formulas - /// of [2], we append `3*key_len` zeros to obtain `3/4*key_len` bits of - /// key commitment security. + /// of [this paper][2], we append `3*key_len` zeros to obtain `3/4*key_len` + /// bits of key commitment security. /// /// The padding fix paper proves that this construction is key-committing /// for AES-GCM, ChaCha20Poly1305, and other AEADs that internally use @@ -118,10 +119,7 @@ mod padded_aead { type CiphertextOverhead = >::Output>>::Output; } - // TODO: don't see a way to provide impls for both AeadInPlace - // and AeadMutInPlace, as having both would conflict with the blanket impl - // Choose AeadInPlace because all the current rustcrypto/AEADs do not have - // a mutable state + impl crate::Aead for PaddedAead where Self: AeadCore From bcff7a86b3daf3a9d1d2f6b85658a2455f9074ed Mon Sep 17 00:00:00 2001 From: rlee287 Date: Tue, 24 Oct 2023 11:33:55 -0700 Subject: [PATCH 11/12] cargo fmt run --- aead/src/committing_aead.rs | 243 ++++++++++++++++++++++-------------- 1 file changed, 149 insertions(+), 94 deletions(-) diff --git a/aead/src/committing_aead.rs b/aead/src/committing_aead.rs index 64897c2ba..b0d5623fa 100644 --- a/aead/src/committing_aead.rs +++ b/aead/src/committing_aead.rs @@ -1,6 +1,6 @@ //! Committing AEAD marker traits and generic constructions. //! -//! Marker trait for Committing AEADs along with constructions that give +//! Marker trait for Committing AEADs along with constructions that give //! key-committing properties to normal AEADs. //! //! ## Why Committing AEADs? @@ -12,19 +12,19 @@ //! protocols, e.g. the Shadowsocks proxy ans improper implementations of the //! password-authenticated key exchange [OPAQUE][2], as described in the //! [partitioning oracle attacks][3] paper. -//! +//! //! Concrete examples of popular AEADs that lack commitment properties: //! - AEADs using polynomial-based MACs (e.g. AES-GCM, AES-GCM-SIV, -//! and ChaCha20Poly1305) do not commit to their inputs. [This paper][1] -//! describes how to construct an AES-GCM ciphertext that decrypts correctly -//! under two different keys to two different, semantically meaningful +//! and ChaCha20Poly1305) do not commit to their inputs. [This paper][1] +//! describes how to construct an AES-GCM ciphertext that decrypts correctly +//! under two different keys to two different, semantically meaningful //! plaintexts. //! - AEADs where decryption can be separated into parallel always-successful //! plaintext recovery and tag computation+equality checking steps cannot //! provide commitment when the tag computation function is not preimage //! resistant. [This paper][5] provides concrete attacks against EAX, GCM, //! SIV, CCM, and OCB3 that demonstrate that they are not key-commiting. -//! +//! //! ## Module contents //! This module provides the [`KeyCommittingAead`] marker trait to indicate that //! an AEAD commits to its key, along with the [`CommittingAead`] marker trait @@ -54,28 +54,28 @@ pub use ctx::CtxishHmacAead; /// Marker trait that signals that an AEAD commits to its key. pub trait KeyCommittingAead: AeadCore {} /// Marker trait that signals that an AEAD commits to all its inputs. 
-pub trait CommittingAead: AeadCore+KeyCommittingAead {} +pub trait CommittingAead: AeadCore + KeyCommittingAead {} #[cfg(feature = "committing_ae")] #[cfg_attr(docsrs, doc(cfg(feature = "committing_ae")))] mod padded_aead { + use super::KeyCommittingAead; use crate::{AeadCore, AeadInPlace}; - use crypto_common::{KeyInit, KeySizeUser}; use core::ops::{Add, Mul}; + use crypto_common::{KeyInit, KeySizeUser}; + use generic_array::typenum::{Unsigned, U3}; use generic_array::ArrayLength; - use generic_array::typenum::{U3, Unsigned}; use subtle::{Choice, ConstantTimeEq}; - use super::KeyCommittingAead; #[cfg(feature = "committing_ae")] #[cfg_attr(docsrs, doc(cfg(feature = "committing_ae")))] #[derive(Debug, Clone)] /// A wrapper around a non-committing AEAD that implements the - /// [padding fix][1] of prepending zeros to the plaintext before encryption + /// [padding fix][1] of prepending zeros to the plaintext before encryption /// and verifying their presence upon decryption. Based on the formulas /// of [this paper][2], we append `3*key_len` zeros to obtain `3/4*key_len` /// bits of key commitment security. - /// + /// /// The padding fix paper proves that this construction is key-committing /// for AES-GCM, ChaCha20Poly1305, and other AEADs that internally use /// primitives that can be modelled as ideal. However, security is not @@ -83,13 +83,13 @@ mod padded_aead { /// used as a MAC in normal circumstances because HMAC does not require a /// collision resistant hash, but an AEAD using HMAC-SHA-1 to provide /// integrity cannot be made committing using this padding scheme. - /// + /// /// [1]: https://eprint.iacr.org/2020/1456.pdf /// [2]: https://csrc.nist.gov/csrc/media/Events/2023/third-workshop-on-block-cipher-modes-of-operation/documents/accepted-papers/The%20Landscape%20of%20Committing%20Authenticated%20Encryption.pdf pub struct PaddedAead { inner_aead: Aead, } - impl PaddedAead { + impl PaddedAead { /// Extracts the inner Aead object. 
#[inline] pub fn into_inner(self) -> Aead { @@ -97,40 +97,45 @@ mod padded_aead { } } - impl KeySizeUser for PaddedAead { + impl KeySizeUser for PaddedAead { type KeySize = Aead::KeySize; } - impl KeyInit for PaddedAead { + impl KeyInit for PaddedAead { fn new(key: &crypto_common::Key) -> Self { PaddedAead { inner_aead: Aead::new(key), } } } - impl AeadCore for PaddedAead + impl AeadCore for PaddedAead where Aead::CiphertextOverhead: Add<>::Output>, Aead::KeySize: Mul, - >::Output>>::Output: ArrayLength + >::Output>>::Output: + ArrayLength, { type NonceSize = Aead::NonceSize; type TagSize = Aead::TagSize; - type CiphertextOverhead = >::Output>>::Output; + type CiphertextOverhead = + >::Output>>::Output; } - impl crate::Aead for PaddedAead + impl crate::Aead for PaddedAead where - Self: AeadCore + Self: AeadCore, { fn encrypt<'msg, 'aad>( &self, nonce: &crate::Nonce, plaintext: impl Into>, ) -> crate::Result> { - let padding_overhead = 3*Aead::KeySize::to_usize(); - assert_eq!(padding_overhead+Aead::CiphertextOverhead::to_usize(), Self::CiphertextOverhead::to_usize()); + let padding_overhead = 3 * Aead::KeySize::to_usize(); + assert_eq!( + padding_overhead + Aead::CiphertextOverhead::to_usize(), + Self::CiphertextOverhead::to_usize() + ); let payload = plaintext.into(); let mut padded_msg = alloc::vec![0x00; payload.msg.len()+3*Aead::KeySize::to_usize()]; @@ -139,7 +144,11 @@ mod padded_aead { // Compiler can't see that Self::NonceSize == Aead::NonceSize let nonce_recast = crate::Nonce::::from_slice(nonce.as_slice()); - let tag_inner = self.inner_aead.encrypt_in_place_detached(nonce_recast, payload.aad, &mut padded_msg)?; + let tag_inner = self.inner_aead.encrypt_in_place_detached( + nonce_recast, + payload.aad, + &mut padded_msg, + )?; // Append the tag to the end padded_msg.extend(tag_inner); Ok(padded_msg) @@ -150,8 +159,8 @@ mod padded_aead { nonce: &crate::Nonce, ciphertext: impl Into>, ) -> crate::Result> { - let padding_overhead = 3*Aead::KeySize::to_usize(); - let total_overhead = padding_overhead+Aead::CiphertextOverhead::to_usize(); + let padding_overhead = 3 * Aead::KeySize::to_usize(); + let total_overhead = padding_overhead + Aead::CiphertextOverhead::to_usize(); assert_eq!(total_overhead, Self::CiphertextOverhead::to_usize()); let payload = ciphertext.into(); @@ -159,7 +168,9 @@ mod padded_aead { // Compiler can't see that Self::NonceSize == Aead::NonceSize let nonce_recast = crate::Nonce::::from_slice(nonce.as_slice()); - let (ctxt, tag) = payload.msg.split_at(payload.msg.len()-Aead::TagSize::to_usize()); + let (ctxt, tag) = payload + .msg + .split_at(payload.msg.len() - Aead::TagSize::to_usize()); let tag_recast = crate::Tag::::from_slice(tag); if ctxt.len() < total_overhead { @@ -169,10 +180,17 @@ mod padded_aead { let mut ptxt_vec = alloc::vec::Vec::from(ctxt); // Avoid timing side channel by not returning early - let mut decryption_is_ok = Choice::from(match self.inner_aead.decrypt_in_place_detached(nonce_recast, payload.aad, &mut ptxt_vec, tag_recast) { - Ok(_) => 1, - Err(_) => 0 - }); + let mut decryption_is_ok = Choice::from( + match self.inner_aead.decrypt_in_place_detached( + nonce_recast, + payload.aad, + &mut ptxt_vec, + tag_recast, + ) { + Ok(_) => 1, + Err(_) => 0, + }, + ); // Check padding now for byte in ptxt_vec.drain(..padding_overhead) { decryption_is_ok = decryption_is_ok & byte.ct_eq(&0); @@ -184,17 +202,20 @@ mod padded_aead { } } } - impl crate::AeadMut for PaddedAead + impl crate::AeadMut for PaddedAead where - Self: AeadCore + Self: AeadCore, { fn 
encrypt<'msg, 'aad>( &mut self, nonce: &crate::Nonce, plaintext: impl Into>, ) -> crate::Result> { - let padding_overhead = 3*Aead::KeySize::to_usize(); - assert_eq!(padding_overhead+Aead::CiphertextOverhead::to_usize(), Self::CiphertextOverhead::to_usize()); + let padding_overhead = 3 * Aead::KeySize::to_usize(); + assert_eq!( + padding_overhead + Aead::CiphertextOverhead::to_usize(), + Self::CiphertextOverhead::to_usize() + ); let payload = plaintext.into(); let mut padded_msg = alloc::vec![0x00; payload.msg.len()+3*Aead::KeySize::to_usize()]; @@ -203,7 +224,11 @@ mod padded_aead { // Compiler can't see that Self::NonceSize == Aead::NonceSize let nonce_recast = crate::Nonce::::from_slice(nonce.as_slice()); - let tag_inner = self.inner_aead.encrypt_in_place_detached(nonce_recast, payload.aad, &mut padded_msg)?; + let tag_inner = self.inner_aead.encrypt_in_place_detached( + nonce_recast, + payload.aad, + &mut padded_msg, + )?; // Append the tag to the end padded_msg.extend(tag_inner); Ok(padded_msg) @@ -213,8 +238,8 @@ mod padded_aead { nonce: &crate::Nonce, ciphertext: impl Into>, ) -> crate::Result> { - let padding_overhead = 3*Aead::KeySize::to_usize(); - let total_overhead = padding_overhead+Aead::CiphertextOverhead::to_usize(); + let padding_overhead = 3 * Aead::KeySize::to_usize(); + let total_overhead = padding_overhead + Aead::CiphertextOverhead::to_usize(); assert_eq!(total_overhead, Self::CiphertextOverhead::to_usize()); let payload = ciphertext.into(); @@ -222,7 +247,9 @@ mod padded_aead { // Compiler can't see that Self::NonceSize == Aead::NonceSize let nonce_recast = crate::Nonce::::from_slice(nonce.as_slice()); - let (ctxt, tag) = payload.msg.split_at(payload.msg.len()-Aead::TagSize::to_usize()); + let (ctxt, tag) = payload + .msg + .split_at(payload.msg.len() - Aead::TagSize::to_usize()); let tag_recast = crate::Tag::::from_slice(tag); if ctxt.len() < total_overhead { @@ -232,10 +259,17 @@ mod padded_aead { let mut ptxt_vec = alloc::vec::Vec::from(ctxt); // Avoid timing side channel by not returning early - let mut decryption_is_ok = Choice::from(match self.inner_aead.decrypt_in_place_detached(nonce_recast, payload.aad, &mut ptxt_vec, tag_recast) { - Ok(_) => 1, - Err(_) => 0 - }); + let mut decryption_is_ok = Choice::from( + match self.inner_aead.decrypt_in_place_detached( + nonce_recast, + payload.aad, + &mut ptxt_vec, + tag_recast, + ) { + Ok(_) => 1, + Err(_) => 0, + }, + ); // Check padding now for byte in ptxt_vec.drain(..padding_overhead) { decryption_is_ok = decryption_is_ok & byte.ct_eq(&0); @@ -247,43 +281,42 @@ mod padded_aead { } } } - impl KeyCommittingAead for PaddedAead - where Self: AeadCore {} + impl KeyCommittingAead for PaddedAead where Self: AeadCore {} } #[cfg(feature = "committing_ae")] #[cfg_attr(docsrs, doc(cfg(feature = "committing_ae")))] mod ctx { + use super::{CommittingAead, KeyCommittingAead}; use crate::{AeadCore, AeadInPlace}; - use crypto_common::{KeyInit, KeySizeUser, BlockSizeUser}; use core::ops::Add; - use digest::{Digest, Mac, FixedOutput}; - use hmac::SimpleHmac; - use generic_array::ArrayLength; + use crypto_common::{BlockSizeUser, KeyInit, KeySizeUser}; + use digest::{Digest, FixedOutput, Mac}; use generic_array::typenum::Unsigned; + use generic_array::ArrayLength; + use hmac::SimpleHmac; use subtle::Choice; - use super::{KeyCommittingAead, CommittingAead}; #[cfg(feature = "committing_ae")] #[cfg_attr(docsrs, doc(cfg(feature = "committing_ae")))] #[derive(Debug, Clone)] /// Implementation of the encryption portion of the /// [CTX 
scheme](https://eprint.iacr.org/2022/1260.pdf). - /// - /// CTX wraps an AEAD and replaces the tag with + /// + /// CTX wraps an AEAD and replaces the tag with /// `H(key || nonce || aad || orig_tag)`, which is shown in the paper to /// commit to all AEAD inputs as long as the hash is collision resistant. /// This provides `hash_output_len/2` bits of commitment security. - /// + /// /// Unfortunately, there is currently no way to get the expected tag of the /// inner AEAD using the current trait interfaces, so this struct only - /// implements the encryption direction. This may still be useful for + /// implements the encryption direction. This may still be useful for /// interfacing with other programs that use the CTX committing AE scheme. pub struct CtxAead { inner_aead: Aead, - hasher: CrHash + hasher: CrHash, } - impl CtxAead { + impl CtxAead { /// Extracts the inner Aead object. #[inline] pub fn into_inner(self) -> Aead { @@ -291,20 +324,19 @@ mod ctx { } } - impl KeySizeUser for CtxAead { + impl KeySizeUser for CtxAead { type KeySize = Aead::KeySize; } - impl KeyInit for CtxAead { + impl KeyInit for CtxAead { fn new(key: &crypto_common::Key) -> Self { CtxAead { inner_aead: Aead::new(key), - hasher: Digest::new_with_prefix(key) + hasher: Digest::new_with_prefix(key), } } } - impl AeadCore for CtxAead - { + impl AeadCore for CtxAead { type NonceSize = Aead::NonceSize; type TagSize = CrHash::OutputSize; @@ -316,9 +348,10 @@ mod ctx { // and AeadMutInPlace, as having both would conflict with the blanket impl // Choose AeadInPlace because all the current rustcrypto/AEADs do not have // a mutable state - impl AeadInPlace for CtxAead + impl AeadInPlace + for CtxAead where - Self: AeadCore + Self: AeadCore, { fn encrypt_in_place_detached( &self, @@ -329,7 +362,9 @@ mod ctx { // Compiler can't see that Self::NonceSize == Aead::NonceSize let nonce_recast = crate::Nonce::::from_slice(nonce.as_slice()); - let tag_inner = self.inner_aead.encrypt_in_place_detached(nonce_recast, associated_data, buffer)?; + let tag_inner = + self.inner_aead + .encrypt_in_place_detached(nonce_recast, associated_data, buffer)?; let mut tag_computer = self.hasher.clone(); tag_computer.update(nonce); @@ -360,33 +395,32 @@ mod ctx { } } - impl KeyCommittingAead for CtxAead - where Self: AeadCore {} - impl CommittingAead for CtxAead - where Self: AeadCore {} + impl KeyCommittingAead for CtxAead where Self: AeadCore + {} + impl CommittingAead for CtxAead where Self: AeadCore {} #[cfg(feature = "committing_ae")] #[cfg_attr(docsrs, doc(cfg(feature = "committing_ae")))] #[derive(Debug, Clone)] /// Implementation of a modified version of the CTX scheme. - /// + /// /// Instead of returning tag `H(key || nonce || aad || orig_tag)`, we return /// `orig_tag || HMAC_key(nonce || aad || orig_tag)`. The AEAD API requires /// that we treat the underlying AEAD as a black box, without access to the /// expected tag at decryption time, so we have to also send it along with /// the commitment to the other inputs to the AEAD. (Ideally, the need to - /// send `orig_tag` as well can be removed in a future version of the + /// send `orig_tag` as well can be removed in a future version of the /// crate.) At decryption time, we verify both `orig_tag` and the hash /// commitment. - /// + /// /// ## Security analysis for the modified construction - /// + /// /// HMAC invokes the underlying hash function twice such that the inputs to /// the hash functions are computed only by XOR, concatenation, and hashing. 
/// Thus, if we trust the underlying hash function to serve as a commitment /// to its inputs, we can also trust HMAC-hash to commit to its inputs and /// provide `hash_output_len/2` bits of commitment security, as with CTX. - /// + /// /// If the underlying AEAD provides proper confidentiality and integrity /// protections, we can assume that this new construction also provides /// proper confidentiality and integrity, since it has the same ciphertext @@ -394,7 +428,7 @@ mod ctx { /// a recoverable form. Moreover, HMAC is supposed to be a secure keyed MAC, /// so an attacker cannot forge a commitment without knowing the key, even /// with full knowledge of the other input to the HMAC. - /// + /// /// We use `HMAC_key(nonce || aad || orig_tag)` instead of the original CTX /// construction of `H(key || nonce || aad || orig_tag)` to mitigate length /// extension attacks that may become possible when `orig_tag` is sent in @@ -405,11 +439,11 @@ mod ctx { /// revealing `orig_tag` would be fatal for the CTX+ construction which /// omits `aad` from the `orig_tag` computation by allowing forgery of the /// hash commitment via length extension on `aad`.) - pub struct CtxishHmacAead { + pub struct CtxishHmacAead { inner_aead: Aead, - hasher: SimpleHmac + hasher: SimpleHmac, } - impl CtxishHmacAead { + impl CtxishHmacAead { /// Extracts the inner Aead object. #[inline] pub fn into_inner(self) -> Aead { @@ -417,21 +451,26 @@ mod ctx { } } - impl KeySizeUser for CtxishHmacAead { + impl KeySizeUser + for CtxishHmacAead + { type KeySize = Aead::KeySize; } - impl KeyInit for CtxishHmacAead { + impl KeyInit + for CtxishHmacAead + { fn new(key: &crypto_common::Key) -> Self { CtxishHmacAead { inner_aead: Aead::new(key), - hasher: as KeyInit>::new_from_slice(key).unwrap() + hasher: as KeyInit>::new_from_slice(key).unwrap(), } } } - impl AeadCore for CtxishHmacAead + impl AeadCore + for CtxishHmacAead where Aead::TagSize: Add, - >::Output: ArrayLength + >::Output: ArrayLength, { type NonceSize = Aead::NonceSize; @@ -443,9 +482,10 @@ mod ctx { // and AeadMutInPlace, as having both would conflict with the blanket impl // Choose AeadInPlace because all the current rustcrypto/AEADs do not have // a mutable state - impl AeadInPlace for CtxishHmacAead + impl + AeadInPlace for CtxishHmacAead where - Self: AeadCore + Self: AeadCore, { fn encrypt_in_place_detached( &self, @@ -456,7 +496,9 @@ mod ctx { // Compiler can't see that Self::NonceSize == Aead::NonceSize let nonce_recast = crate::Nonce::::from_slice(nonce.as_slice()); - let tag_inner = self.inner_aead.encrypt_in_place_detached(nonce_recast, associated_data, buffer)?; + let tag_inner = + self.inner_aead + .encrypt_in_place_detached(nonce_recast, associated_data, buffer)?; let mut tag_computer = self.hasher.clone(); tag_computer.update(nonce); @@ -485,10 +527,17 @@ mod ctx { // Prevent timing side channels by not returning early on inner AEAD // decryption failure - let tag_inner_is_ok = Choice::from(match self.inner_aead.decrypt_in_place_detached(nonce_recast, associated_data, buffer, tag_inner) { - Ok(_) => 1, - Err(_) => 0 - }); + let tag_inner_is_ok = Choice::from( + match self.inner_aead.decrypt_in_place_detached( + nonce_recast, + associated_data, + buffer, + tag_inner, + ) { + Ok(_) => 1, + Err(_) => 0, + }, + ); let mut tag_computer = self.hasher.clone(); tag_computer.update(nonce); @@ -504,7 +553,7 @@ mod ctx { let hmac_tag_is_ok = Choice::from(match tag_computer.verify_slice(expected_hmac_tag) { Ok(_) => 1, - Err(_) => 0 + Err(_) => 0, }); if 
(tag_inner_is_ok & hmac_tag_is_ok).into() {
@@ -514,8 +563,14 @@ mod ctx {
             }
         }
     }
-    impl KeyCommittingAead for CtxishHmacAead
-        where Self: AeadCore {}
-    impl CommittingAead for CtxishHmacAead
-        where Self: AeadCore {}
+    impl KeyCommittingAead
+        for CtxishHmacAead
+    where
+        Self: AeadCore,
+    {
+    }
+    impl CommittingAead for CtxishHmacAead where
+        Self: AeadCore
+    {
+    }
 }

From cf801c3dde58a793bbe88e236c8510b1f6cae542 Mon Sep 17 00:00:00 2001
From: rlee287
Date: Tue, 24 Oct 2023 11:37:26 -0700
Subject: [PATCH 12/12] Fix clippy lints

---
 aead/src/committing_aead.rs | 12 +++++++-----
 1 file changed, 7 insertions(+), 5 deletions(-)

diff --git a/aead/src/committing_aead.rs b/aead/src/committing_aead.rs
index b0d5623fa..23c073386 100644
--- a/aead/src/committing_aead.rs
+++ b/aead/src/committing_aead.rs
@@ -193,7 +193,7 @@ mod padded_aead {
             );
             // Check padding now
             for byte in ptxt_vec.drain(..padding_overhead) {
-                decryption_is_ok = decryption_is_ok & byte.ct_eq(&0);
+                decryption_is_ok &= byte.ct_eq(&0);
             }
             if decryption_is_ok.into() {
                 Ok(ptxt_vec)
@@ -272,7 +272,7 @@ mod padded_aead {
             );
             // Check padding now
             for byte in ptxt_vec.drain(..padding_overhead) {
-                decryption_is_ok = decryption_is_ok & byte.ct_eq(&0);
+                decryption_is_ok &= byte.ct_eq(&0);
             }
             if decryption_is_ok.into() {
                 Ok(ptxt_vec)
@@ -462,7 +462,8 @@ mod ctx {
         fn new(key: &crypto_common::Key) -> Self {
             CtxishHmacAead {
                 inner_aead: Aead::new(key),
-                hasher: as KeyInit>::new_from_slice(key).unwrap(),
+                hasher: as KeyInit>::new_from_slice(key)
+                    .expect("HMAC accepts all key lengths so this always works"),
             }
         }
     }
@@ -508,7 +509,8 @@ mod ctx {

             let final_tag_iter = tag_inner.iter().copied().chain(hmac_tag);

-            let final_tag = crate::Tag::::from_exact_iter(final_tag_iter).unwrap();
+            let final_tag = crate::Tag::::from_exact_iter(final_tag_iter)
+                .expect("Lengths should always match");

             Ok(final_tag)
         }
@@ -546,7 +548,7 @@ mod ctx {
             // If it doesn't then we'll likely get a mismatch here too
             // Regardless, we require both `Choice`s to be OK
             // So it doesn't matter if we ingest a potentially tainted tag here
-            tag_computer.update(&tag_inner);
+            tag_computer.update(tag_inner);
             // Get the HMAC tag
             let expected_hmac_tag = &tag[Aead::TagSize::to_usize()..];
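
Editor's note: the sketch below is not part of the patch series. It illustrates how the padding-fix wrapper might be used end to end, assuming the `committing_ae`, `alloc`, and `getrandom`/`rand_core` features are enabled, that the module is exposed as `aead::committing_aead`, that `aes-gcm` provides the inner AEAD, and that `PaddedAead` picks up the usual `Aead`/`AeadMut` convenience methods via the existing blanket impls. The function name `padded_roundtrip` is made up, and the exact API surface is still subject to change while the PR is under review.

```rust
// Hypothetical round-trip with the padding-fix wrapper (illustrative only).
use aead::committing_aead::PaddedAead;
use aead::{Aead, AeadCore, KeyInit, OsRng};
use aes_gcm::Aes256Gcm;

fn padded_roundtrip() -> aead::Result<()> {
    let key = PaddedAead::<Aes256Gcm>::generate_key(OsRng);
    // Declared `mut` in case the wrapper only exposes the `AeadMut` flavor.
    let mut cipher = PaddedAead::<Aes256Gcm>::new(&key);
    let nonce = PaddedAead::<Aes256Gcm>::generate_nonce(&mut OsRng);

    // The ciphertext carries the inner AEAD tag plus the prepended zero
    // padding (3 * key_len bytes in the current revision), so it is
    // noticeably longer than the plaintext.
    let ciphertext = cipher.encrypt(&nonce, b"attack at dawn".as_ref())?;
    let plaintext = cipher.decrypt(&nonce, ciphertext.as_ref())?;
    assert_eq!(plaintext, b"attack at dawn");
    Ok(())
}
```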
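Similarly, the tag layout produced by `CtxishHmacAead`, `orig_tag || HMAC_key(nonce || aad || orig_tag)`, can be recomputed with the `hmac` and `sha2` crates directly, which may help when checking interoperability with other implementations of the scheme. This is only a sketch: it uses `Hmac<Sha256>` for brevity where the wrapper itself uses `SimpleHmac`, and the helper name `ctxish_tag` is invented for illustration.

```rust
// Illustrative recomputation of the CTX-ish tag (not part of the patch).
use hmac::{Hmac, Mac};
use sha2::Sha256;

fn ctxish_tag(key: &[u8], nonce: &[u8], aad: &[u8], orig_tag: &[u8]) -> Vec<u8> {
    let mut mac = Hmac::<Sha256>::new_from_slice(key)
        .expect("HMAC accepts keys of any length");
    // Same update order as the wrapper: nonce, associated data, inner tag.
    mac.update(nonce);
    mac.update(aad);
    mac.update(orig_tag);

    // Final tag = inner AEAD tag followed by the HMAC commitment, so the
    // total tag length is the inner tag size plus 32 bytes for SHA-256.
    let mut tag = orig_tag.to_vec();
    tag.extend_from_slice(&mac.finalize().into_bytes());
    tag
}
```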