diff --git a/lightning/src/ln/channelmanager.rs b/lightning/src/ln/channelmanager.rs
index c787cedfad3..0250efe220b 100644
--- a/lightning/src/ln/channelmanager.rs
+++ b/lightning/src/ln/channelmanager.rs
@@ -24,7 +24,7 @@ use bitcoin::blockdata::transaction::Transaction;
 use bitcoin::blockdata::constants::genesis_block;
 use bitcoin::network::constants::Network;
 
-use bitcoin::hashes::{Hash, HashEngine};
+use bitcoin::hashes::Hash;
 use bitcoin::hashes::sha256::Hash as Sha256;
 use bitcoin::hashes::sha256d::Hash as Sha256dHash;
 use bitcoin::hash_types::{BlockHash, Txid};
@@ -2164,22 +2164,10 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
 				}
 			},
 			onion_utils::Hop::Forward { next_hop_data, next_hop_hmac, new_packet_bytes } => {
-				let mut new_pubkey = msg.onion_routing_packet.public_key.unwrap();
-
-				let blinding_factor = {
-					let mut sha = Sha256::engine();
-					sha.input(&new_pubkey.serialize()[..]);
-					sha.input(&shared_secret);
-					Sha256::from_engine(sha).into_inner()
-				};
-
-				let public_key = if let Err(e) = new_pubkey.mul_assign(&self.secp_ctx, &blinding_factor[..]) {
-					Err(e)
-				} else { Ok(new_pubkey) };
-
+				let new_pubkey = msg.onion_routing_packet.public_key.unwrap();
 				let outgoing_packet = msgs::OnionPacket {
 					version: 0,
-					public_key,
+					public_key: onion_utils::next_hop_packet_pubkey(&self.secp_ctx, new_pubkey, &shared_secret),
 					hop_data: new_packet_bytes,
 					hmac: next_hop_hmac.clone(),
 				};
diff --git a/lightning/src/ln/onion_utils.rs b/lightning/src/ln/onion_utils.rs
index 1df374d7e00..b223a344dbe 100644
--- a/lightning/src/ln/onion_utils.rs
+++ b/lightning/src/ln/onion_utils.rs
@@ -74,6 +74,17 @@ pub(super) fn gen_ammag_from_shared_secret(shared_secret: &[u8]) -> [u8; 32] {
 	Hmac::from_engine(hmac).into_inner()
 }
 
+pub(super) fn next_hop_packet_pubkey<T: secp256k1::Verification>(secp_ctx: &Secp256k1<T>, mut packet_pubkey: PublicKey, packet_shared_secret: &[u8; 32]) -> Result<PublicKey, secp256k1::Error> {
+	let blinding_factor = {
+		let mut sha = Sha256::engine();
+		sha.input(&packet_pubkey.serialize()[..]);
+		sha.input(packet_shared_secret);
+		Sha256::from_engine(sha).into_inner()
+	};
+
+	packet_pubkey.mul_assign(secp_ctx, &blinding_factor[..]).map(|_| packet_pubkey)
+}
+
 // can only fail if an intermediary hop has an invalid public key or session_priv is invalid
 #[inline]
 pub(super) fn construct_onion_keys_callback<T: secp256k1::Signing, FType: FnMut(SharedSecret, [u8; 32], PublicKey, &RouteHop, usize)> (secp_ctx: &Secp256k1<T>, path: &Vec<RouteHop>, session_priv: &SecretKey, mut callback: FType) -> Result<(), secp256k1::Error> {
diff --git a/lightning/src/util/chacha20poly1305rfc.rs b/lightning/src/util/chacha20poly1305rfc.rs
index e0e155ced4c..befe5d19f5f 100644
--- a/lightning/src/util/chacha20poly1305rfc.rs
+++ b/lightning/src/util/chacha20poly1305rfc.rs
@@ -10,6 +10,10 @@
 // This is a port of Andrew Moons poly1305-donna
 // https://github.com/floodyberry/poly1305-donna
 
+use ln::msgs::DecodeError;
+use util::ser::{FixedLengthReader, LengthRead, LengthReadableArgs, Readable, Writeable, Writer};
+use io::{self, Read, Write};
+
 #[cfg(not(fuzzing))]
 mod real_chachapoly {
 	use util::chacha20::ChaCha20;
@@ -70,6 +74,26 @@ mod real_chachapoly {
 			self.mac.raw_result(out_tag);
 		}
 
+		// Encrypt `input_output` in-place. To finish and calculate the tag, use `finish_and_get_tag`
+		// below.
+		pub(super) fn encrypt_in_place(&mut self, input_output: &mut [u8]) {
+			debug_assert!(self.finished == false);
+			self.cipher.process_in_place(input_output);
+			self.data_len += input_output.len();
+			self.mac.input(input_output);
+		}
+
+		// If we were previously encrypting with `encrypt_in_place`, this method can be used to finish
+		// encrypting and calculate the tag.
+		pub(super) fn finish_and_get_tag(&mut self, out_tag: &mut [u8]) {
+			debug_assert!(self.finished == false);
+			ChaCha20Poly1305RFC::pad_mac_16(&mut self.mac, self.data_len);
+			self.finished = true;
+			self.mac.input(&self.aad_len.to_le_bytes());
+			self.mac.input(&(self.data_len as u64).to_le_bytes());
+			self.mac.raw_result(out_tag);
+		}
+
 		pub fn decrypt(&mut self, input: &[u8], output: &mut [u8], tag: &[u8]) -> bool {
 			assert!(input.len() == output.len());
 			assert!(self.finished == false);
@@ -92,11 +116,141 @@ mod real_chachapoly {
 				false
 			}
 		}
+
+		// Decrypt in place, without checking the tag. Use `finish_and_check_tag` to check it
+		// later when decryption finishes.
+		//
+		// Should never be `pub` because the public API should always enforce tag checking.
+		pub(super) fn decrypt_in_place(&mut self, input_output: &mut [u8]) {
+			debug_assert!(self.finished == false);
+			self.mac.input(input_output);
+			self.data_len += input_output.len();
+			self.cipher.process_in_place(input_output);
+		}
+
+		// If we were previously decrypting with `decrypt_in_place`, this method must be used to finish
+		// decrypting and check the tag. Returns whether or not the tag is valid.
+		pub(super) fn finish_and_check_tag(&mut self, tag: &[u8]) -> bool {
+			debug_assert!(self.finished == false);
+			self.finished = true;
+			ChaCha20Poly1305RFC::pad_mac_16(&mut self.mac, self.data_len);
+			self.mac.input(&self.aad_len.to_le_bytes());
+			self.mac.input(&(self.data_len as u64).to_le_bytes());
+
+			let mut calc_tag = [0u8; 16];
+			self.mac.raw_result(&mut calc_tag);
+			if fixed_time_eq(&calc_tag, tag) {
+				true
+			} else {
+				false
+			}
+		}
 	}
 }
 
 #[cfg(not(fuzzing))]
 pub use self::real_chachapoly::ChaCha20Poly1305RFC;
 
+/// Enables simultaneously reading and decrypting a ChaCha20Poly1305RFC stream from a std::io::Read.
+struct ChaChaPolyReader<'a, R: Read> {
+	pub chacha: &'a mut ChaCha20Poly1305RFC,
+	pub read: R,
+}
+
+impl<'a, R: Read> Read for ChaChaPolyReader<'a, R> {
+	// Decrypt bytes from Self::read into `dest`.
+	// `ChaCha20Poly1305RFC::finish_and_check_tag` must be called to check the tag after all reads
+	// complete.
+	fn read(&mut self, dest: &mut [u8]) -> Result<usize, io::Error> {
+		let res = self.read.read(dest)?;
+		if res > 0 {
+			self.chacha.decrypt_in_place(&mut dest[0..res]);
+		}
+		Ok(res)
+	}
+}
+
+/// Enables simultaneously writing and encrypting a byte stream into a Writer.
+struct ChaChaPolyWriter<'a, W: Writer> {
+	pub chacha: &'a mut ChaCha20Poly1305RFC,
+	pub write: &'a mut W,
+}
+
+impl<'a, W: Writer> Writer for ChaChaPolyWriter<'a, W> {
+	// Encrypt then write bytes from `src` into Self::write.
+	// `ChaCha20Poly1305RFC::finish_and_get_tag` can be called to retrieve the tag after all writes
+	// complete.
+	fn write_all(&mut self, src: &[u8]) -> Result<(), io::Error> {
+		let mut src_idx = 0;
+		while src_idx < src.len() {
+			let mut write_buffer = [0; 8192];
+			let bytes_written = (&mut write_buffer[..]).write(&src[src_idx..]).expect("In-memory writes can't fail");
+			self.chacha.encrypt_in_place(&mut write_buffer[..bytes_written]);
+			self.write.write_all(&write_buffer[..bytes_written])?;
+			src_idx += bytes_written;
+		}
+		Ok(())
+	}
+}
+
+/// Enables the use of the serialization macros for objects that need to be simultaneously encrypted and
+/// serialized. This allows us to avoid an intermediate Vec allocation.
+pub(crate) struct ChaChaPolyWriteAdapter<'a, W: Writeable> {
+	pub rho: [u8; 32],
+	pub writeable: &'a W,
+}
+
+impl<'a, W: Writeable> ChaChaPolyWriteAdapter<'a, W> {
+	#[allow(unused)] // This will be used for onion messages soon
+	pub fn new(rho: [u8; 32], writeable: &'a W) -> ChaChaPolyWriteAdapter<'a, W> {
+		Self { rho, writeable }
+	}
+}
+
+impl<'a, T: Writeable> Writeable for ChaChaPolyWriteAdapter<'a, T> {
+	// Simultaneously write and encrypt Self::writeable.
+	fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
+		let mut chacha = ChaCha20Poly1305RFC::new(&self.rho, &[0; 12], &[]);
+		let mut chacha_stream = ChaChaPolyWriter { chacha: &mut chacha, write: w };
+		self.writeable.write(&mut chacha_stream)?;
+		let mut tag = [0 as u8; 16];
+		chacha.finish_and_get_tag(&mut tag);
+		tag.write(w)?;
+
+		Ok(())
+	}
+}
+
+/// Enables the use of the serialization macros for objects that need to be simultaneously decrypted and
+/// deserialized. This allows us to avoid an intermediate Vec allocation.
+pub(crate) struct ChaChaPolyReadAdapter<R: Readable> {
+	#[allow(unused)] // This will be used soon for onion messages
+	pub readable: R,
+}
+
+impl<T: Readable> LengthReadableArgs<[u8; 32]> for ChaChaPolyReadAdapter<T> {
+	// Simultaneously read and decrypt an object from a LengthRead, storing it in Self::readable.
+	// LengthRead must be used instead of std::io::Read because we need the total length to separate
+	// out the tag at the end.
+	fn read<R: LengthRead>(mut r: &mut R, secret: [u8; 32]) -> Result<Self, DecodeError> {
+		if r.total_bytes() < 16 { return Err(DecodeError::InvalidValue) }
+
+		let mut chacha = ChaCha20Poly1305RFC::new(&secret, &[0; 12], &[]);
+		let decrypted_len = r.total_bytes() - 16;
+		let s = FixedLengthReader::new(&mut r, decrypted_len);
+		let mut chacha_stream = ChaChaPolyReader { chacha: &mut chacha, read: s };
+		let readable: T = Readable::read(&mut chacha_stream)?;
+		chacha_stream.read.eat_remaining()?;
+
+		let mut tag = [0 as u8; 16];
+		r.read_exact(&mut tag)?;
+		if !chacha.finish_and_check_tag(&tag) {
+			return Err(DecodeError::InvalidValue)
+		}
+
+		Ok(Self { readable })
+	}
+}
+
 #[cfg(fuzzing)]
 mod fuzzy_chachapoly {
 	#[derive(Clone, Copy)]
@@ -130,6 +284,16 @@ mod fuzzy_chachapoly {
 			self.finished = true;
 		}
 
+		pub(super) fn encrypt_in_place(&mut self, _input_output: &mut [u8]) {
+			assert!(self.finished == false);
+			self.finished = true;
+		}
+
+		pub(super) fn finish_and_get_tag(&mut self, out_tag: &mut [u8]) {
+			out_tag.copy_from_slice(&self.tag);
+			self.finished = true;
+		}
+
 		pub fn decrypt(&mut self, input: &[u8], output: &mut [u8], tag: &[u8]) -> bool {
 			assert!(input.len() == output.len());
 			assert!(self.finished == false);
@@ -139,7 +303,106 @@ mod fuzzy_chachapoly {
 			self.finished = true;
 			true
 		}
+
+		pub(super) fn decrypt_in_place(&mut self, _input: &mut [u8]) {
+			assert!(self.finished == false);
+		}
+
+		pub(super) fn finish_and_check_tag(&mut self, tag: &[u8]) -> bool {
+			if tag[..] != self.tag[..] { return false; }
+			self.finished = true;
+			true
+		}
 	}
 }
 
 #[cfg(fuzzing)]
 pub use self::fuzzy_chachapoly::ChaCha20Poly1305RFC;
+
+#[cfg(test)]
+mod tests {
+	use ln::msgs::DecodeError;
+	use super::{ChaChaPolyReadAdapter, ChaChaPolyWriteAdapter};
+	use util::ser::{self, FixedLengthReader, LengthReadableArgs, Writeable};
+
+	// Used for testing various lengths of serialization.
+	#[derive(Debug, PartialEq)]
+	struct TestWriteable {
+		field1: Vec<u8>,
+		field2: Vec<u8>,
+		field3: Vec<u8>,
+	}
+	impl_writeable_tlv_based!(TestWriteable, {
+		(1, field1, vec_type),
+		(2, field2, vec_type),
+		(3, field3, vec_type),
+	});
+
+	#[test]
+	fn test_chacha_stream_adapters() {
+		// Check that ChaChaPolyReadAdapter and ChaChaPolyWriteAdapter correctly encode and decode an
+		// encrypted object.
+		macro_rules! check_object_read_write {
+			($obj: expr) => {
+				// First, serialize the object, encrypted with ChaCha20Poly1305.
+				let rho = [42; 32];
+				let writeable_len = $obj.serialized_length() as u64 + 16;
+				let write_adapter = ChaChaPolyWriteAdapter::new(rho, &$obj);
+				let encrypted_writeable_bytes = write_adapter.encode();
+				let encrypted_writeable = &encrypted_writeable_bytes[..];
+
+				// Now deserialize the object back and make sure it matches the original.
+				let mut rd = FixedLengthReader::new(encrypted_writeable, writeable_len);
+				let read_adapter = <ChaChaPolyReadAdapter<TestWriteable>>::read(&mut rd, rho).unwrap();
+				assert_eq!($obj, read_adapter.readable);
+			};
+		}
+
+		// Try a big object that will require multiple write buffers.
+		let big_writeable = TestWriteable {
+			field1: vec![43],
+			field2: vec![44; 4192],
+			field3: vec![45; 4192 + 1],
+		};
+		check_object_read_write!(big_writeable);
+
+		// Try a small object that fits into one write buffer.
+		let small_writeable = TestWriteable {
+			field1: vec![43],
+			field2: vec![44],
+			field3: vec![45],
+		};
+		check_object_read_write!(small_writeable);
+	}
+
+	fn do_chacha_stream_adapters_ser_macros() -> Result<(), DecodeError> {
+		let writeable = TestWriteable {
+			field1: vec![43],
+			field2: vec![44; 4192],
+			field3: vec![45; 4192 + 1],
+		};
+
+		// First, serialize the object into a TLV stream, encrypted with ChaCha20Poly1305.
+		let rho = [42; 32];
+		let write_adapter = ChaChaPolyWriteAdapter::new(rho, &writeable);
+		let mut writer = ser::VecWriter(Vec::new());
+		encode_tlv_stream!(&mut writer, {
+			(1, write_adapter, required),
+		});
+
+		// Now deserialize the object back and make sure it matches the original.
+		let mut read_adapter: Option<ChaChaPolyReadAdapter<TestWriteable>> = None;
+		decode_tlv_stream!(&writer.0[..], {
+			(1, read_adapter, (option: LengthReadableArgs, rho)),
+		});
+		assert_eq!(writeable, read_adapter.unwrap().readable);
+
+		Ok(())
+	}
+
+	#[test]
+	fn chacha_stream_adapters_ser_macros() {
+		// Test that our stream adapters work as expected with the TLV macros.
+		// This also serves to test the `option: $trait` variant of the `decode_tlv` ser macro.
+		do_chacha_stream_adapters_ser_macros().unwrap()
+	}
+}
diff --git a/lightning/src/util/ser.rs b/lightning/src/util/ser.rs
index 428adbc5e66..a45806eef54 100644
--- a/lightning/src/util/ser.rs
+++ b/lightning/src/util/ser.rs
@@ -134,6 +134,13 @@ impl<R: Read> Read for FixedLengthReader<R> {
 	}
 }
 
+impl<R: Read> LengthRead for FixedLengthReader<R> {
+	#[inline]
+	fn total_bytes(&self) -> u64 {
+		self.total_bytes
+	}
+}
+
 /// A Read which tracks whether any bytes have been read at all. This allows us to distinguish
 /// between "EOF reached before we started" and "EOF reached mid-read".
 pub(crate) struct ReadTrackingReader<R: Read> {
@@ -220,6 +227,21 @@ pub trait ReadableArgs<P>
 	fn read<R: Read>(reader: &mut R, params: P) -> Result<Self, DecodeError>;
 }
 
+/// A std::io::Read that also provides the total bytes available to read.
+pub(crate) trait LengthRead: Read {
+	/// The total number of bytes available to read.
+	fn total_bytes(&self) -> u64;
+}
+
+/// A trait that various higher-level rust-lightning types implement allowing them to be read in
+/// from a Read given some additional set of arguments which is required to deserialize, requiring
+/// the implementer to provide the total length of the read.
+pub(crate) trait LengthReadableArgs<P>
+	where Self: Sized
+{
+	/// Reads a Self in from the given LengthRead
+	fn read<R: LengthRead>(reader: &mut R, params: P) -> Result<Self, DecodeError>;
+}
+
 /// A trait that various rust-lightning types implement allowing them to (maybe) be read in from a Read
 ///
 /// (C-not exported) as we only export serialization to/from byte arrays instead
diff --git a/lightning/src/util/ser_macros.rs b/lightning/src/util/ser_macros.rs
index 165d1f1edba..816426b1133 100644
--- a/lightning/src/util/ser_macros.rs
+++ b/lightning/src/util/ser_macros.rs
@@ -118,6 +118,9 @@ macro_rules! check_tlv_order {
 	($last_seen_type: expr, $typ: expr, $type: expr, $field: ident, ignorable) => {{
 		// no-op
 	}};
+	($last_seen_type: expr, $typ: expr, $type: expr, $field: ident, (option: $trait: ident $(, $read_arg: expr)?)) => {{
+		// no-op
+	}};
 }
 
 macro_rules! check_missing_tlv {
@@ -144,6 +147,9 @@ macro_rules! check_missing_tlv {
 	($last_seen_type: expr, $type: expr, $field: ident, ignorable) => {{
 		// no-op
 	}};
+	($last_seen_type: expr, $type: expr, $field: ident, (option: $trait: ident $(, $read_arg: expr)?)) => {{
+		// no-op
+	}};
 }
 
 macro_rules! decode_tlv {
@@ -163,6 +169,9 @@ macro_rules! decode_tlv {
 	($reader: expr, $field: ident, ignorable) => {{
 		$field = ser::MaybeReadable::read(&mut $reader)?;
 	}};
+	($reader: expr, $field: ident, (option: $trait: ident $(, $read_arg: expr)?)) => {{
+		$field = Some($trait::read(&mut $reader $(, $read_arg)*)?);
+	}};
 }
 
 macro_rules! decode_tlv_stream {
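
For reviewers, the intended call pattern boils down to the sketch below, written as a hypothetical extra case for the `tests` module added above (so all imports come from that module). `MyPayload`, `encrypt_then_decrypt_roundtrip`, and the hard-coded `rho` are illustrative names and values only, not part of the diff:

	#[derive(Debug, PartialEq)]
	struct MyPayload {
		data: Vec<u8>,
	}
	impl_writeable_tlv_based!(MyPayload, {
		(0, data, vec_type),
	});

	#[test]
	fn encrypt_then_decrypt_roundtrip() {
		// More than 8192 bytes, so ChaChaPolyWriter exercises its chunked write loop.
		let payload = MyPayload { data: vec![7; 9000] };
		// In practice `rho` would be a derived per-hop key rather than a constant.
		let rho = [42; 32];

		// Encrypt-while-serializing: the output is the ciphertext followed by the 16-byte Poly1305
		// tag, with no intermediate plaintext Vec.
		let encrypted = ChaChaPolyWriteAdapter::new(rho, &payload).encode();

		// Decrypt-while-deserializing: the tag is checked before the object is handed back.
		let mut rd = FixedLengthReader::new(&encrypted[..], encrypted.len() as u64);
		let decrypted = <ChaChaPolyReadAdapter<MyPayload>>::read(&mut rd, rho).unwrap().readable;
		assert_eq!(decrypted, payload);
	}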