From efe1135d2841d219a88a7f3c97e399d741bf1137 Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Fri, 9 Feb 2024 16:55:30 +0100 Subject: [PATCH 001/124] Update Reply protocol definition and codec (#717) * Update Reply protocol definition and codec * Make consolidation a flag in Query/Reply * Fix wrong Consolidation cast in codec * Apply Reply changes to routing * Fix shared-memory feature * Fix stats * Bump Zenoh Protocol Version * Add query/reply ok(put|del)/err() tests --- commons/zenoh-codec/src/zenoh/mod.rs | 4 +- commons/zenoh-codec/src/zenoh/query.rs | 70 +++++----- commons/zenoh-codec/src/zenoh/reply.rs | 141 +++----------------- commons/zenoh-codec/tests/codec.rs | 2 +- commons/zenoh-protocol/src/lib.rs | 2 +- commons/zenoh-protocol/src/zenoh/mod.rs | 10 +- commons/zenoh-protocol/src/zenoh/query.rs | 25 ++-- commons/zenoh-protocol/src/zenoh/reply.rs | 90 +++---------- io/zenoh-transport/src/common/stats.rs | 8 ++ io/zenoh-transport/src/shm.rs | 17 +-- zenoh/src/net/routing/dispatcher/pubsub.rs | 13 +- zenoh/src/net/routing/dispatcher/queries.rs | 54 +++++--- zenoh/src/queryable.rs | 81 ++++++----- zenoh/src/session.rs | 74 +++++++--- zenoh/tests/session.rs | 71 +++++++++- 15 files changed, 329 insertions(+), 333 deletions(-) diff --git a/commons/zenoh-codec/src/zenoh/mod.rs b/commons/zenoh-codec/src/zenoh/mod.rs index 2e3ea48be7..d59add9d63 100644 --- a/commons/zenoh-codec/src/zenoh/mod.rs +++ b/commons/zenoh-codec/src/zenoh/mod.rs @@ -121,8 +121,8 @@ where fn write(self, writer: &mut W, x: &ResponseBody) -> Self::Output { match x { ResponseBody::Reply(b) => self.write(&mut *writer, b), - ResponseBody::Err(b) => self.write(&mut *writer, b), ResponseBody::Ack(b) => self.write(&mut *writer, b), + ResponseBody::Err(b) => self.write(&mut *writer, b), ResponseBody::Put(b) => self.write(&mut *writer, b), } } @@ -140,8 +140,8 @@ where let codec = Zenoh080Header::new(header); let body = match imsg::mid(codec.header) { id::REPLY => 
ResponseBody::Reply(codec.read(&mut *reader)?), - id::ERR => ResponseBody::Err(codec.read(&mut *reader)?), id::ACK => ResponseBody::Ack(codec.read(&mut *reader)?), + id::ERR => ResponseBody::Err(codec.read(&mut *reader)?), id::PUT => ResponseBody::Put(codec.read(&mut *reader)?), _ => return Err(DidntRead), }; diff --git a/commons/zenoh-codec/src/zenoh/query.rs b/commons/zenoh-codec/src/zenoh/query.rs index 09b01b2266..cb0506e474 100644 --- a/commons/zenoh-codec/src/zenoh/query.rs +++ b/commons/zenoh-codec/src/zenoh/query.rs @@ -22,48 +22,46 @@ use zenoh_protocol::{ common::{iext, imsg}, zenoh::{ id, - query::{ext, flag, Query}, + query::{ext, flag, Consolidation, Query}, }, }; -// Extension Consolidation -impl WCodec<(ext::ConsolidationType, bool), &mut W> for Zenoh080 +// Consolidation +impl WCodec for Zenoh080 where W: Writer, { type Output = Result<(), DidntWrite>; - fn write(self, writer: &mut W, x: (ext::ConsolidationType, bool)) -> Self::Output { - let (x, more) = x; + fn write(self, writer: &mut W, x: Consolidation) -> Self::Output { let v: u64 = match x { - ext::ConsolidationType::Auto => 0, - ext::ConsolidationType::None => 1, - ext::ConsolidationType::Monotonic => 2, - ext::ConsolidationType::Latest => 3, - ext::ConsolidationType::Unique => 4, + Consolidation::Auto => 0, + Consolidation::None => 1, + Consolidation::Monotonic => 2, + Consolidation::Latest => 3, + Consolidation::Unique => 4, }; - let v = ext::Consolidation::new(v); - self.write(&mut *writer, (&v, more)) + self.write(&mut *writer, v) } } -impl RCodec<(ext::ConsolidationType, bool), &mut R> for Zenoh080Header +impl RCodec for Zenoh080 where R: Reader, { type Error = DidntRead; - fn read(self, reader: &mut R) -> Result<(ext::ConsolidationType, bool), Self::Error> { - let (ext, more): (ext::Consolidation, bool) = self.read(&mut *reader)?; - let c = match ext.value { - 0 => ext::ConsolidationType::Auto, - 1 => ext::ConsolidationType::None, - 2 => ext::ConsolidationType::Monotonic, - 3 => 
ext::ConsolidationType::Latest, - 4 => ext::ConsolidationType::Unique, - _ => return Err(DidntRead), + fn read(self, reader: &mut R) -> Result { + let v: u64 = self.read(&mut *reader)?; + let c = match v { + 0 => Consolidation::Auto, + 1 => Consolidation::None, + 2 => Consolidation::Monotonic, + 3 => Consolidation::Latest, + 4 => Consolidation::Unique, + _ => Consolidation::Auto, // Fallback on Auto if Consolidation is unknown }; - Ok((c, more)) + Ok(c) } } @@ -75,9 +73,9 @@ where fn write(self, writer: &mut W, x: &Query) -> Self::Output { let Query { + consolidation, parameters, ext_sinfo, - ext_consolidation, ext_body, ext_attachment, ext_unknown, @@ -85,11 +83,13 @@ where // Header let mut header = id::QUERY; + if consolidation != &Consolidation::default() { + header |= flag::C; + } if !parameters.is_empty() { header |= flag::P; } let mut n_exts = (ext_sinfo.is_some() as u8) - + ((ext_consolidation != &ext::ConsolidationType::default()) as u8) + (ext_body.is_some() as u8) + (ext_attachment.is_some() as u8) + (ext_unknown.len() as u8); @@ -99,6 +99,9 @@ where self.write(&mut *writer, header)?; // Body + if consolidation != &Consolidation::default() { + self.write(&mut *writer, *consolidation)?; + } if !parameters.is_empty() { self.write(&mut *writer, parameters)?; } @@ -108,10 +111,6 @@ where n_exts -= 1; self.write(&mut *writer, (sinfo, n_exts != 0))?; } - if ext_consolidation != &ext::ConsolidationType::default() { - n_exts -= 1; - self.write(&mut *writer, (*ext_consolidation, n_exts != 0))?; - } if let Some(body) = ext_body.as_ref() { n_exts -= 1; self.write(&mut *writer, (body, n_exts != 0))?; @@ -154,6 +153,11 @@ where } // Body + let mut consolidation = Consolidation::default(); + if imsg::has_flag(self.header, flag::C) { + consolidation = self.codec.read(&mut *reader)?; + } + let mut parameters = String::new(); if imsg::has_flag(self.header, flag::P) { parameters = self.codec.read(&mut *reader)?; @@ -161,7 +165,6 @@ where // Extensions let mut ext_sinfo: 
Option = None; - let mut ext_consolidation = ext::ConsolidationType::default(); let mut ext_body: Option = None; let mut ext_attachment: Option = None; let mut ext_unknown = Vec::new(); @@ -176,11 +179,6 @@ where ext_sinfo = Some(s); has_ext = ext; } - ext::Consolidation::ID => { - let (c, ext): (ext::ConsolidationType, bool) = eodec.read(&mut *reader)?; - ext_consolidation = c; - has_ext = ext; - } ext::QueryBodyType::SID | ext::QueryBodyType::VID => { let (s, ext): (ext::QueryBodyType, bool) = eodec.read(&mut *reader)?; ext_body = Some(s); @@ -200,9 +198,9 @@ where } Ok(Query { + consolidation, parameters, ext_sinfo, - ext_consolidation, ext_body, ext_attachment, ext_unknown, diff --git a/commons/zenoh-codec/src/zenoh/reply.rs b/commons/zenoh-codec/src/zenoh/reply.rs index d98c72b341..d54e98cc5e 100644 --- a/commons/zenoh-codec/src/zenoh/reply.rs +++ b/commons/zenoh-codec/src/zenoh/reply.rs @@ -11,23 +11,18 @@ // Contributors: // ZettaScale Zenoh Team, // -#[cfg(not(feature = "shared-memory"))] -use crate::Zenoh080Bounded; -#[cfg(feature = "shared-memory")] -use crate::Zenoh080Sliced; use crate::{common::extension, RCodec, WCodec, Zenoh080, Zenoh080Header}; use alloc::vec::Vec; use zenoh_buffers::{ reader::{DidntRead, Reader}, writer::{DidntWrite, Writer}, - ZBuf, }; use zenoh_protocol::{ - common::{iext, imsg}, - core::Encoding, + common::imsg, zenoh::{ id, - reply::{ext, flag, Reply}, + query::Consolidation, + reply::{flag, Reply, ReplyBody}, }, }; @@ -39,81 +34,35 @@ where fn write(self, writer: &mut W, x: &Reply) -> Self::Output { let Reply { - timestamp, - encoding, - ext_sinfo, - ext_consolidation, - #[cfg(feature = "shared-memory")] - ext_shm, - ext_attachment, + consolidation, ext_unknown, payload, } = x; // Header let mut header = id::REPLY; - if timestamp.is_some() { - header |= flag::T; - } - if encoding != &Encoding::default() { - header |= flag::E; - } - let mut n_exts = (ext_sinfo.is_some()) as u8 - + ((ext_consolidation != 
&ext::ConsolidationType::default()) as u8) - + (ext_attachment.is_some()) as u8 - + (ext_unknown.len() as u8); - #[cfg(feature = "shared-memory")] - { - n_exts += ext_shm.is_some() as u8; + if consolidation != &Consolidation::default() { + header |= flag::C; } + let mut n_exts = ext_unknown.len() as u8; if n_exts != 0 { header |= flag::Z; } self.write(&mut *writer, header)?; // Body - if let Some(ts) = timestamp.as_ref() { - self.write(&mut *writer, ts)?; - } - if encoding != &Encoding::default() { - self.write(&mut *writer, encoding)?; + if consolidation != &Consolidation::default() { + self.write(&mut *writer, *consolidation)?; } // Extensions - if let Some(sinfo) = ext_sinfo.as_ref() { - n_exts -= 1; - self.write(&mut *writer, (sinfo, n_exts != 0))?; - } - if ext_consolidation != &ext::ConsolidationType::default() { - n_exts -= 1; - self.write(&mut *writer, (*ext_consolidation, n_exts != 0))?; - } - #[cfg(feature = "shared-memory")] - if let Some(eshm) = ext_shm.as_ref() { - n_exts -= 1; - self.write(&mut *writer, (eshm, n_exts != 0))?; - } - if let Some(att) = ext_attachment.as_ref() { - n_exts -= 1; - self.write(&mut *writer, (att, n_exts != 0))?; - } for u in ext_unknown.iter() { n_exts -= 1; self.write(&mut *writer, (u, n_exts != 0))?; } // Payload - #[cfg(feature = "shared-memory")] - { - let codec = Zenoh080Sliced::::new(ext_shm.is_some()); - codec.write(&mut *writer, payload)?; - } - - #[cfg(not(feature = "shared-memory"))] - { - let bodec = Zenoh080Bounded::::new(); - bodec.write(&mut *writer, payload)?; - } + self.write(&mut *writer, payload)?; Ok(()) } @@ -144,81 +93,27 @@ where } // Body - let mut timestamp: Option = None; - if imsg::has_flag(self.header, flag::T) { - timestamp = Some(self.codec.read(&mut *reader)?); - } - - let mut encoding = Encoding::default(); - if imsg::has_flag(self.header, flag::E) { - encoding = self.codec.read(&mut *reader)?; + let mut consolidation = Consolidation::default(); + if imsg::has_flag(self.header, flag::C) { + 
consolidation = self.codec.read(&mut *reader)?; } // Extensions - let mut ext_sinfo: Option = None; - let mut ext_consolidation = ext::ConsolidationType::default(); - #[cfg(feature = "shared-memory")] - let mut ext_shm: Option = None; - let mut ext_attachment: Option = None; let mut ext_unknown = Vec::new(); let mut has_ext = imsg::has_flag(self.header, flag::Z); while has_ext { let ext: u8 = self.codec.read(&mut *reader)?; - let eodec = Zenoh080Header::new(ext); - match iext::eid(ext) { - ext::SourceInfo::ID => { - let (s, ext): (ext::SourceInfoType, bool) = eodec.read(&mut *reader)?; - ext_sinfo = Some(s); - has_ext = ext; - } - ext::Consolidation::ID => { - let (c, ext): (ext::ConsolidationType, bool) = eodec.read(&mut *reader)?; - ext_consolidation = c; - has_ext = ext; - } - #[cfg(feature = "shared-memory")] - ext::Shm::ID => { - let (s, ext): (ext::ShmType, bool) = eodec.read(&mut *reader)?; - ext_shm = Some(s); - has_ext = ext; - } - ext::Attachment::ID => { - let (a, ext): (ext::AttachmentType, bool) = eodec.read(&mut *reader)?; - ext_attachment = Some(a); - has_ext = ext; - } - _ => { - let (u, ext) = extension::read(reader, "Reply", ext)?; - ext_unknown.push(u); - has_ext = ext; - } - } + let (u, ext) = extension::read(reader, "Reply", ext)?; + ext_unknown.push(u); + has_ext = ext; } // Payload - let payload: ZBuf = { - #[cfg(feature = "shared-memory")] - { - let codec = Zenoh080Sliced::::new(ext_shm.is_some()); - codec.read(&mut *reader)? - } - - #[cfg(not(feature = "shared-memory"))] - { - let bodec = Zenoh080Bounded::::new(); - bodec.read(&mut *reader)? 
- } - }; + let payload: ReplyBody = self.codec.read(&mut *reader)?; Ok(Reply { - timestamp, - encoding, - ext_sinfo, - ext_consolidation, - #[cfg(feature = "shared-memory")] - ext_shm, - ext_attachment, + consolidation, ext_unknown, payload, }) diff --git a/commons/zenoh-codec/tests/codec.rs b/commons/zenoh-codec/tests/codec.rs index 3fdb95e1b5..28201c1977 100644 --- a/commons/zenoh-codec/tests/codec.rs +++ b/commons/zenoh-codec/tests/codec.rs @@ -556,7 +556,7 @@ fn codec_network() { run!(NetworkMessage, NetworkMessage::rand()); } -// Zenoh new +// Zenoh #[test] fn codec_put() { run!(zenoh::Put, zenoh::Put::rand()); diff --git a/commons/zenoh-protocol/src/lib.rs b/commons/zenoh-protocol/src/lib.rs index 2e1a2fa7cf..8d26f52ed9 100644 --- a/commons/zenoh-protocol/src/lib.rs +++ b/commons/zenoh-protocol/src/lib.rs @@ -28,7 +28,7 @@ pub mod transport; pub mod zenoh; // Zenoh version -pub const VERSION: u8 = 0x08; +pub const VERSION: u8 = 0x09; // Zenoh protocol uses the following conventions for message definition and representation. 
// diff --git a/commons/zenoh-protocol/src/zenoh/mod.rs b/commons/zenoh-protocol/src/zenoh/mod.rs index e67576e673..a23eaa9b21 100644 --- a/commons/zenoh-protocol/src/zenoh/mod.rs +++ b/commons/zenoh-protocol/src/zenoh/mod.rs @@ -95,10 +95,11 @@ impl RequestBody { let mut rng = rand::thread_rng(); - match rng.gen_range(0..3) { + match rng.gen_range(0..4) { 0 => RequestBody::Query(Query::rand()), 1 => RequestBody::Put(Put::rand()), 2 => RequestBody::Del(Del::rand()), + 3 => RequestBody::Pull(Pull::rand()), _ => unreachable!(), } } @@ -126,8 +127,8 @@ impl From for RequestBody { #[derive(Debug, Clone, PartialEq, Eq)] pub enum ResponseBody { Reply(Reply), - Err(Err), Ack(Ack), + Err(Err), Put(Put), } @@ -135,13 +136,12 @@ impl ResponseBody { #[cfg(feature = "test")] pub fn rand() -> Self { use rand::Rng; - let mut rng = rand::thread_rng(); match rng.gen_range(0..4) { 0 => ResponseBody::Reply(Reply::rand()), - 1 => ResponseBody::Err(Err::rand()), - 2 => ResponseBody::Ack(Ack::rand()), + 1 => ResponseBody::Ack(Ack::rand()), + 2 => ResponseBody::Err(Err::rand()), 3 => ResponseBody::Put(Put::rand()), _ => unreachable!(), } diff --git a/commons/zenoh-protocol/src/zenoh/query.rs b/commons/zenoh-protocol/src/zenoh/query.rs index 7432840492..17dfa23df8 100644 --- a/commons/zenoh-protocol/src/zenoh/query.rs +++ b/commons/zenoh-protocol/src/zenoh/query.rs @@ -69,50 +69,45 @@ impl From for Consolidation { /// /// ```text /// Flags: +/// - C: Consolidation if C==1 then consolidation is present /// - P: Parameters If P==1 then the parameters are present -/// - X: Reserved /// - Z: Extension If Z==1 then at least one extension is present /// /// 7 6 5 4 3 2 1 0 /// +-+-+-+-+-+-+-+-+ -/// |Z|X|P| QUERY | +/// |Z|P|C| QUERY | /// +-+-+-+---------+ +/// % consolidation % if C==1 +/// +---------------+ /// ~ ps: ~ if P==1 /// +---------------+ /// ~ [qry_exts] ~ if Z==1 /// +---------------+ /// ``` pub mod flag { - pub const P: u8 = 1 << 5; // 0x20 Parameters if P==1 then the 
parameters are present - // pub const X: u8 = 1 << 6; // 0x40 Reserved + pub const C: u8 = 1 << 5; // 0x20 Consolidation if C==1 then consolidation is present + pub const P: u8 = 1 << 6; // 0x40 Parameters if P==1 then the parameters are present pub const Z: u8 = 1 << 7; // 0x80 Extensions if Z==1 then an extension will follow } #[derive(Debug, Clone, PartialEq, Eq)] pub struct Query { + pub consolidation: Consolidation, pub parameters: String, pub ext_sinfo: Option, - pub ext_consolidation: Consolidation, pub ext_body: Option, pub ext_attachment: Option, pub ext_unknown: Vec, } pub mod ext { - use crate::{ - common::{ZExtZ64, ZExtZBuf}, - zextz64, zextzbuf, - }; + use crate::{common::ZExtZBuf, zextzbuf}; /// # SourceInfo extension /// Used to carry additional information about the source of data pub type SourceInfo = zextzbuf!(0x1, false); pub type SourceInfoType = crate::zenoh::ext::SourceInfoType<{ SourceInfo::ID }>; - /// # Consolidation extension - pub type Consolidation = zextz64!(0x2, true); - pub type ConsolidationType = crate::zenoh::query::Consolidation; - /// # QueryBody extension /// Used to carry a body attached to the query /// Shared Memory extension is automatically defined by ValueType extension if @@ -137,6 +132,7 @@ impl Query { const MIN: usize = 2; const MAX: usize = 16; + let consolidation = Consolidation::rand(); let parameters: String = if rng.gen_bool(0.5) { let len = rng.gen_range(MIN..MAX); Alphanumeric.sample_string(&mut rng, len) @@ -144,7 +140,6 @@ impl Query { String::new() }; let ext_sinfo = rng.gen_bool(0.5).then_some(ext::SourceInfoType::rand()); - let ext_consolidation = Consolidation::rand(); let ext_body = rng.gen_bool(0.5).then_some(ext::QueryBodyType::rand()); let ext_attachment = rng.gen_bool(0.5).then_some(ext::AttachmentType::rand()); let mut ext_unknown = Vec::new(); @@ -156,9 +151,9 @@ impl Query { } Self { + consolidation, parameters, ext_sinfo, - ext_consolidation, ext_body, ext_attachment, ext_unknown, diff --git 
a/commons/zenoh-protocol/src/zenoh/reply.rs b/commons/zenoh-protocol/src/zenoh/reply.rs index 2395e1e9b2..7cbab4ca0a 100644 --- a/commons/zenoh-protocol/src/zenoh/reply.rs +++ b/commons/zenoh-protocol/src/zenoh/reply.rs @@ -11,115 +11,61 @@ // Contributors: // ZettaScale Zenoh Team, // -use crate::{common::ZExtUnknown, core::Encoding}; +use crate::{ + common::ZExtUnknown, + zenoh::{query::Consolidation, PushBody}, +}; use alloc::vec::Vec; -use uhlc::Timestamp; -use zenoh_buffers::ZBuf; /// # Reply message /// /// ```text /// Flags: -/// - T: Timestamp If T==1 then the timestamp if present -/// - E: Encoding If E==1 then the encoding is present +/// - C: Consolidation if C==1 then consolidation is present +/// - X: Reserved /// - Z: Extension If Z==1 then at least one extension is present /// /// 7 6 5 4 3 2 1 0 /// +-+-+-+-+-+-+-+-+ -/// |Z|E|T| REPLY | +/// |Z|X|C| REPLY | /// +-+-+-+---------+ -/// ~ ts: ~ if T==1 -/// +---------------+ -/// ~ encoding ~ if E==1 +/// % consolidation % if C==1 /// +---------------+ /// ~ [repl_exts] ~ if Z==1 /// +---------------+ -/// ~ pl: ~ -- Payload +/// ~ ReplyBody ~ -- Payload /// +---------------+ /// ``` pub mod flag { - pub const T: u8 = 1 << 5; // 0x20 Timestamp if T==0 then the timestamp if present - pub const E: u8 = 1 << 6; // 0x40 Encoding if E==1 then the encoding is present + pub const C: u8 = 1 << 5; // 0x20 Consolidation if C==1 then consolidation is present + // pub const X: u8 = 1 << 6; // 0x40 Reserved pub const Z: u8 = 1 << 7; // 0x80 Extensions if Z==1 then an extension will follow } #[derive(Debug, Clone, PartialEq, Eq)] pub struct Reply { - pub timestamp: Option, - pub encoding: Encoding, - pub ext_sinfo: Option, - pub ext_consolidation: ext::ConsolidationType, - #[cfg(feature = "shared-memory")] - pub ext_shm: Option, - pub ext_attachment: Option, + pub consolidation: Consolidation, pub ext_unknown: Vec, - pub payload: ZBuf, + pub payload: ReplyBody, } -pub mod ext { - #[cfg(feature = "shared-memory")] - 
use crate::{common::ZExtUnit, zextunit}; - use crate::{ - common::{ZExtZ64, ZExtZBuf}, - zextz64, zextzbuf, - }; - - /// # SourceInfo extension - /// Used to carry additional information about the source of data - pub type SourceInfo = zextzbuf!(0x1, false); - pub type SourceInfoType = crate::zenoh::ext::SourceInfoType<{ SourceInfo::ID }>; - - /// # Consolidation extension - pub type Consolidation = zextz64!(0x2, true); - pub type ConsolidationType = crate::zenoh::query::ext::ConsolidationType; - - /// # Shared Memory extension - /// Used to carry additional information about the shared-memory layour of data - #[cfg(feature = "shared-memory")] - pub type Shm = zextunit!(0x3, true); - #[cfg(feature = "shared-memory")] - pub type ShmType = crate::zenoh::ext::ShmType<{ Shm::ID }>; - - /// # User attachment - pub type Attachment = zextzbuf!(0x4, false); - pub type AttachmentType = crate::zenoh::ext::AttachmentType<{ Attachment::ID }>; -} +pub type ReplyBody = PushBody; impl Reply { #[cfg(feature = "test")] pub fn rand() -> Self { - use crate::{common::iext, core::ZenohId, zenoh::Consolidation}; use rand::Rng; let mut rng = rand::thread_rng(); - let timestamp = rng.gen_bool(0.5).then_some({ - let time = uhlc::NTP64(rng.gen()); - let id = uhlc::ID::try_from(ZenohId::rand().to_le_bytes()).unwrap(); - Timestamp::new(time, id) - }); - let encoding = Encoding::rand(); - let ext_sinfo = rng.gen_bool(0.5).then_some(ext::SourceInfoType::rand()); - let ext_consolidation = Consolidation::rand(); - #[cfg(feature = "shared-memory")] - let ext_shm = rng.gen_bool(0.5).then_some(ext::ShmType::rand()); - let ext_attachment = rng.gen_bool(0.5).then_some(ext::AttachmentType::rand()); + let payload = ReplyBody::rand(); + let consolidation = Consolidation::rand(); let mut ext_unknown = Vec::new(); for _ in 0..rng.gen_range(0..4) { - ext_unknown.push(ZExtUnknown::rand2( - iext::mid(ext::Attachment::ID) + 1, - false, - )); + ext_unknown.push(ZExtUnknown::rand2(1, false)); } - let payload = 
ZBuf::rand(rng.gen_range(1..=64)); Self { - timestamp, - encoding, - ext_sinfo, - ext_consolidation, - #[cfg(feature = "shared-memory")] - ext_shm, - ext_attachment, + consolidation, ext_unknown, payload, } diff --git a/io/zenoh-transport/src/common/stats.rs b/io/zenoh-transport/src/common/stats.rs index f095a58273..aaf39641c0 100644 --- a/io/zenoh-transport/src/common/stats.rs +++ b/io/zenoh-transport/src/common/stats.rs @@ -208,6 +208,10 @@ stats_struct! { # TYPE "counter" pub tx_z_del_msgs DiscriminatedStats, + # HELP "Counter of sent bytes in zenoh del message attachments." + # TYPE "counter" + pub tx_z_del_pl_bytes DiscriminatedStats, + # HELP "Counter of sent zenoh query messages." # TYPE "counter" pub tx_z_query_msgs DiscriminatedStats, @@ -252,6 +256,10 @@ stats_struct! { # TYPE "counter" pub rx_z_del_msgs DiscriminatedStats, + # HELP "Counter of received bytes in zenoh del message attachments." + # TYPE "counter" + pub rx_z_del_pl_bytes DiscriminatedStats, + # HELP "Counter of received zenoh query messages." # TYPE "counter" pub rx_z_query_msgs DiscriminatedStats, diff --git a/io/zenoh-transport/src/shm.rs b/io/zenoh-transport/src/shm.rs index 04a8f502c4..8b0e93f494 100644 --- a/io/zenoh-transport/src/shm.rs +++ b/io/zenoh-transport/src/shm.rs @@ -21,6 +21,7 @@ use zenoh_protocol::{ err::{ext::ErrBodyType, Err}, ext::ShmType, query::{ext::QueryBodyType, Query}, + reply::ReplyBody, PushBody, Put, Reply, RequestBody, ResponseBody, }, }; @@ -105,17 +106,17 @@ impl MapShm for Query { // Impl - Reply impl MapShm for Reply { fn map_to_shminfo(&mut self) -> ZResult { - let Self { - payload, ext_shm, .. - } = self; - map_to_shminfo!(payload, ext_shm) + match &mut self.payload { + ReplyBody::Put(b) => b.map_to_shminfo(), + _ => Ok(false), + } } fn map_to_shmbuf(&mut self, shmr: &RwLock) -> ZResult { - let Self { - payload, ext_shm, .. 
- } = self; - map_to_shmbuf!(payload, ext_shm, shmr) + match &mut self.payload { + ReplyBody::Put(b) => b.map_to_shmbuf(shmr), + _ => Ok(false), + } } } diff --git a/zenoh/src/net/routing/dispatcher/pubsub.rs b/zenoh/src/net/routing/dispatcher/pubsub.rs index da6ae0c371..ffe2d3ccca 100644 --- a/zenoh/src/net/routing/dispatcher/pubsub.rs +++ b/zenoh/src/net/routing/dispatcher/pubsub.rs @@ -413,10 +413,19 @@ macro_rules! inc_stats { match &$body { PushBody::Put(p) => { stats.[<$txrx _z_put_msgs>].[](1); - stats.[<$txrx _z_put_pl_bytes>].[](p.payload.len()); + let mut n = p.payload.len(); + if let Some(a) = p.ext_attachment.as_ref() { + n += a.buffer.len(); + } + stats.[<$txrx _z_put_pl_bytes>].[](n); } - PushBody::Del(_) => { + PushBody::Del(d) => { stats.[<$txrx _z_del_msgs>].[](1); + let mut n = 0; + if let Some(a) = d.ext_attachment.as_ref() { + n += a.buffer.len(); + } + stats.[<$txrx _z_del_pl_bytes>].[](n); } } } diff --git a/zenoh/src/net/routing/dispatcher/queries.rs b/zenoh/src/net/routing/dispatcher/queries.rs index 9645af0f74..a6748650ab 100644 --- a/zenoh/src/net/routing/dispatcher/queries.rs +++ b/zenoh/src/net/routing/dispatcher/queries.rs @@ -21,16 +21,16 @@ use async_trait::async_trait; use std::collections::HashMap; use std::sync::{Arc, Weak}; use zenoh_config::WhatAmI; -use zenoh_protocol::core::key_expr::keyexpr; -use zenoh_protocol::network::declare::queryable::ext::QueryableInfo; +use zenoh_protocol::zenoh::reply::ReplyBody; +use zenoh_protocol::zenoh::Put; use zenoh_protocol::{ - core::{Encoding, WireExpr}, + core::{key_expr::keyexpr, Encoding, WireExpr}, network::{ - declare::ext, + declare::{ext, queryable::ext::QueryableInfo}, request::{ext::TargetType, Request, RequestId}, response::{self, ext::ResponderIdType, Response, ResponseFinal}, }, - zenoh::{reply::ext::ConsolidationType, Reply, RequestBody, ResponseBody}, + zenoh::{query::Consolidation, Reply, RequestBody, ResponseBody}, }; use zenoh_sync::get_mut_unchecked; use zenoh_util::Timed; 
@@ -464,11 +464,29 @@ macro_rules! inc_res_stats { match &$body { ResponseBody::Put(p) => { stats.[<$txrx _z_put_msgs>].[](1); - stats.[<$txrx _z_put_pl_bytes>].[](p.payload.len()); + let mut n = p.payload.len(); + if let Some(a) = p.ext_attachment.as_ref() { + n += a.buffer.len(); + } + stats.[<$txrx _z_put_pl_bytes>].[](n); } ResponseBody::Reply(r) => { stats.[<$txrx _z_reply_msgs>].[](1); - stats.[<$txrx _z_reply_pl_bytes>].[](r.payload.len()); + let mut n = 0; + match &r.payload { + ReplyBody::Put(p) => { + if let Some(a) = p.ext_attachment.as_ref() { + n += a.buffer.len(); + } + n += p.payload.len(); + } + ReplyBody::Del(d) => { + if let Some(a) = d.ext_attachment.as_ref() { + n += a.buffer.len(); + } + } + } + stats.[<$txrx _z_reply_pl_bytes>].[](n); } ResponseBody::Err(e) => { stats.[<$txrx _z_reply_msgs>].[](1); @@ -537,15 +555,19 @@ pub fn route_query( for (wexpr, payload) in local_replies { let payload = ResponseBody::Reply(Reply { - timestamp: None, - encoding: Encoding::default(), - ext_sinfo: None, - ext_consolidation: ConsolidationType::default(), - #[cfg(feature = "shared-memory")] - ext_shm: None, - ext_attachment: None, // @TODO: expose it in the API - ext_unknown: vec![], - payload, + consolidation: Consolidation::default(), // @TODO: handle Del case + ext_unknown: vec![], // @TODO: handle unknown extensions + payload: ReplyBody::Put(Put { + // @TODO: handle Del case + timestamp: None, // @TODO: handle timestamp + encoding: Encoding::default(), // @TODO: handle encoding + ext_sinfo: None, // @TODO: handle source info + ext_attachment: None, // @TODO: expose it in the API + #[cfg(feature = "shared-memory")] + ext_shm: None, + ext_unknown: vec![], // @TODO: handle unknown extensions + payload, + }), }); #[cfg(feature = "stats")] if !admin { diff --git a/zenoh/src/queryable.rs b/zenoh/src/queryable.rs index 9ee73d1641..4e9f4914dd 100644 --- a/zenoh/src/queryable.rs +++ b/zenoh/src/queryable.rs @@ -30,11 +30,11 @@ use std::future::Ready; use 
std::ops::Deref; use std::sync::Arc; use zenoh_core::{AsyncResolve, Resolvable, SyncResolve}; -use zenoh_protocol::core::WireExpr; -use zenoh_protocol::network::{response, Mapping, RequestId, Response, ResponseFinal}; -use zenoh_protocol::zenoh::ext::ValueType; -use zenoh_protocol::zenoh::reply::ext::ConsolidationType; -use zenoh_protocol::zenoh::{self, ResponseBody}; +use zenoh_protocol::{ + core::WireExpr, + network::{response, Mapping, RequestId, Response, ResponseFinal}, + zenoh::{self, ext::ValueType, reply::ReplyBody, Del, Put, ResponseBody}, +}; use zenoh_result::ZResult; pub(crate) struct QueryInner { @@ -206,16 +206,33 @@ impl SyncResolve for ReplyBuilder<'_> { source_id: None, source_sn: None, }; - #[allow(unused_mut)] - let mut ext_attachment = None; - #[cfg(feature = "unstable")] - { - data_info.source_id = source_info.source_id; - data_info.source_sn = source_info.source_sn; - if let Some(attachment) = attachment { - ext_attachment = Some(attachment.into()); - } + + // Use a macro for inferring the proper const extension ID between Put and Del cases + macro_rules! 
ext_attachment { + () => {{ + #[allow(unused_mut)] + let mut ext_attachment = None; + #[cfg(feature = "unstable")] + { + data_info.source_id = source_info.source_id; + data_info.source_sn = source_info.source_sn; + if let Some(attachment) = attachment { + ext_attachment = Some(attachment.into()); + } + } + ext_attachment + }}; } + + let ext_sinfo = if data_info.source_id.is_some() || data_info.source_sn.is_some() { + Some(zenoh::put::ext::SourceInfoType { + zid: data_info.source_id.unwrap_or_default(), + eid: 0, // @TODO use proper EntityId (#703) + sn: data_info.source_sn.unwrap_or_default() as u32, + }) + } else { + None + }; self.query.inner.primitives.send_response(Response { rid: self.query.inner.qid, wire_expr: WireExpr { @@ -224,24 +241,26 @@ impl SyncResolve for ReplyBuilder<'_> { mapping: Mapping::Sender, }, payload: ResponseBody::Reply(zenoh::Reply { - timestamp: data_info.timestamp, - encoding: data_info.encoding.unwrap_or_default(), - ext_sinfo: if data_info.source_id.is_some() || data_info.source_sn.is_some() - { - Some(zenoh::reply::ext::SourceInfoType { - zid: data_info.source_id.unwrap_or_default(), - eid: 0, // @TODO use proper EntityId (#703) - sn: data_info.source_sn.unwrap_or_default() as u32, - }) - } else { - None - }, - ext_consolidation: ConsolidationType::default(), - #[cfg(feature = "shared-memory")] - ext_shm: None, - ext_attachment, + consolidation: zenoh::Consolidation::default(), ext_unknown: vec![], - payload, + payload: match kind { + SampleKind::Put => ReplyBody::Put(Put { + timestamp: data_info.timestamp, + encoding: data_info.encoding.unwrap_or_default(), + ext_sinfo, + #[cfg(feature = "shared-memory")] + ext_shm: None, + ext_attachment: ext_attachment!(), + ext_unknown: vec![], + payload, + }), + SampleKind::Delete => ReplyBody::Del(Del { + timestamp, + ext_sinfo, + ext_attachment: ext_attachment!(), + ext_unknown: vec![], + }), + }, }), ext_qos: response::ext::QoSType::response_default(), ext_tstamp: None, diff --git 
a/zenoh/src/session.rs b/zenoh/src/session.rs index d52c446d3d..46cfd5e499 100644 --- a/zenoh/src/session.rs +++ b/zenoh/src/session.rs @@ -57,6 +57,9 @@ use zenoh_config::unwrap_or_default; use zenoh_core::{zconfigurable, zread, Resolve, ResolveClosure, ResolveFuture, SyncResolve}; use zenoh_protocol::network::AtomicRequestId; use zenoh_protocol::network::RequestId; +use zenoh_protocol::zenoh::reply::ReplyBody; +use zenoh_protocol::zenoh::Del; +use zenoh_protocol::zenoh::Put; use zenoh_protocol::{ core::{ key_expr::{keyexpr, OwnedKeyExpr}, @@ -73,10 +76,7 @@ use zenoh_protocol::{ Mapping, Push, Response, ResponseFinal, }, zenoh::{ - query::{ - self, - ext::{ConsolidationType, QueryBodyType}, - }, + query::{self, ext::QueryBodyType, Consolidation}, Pull, PushBody, RequestBody, ResponseBody, }, }; @@ -1808,9 +1808,9 @@ impl Session { ext_budget: None, ext_timeout: Some(timeout), payload: RequestBody::Query(zenoh_protocol::zenoh::Query { + consolidation: consolidation.into(), parameters: selector.parameters().to_string(), ext_sinfo: None, - ext_consolidation: consolidation.into(), ext_body: value.as_ref().map(|v| query::ext::QueryBodyType { #[cfg(feature = "shared-memory")] ext_shm: None, @@ -1851,7 +1851,7 @@ impl Session { parameters: &str, qid: RequestId, _target: TargetType, - _consolidation: ConsolidationType, + _consolidation: Consolidation, body: Option, #[cfg(feature = "unstable")] attachment: Option, ) { @@ -2233,7 +2233,7 @@ impl Primitives for Session { &m.parameters, msg.id, msg.ext_target, - m.ext_consolidation, + m.consolidation, m.ext_body, #[cfg(feature = "unstable")] m.ext_attachment.map(Into::into), @@ -2341,19 +2341,63 @@ impl Primitives for Session { } None => key_expr, }; - let info = DataInfo { - kind: SampleKind::Put, - encoding: Some(m.encoding), - timestamp: m.timestamp, - source_id: m.ext_sinfo.as_ref().map(|i| i.zid), - source_sn: m.ext_sinfo.as_ref().map(|i| i.sn as u64), + + struct Ret { + payload: ZBuf, + info: DataInfo, + #[cfg(feature 
= "unstable")] + attachment: Option, + } + let Ret { + payload, + info, + #[cfg(feature = "unstable")] + attachment, + } = match m.payload { + ReplyBody::Put(Put { + timestamp, + encoding, + ext_sinfo, + ext_attachment: _attachment, + payload, + .. + }) => Ret { + payload, + info: DataInfo { + kind: SampleKind::Put, + encoding: Some(encoding), + timestamp, + source_id: ext_sinfo.as_ref().map(|i| i.zid), + source_sn: ext_sinfo.as_ref().map(|i| i.sn as u64), + }, + #[cfg(feature = "unstable")] + attachment: _attachment.map(Into::into), + }, + ReplyBody::Del(Del { + timestamp, + ext_sinfo, + ext_attachment: _attachment, + .. + }) => Ret { + payload: ZBuf::empty(), + info: DataInfo { + kind: SampleKind::Delete, + encoding: None, + timestamp, + source_id: ext_sinfo.as_ref().map(|i| i.zid), + source_sn: ext_sinfo.as_ref().map(|i| i.sn as u64), + }, + #[cfg(feature = "unstable")] + attachment: _attachment.map(Into::into), + }, }; + #[allow(unused_mut)] let mut sample = - Sample::with_info(key_expr.into_owned(), m.payload, Some(info)); + Sample::with_info(key_expr.into_owned(), payload, Some(info)); #[cfg(feature = "unstable")] { - sample.attachment = m.ext_attachment.map(Into::into); + sample.attachment = attachment; } let new_reply = Reply { sample: Ok(sample), diff --git a/zenoh/tests/session.rs b/zenoh/tests/session.rs index c2cec7c627..f727ad60c3 100644 --- a/zenoh/tests/session.rs +++ b/zenoh/tests/session.rs @@ -153,10 +153,31 @@ async fn test_session_qryrep(peer01: &Session, peer02: &Session, reliability: Re let c_msgs = msgs.clone(); let qbl = ztimeout!(peer01 .declare_queryable(key_expr) - .callback(move |sample| { + .callback(move |query| { c_msgs.fetch_add(1, Ordering::Relaxed); - let rep = Sample::try_from(key_expr, vec![0u8; size]).unwrap(); - task::block_on(async { ztimeout!(sample.reply(Ok(rep)).res_async()).unwrap() }); + match query.parameters() { + "ok_put" => { + let mut rep = Sample::try_from(key_expr, vec![0u8; size]).unwrap(); + rep.kind = 
SampleKind::Put; + task::block_on(async { + ztimeout!(query.reply(Ok(rep)).res_async()).unwrap() + }); + } + "ok_del" => { + let mut rep = Sample::try_from(key_expr, vec![0u8; size]).unwrap(); + rep.kind = SampleKind::Delete; + task::block_on(async { + ztimeout!(query.reply(Ok(rep)).res_async()).unwrap() + }); + } + "err" => { + let rep = Value::from(vec![0u8; size]); + task::block_on(async { + ztimeout!(query.reply(Err(rep)).res_async()).unwrap() + }); + } + _ => panic!("Unknown query parameter"), + } }) .res_async()) .unwrap(); @@ -165,12 +186,15 @@ async fn test_session_qryrep(peer01: &Session, peer02: &Session, reliability: Re task::sleep(SLEEP).await; // Get data - println!("[QR][02c] Getting on peer02 session. {msg_count} msgs."); + println!("[QR][02c] Getting Ok(Put) on peer02 session. {msg_count} msgs."); let mut cnt = 0; for _ in 0..msg_count { - let rs = ztimeout!(peer02.get(key_expr).res_async()).unwrap(); + let selector = format!("{}?ok_put", key_expr); + let rs = ztimeout!(peer02.get(selector).res_async()).unwrap(); while let Ok(s) = ztimeout!(rs.recv_async()) { - assert_eq!(s.sample.unwrap().value.payload.len(), size); + let s = s.sample.unwrap(); + assert_eq!(s.kind, SampleKind::Put); + assert_eq!(s.value.payload.len(), size); cnt += 1; } } @@ -178,6 +202,41 @@ async fn test_session_qryrep(peer01: &Session, peer02: &Session, reliability: Re assert_eq!(msgs.load(Ordering::Relaxed), msg_count); assert_eq!(cnt, msg_count); + msgs.store(0, Ordering::Relaxed); + + println!("[QR][03c] Getting Ok(Delete) on peer02 session. {msg_count} msgs."); + let mut cnt = 0; + for _ in 0..msg_count { + let selector = format!("{}?ok_del", key_expr); + let rs = ztimeout!(peer02.get(selector).res_async()).unwrap(); + while let Ok(s) = ztimeout!(rs.recv_async()) { + let s = s.sample.unwrap(); + assert_eq!(s.kind, SampleKind::Delete); + assert_eq!(s.value.payload.len(), 0); + cnt += 1; + } + } + println!("[QR][03c] Got on peer02 session. 
{cnt}/{msg_count} msgs."); + assert_eq!(msgs.load(Ordering::Relaxed), msg_count); + assert_eq!(cnt, msg_count); + + msgs.store(0, Ordering::Relaxed); + + println!("[QR][04c] Getting Err() on peer02 session. {msg_count} msgs."); + let mut cnt = 0; + for _ in 0..msg_count { + let selector = format!("{}?err", key_expr); + let rs = ztimeout!(peer02.get(selector).res_async()).unwrap(); + while let Ok(s) = ztimeout!(rs.recv_async()) { + let e = s.sample.unwrap_err(); + assert_eq!(e.payload.len(), size); + cnt += 1; + } + } + println!("[QR][04c] Got on peer02 session. {cnt}/{msg_count} msgs."); + assert_eq!(msgs.load(Ordering::Relaxed), msg_count); + assert_eq!(cnt, msg_count); + println!("[PS][03c] Unqueryable on peer01 session"); ztimeout!(qbl.undeclare().res_async()).unwrap(); From d6ffebf080958157ac141c92b51b9fe00c075227 Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Thu, 15 Feb 2024 11:58:21 +0100 Subject: [PATCH 002/124] Clean-up of protocol types (#729) * Update Reply protocol definition and codec * Make consolidation a flag in Query/Reply * Fix wrong Consolidation cast in codec * Apply Reply changes to routing * Fix shared-memory feature * Fix stats * Bump Zenoh Protocol Version * Add query/reply ok(put|del)/err() tests * Clean-up of code * Default CongestionControl for Push is Drop * Fix Priority::DEFAULT typo * Define DEFAULT consts * ConsolidationMode moved into the API * Remove unused Ack message * Fix Ack leftovers * CongestionControl::DEFAULT * QoSType::DEFAULT * Mapping::DEFAULT * Encoding::DEFAULT * QueryTarget::DEFAULT * NodeType::DEFAULT * QueryableInfo::DEFAULT * Remove ConsolidationMode from zenoh-protocol * ConsolidationType::DEFAULT * Remove dead code * Remove dead code * Move SampleKind to sample.rs * Cleanup SubMode * Cleanup QueryTarget * Remove emptyline --- commons/zenoh-codec/benches/codec.rs | 48 +++---- commons/zenoh-codec/src/common/mod.rs | 1 - commons/zenoh-codec/src/common/priority.rs | 66 --------- 
commons/zenoh-codec/src/core/mod.rs | 1 - commons/zenoh-codec/src/core/property.rs | 84 ----------- commons/zenoh-codec/src/core/wire_expr.rs | 2 +- commons/zenoh-codec/src/network/declare.rs | 32 ++--- commons/zenoh-codec/src/network/mod.rs | 2 +- commons/zenoh-codec/src/network/oam.rs | 7 +- commons/zenoh-codec/src/network/push.rs | 14 +- commons/zenoh-codec/src/network/request.rs | 20 +-- commons/zenoh-codec/src/network/response.rs | 15 +- commons/zenoh-codec/src/transport/fragment.rs | 6 +- commons/zenoh-codec/src/transport/frame.rs | 6 +- commons/zenoh-codec/src/transport/join.rs | 2 +- commons/zenoh-codec/src/transport/oam.rs | 6 +- commons/zenoh-codec/src/zenoh/ack.rs | 129 ----------------- commons/zenoh-codec/src/zenoh/mod.rs | 3 - commons/zenoh-codec/src/zenoh/put.rs | 6 +- commons/zenoh-codec/src/zenoh/query.rs | 6 +- commons/zenoh-codec/src/zenoh/reply.rs | 6 +- commons/zenoh-codec/tests/codec.rs | 5 - commons/zenoh-protocol/src/common/mod.rs | 15 -- commons/zenoh-protocol/src/core/encoding.rs | 2 + commons/zenoh-protocol/src/core/locator.rs | 64 --------- commons/zenoh-protocol/src/core/mod.rs | 98 ++----------- commons/zenoh-protocol/src/core/wire_expr.rs | 2 +- commons/zenoh-protocol/src/network/declare.rs | 28 +++- commons/zenoh-protocol/src/network/mod.rs | 44 +++--- commons/zenoh-protocol/src/network/request.rs | 13 +- commons/zenoh-protocol/src/transport/mod.rs | 12 +- commons/zenoh-protocol/src/zenoh/ack.rs | 84 ----------- commons/zenoh-protocol/src/zenoh/mod.rs | 19 +-- commons/zenoh-protocol/src/zenoh/query.rs | 14 +- examples/examples/z_pub_thr.rs | 2 +- io/zenoh-transport/src/common/batch.rs | 8 +- io/zenoh-transport/src/common/pipeline.rs | 16 +-- io/zenoh-transport/src/multicast/link.rs | 2 +- io/zenoh-transport/src/multicast/rx.rs | 4 +- io/zenoh-transport/src/shm.rs | 2 - .../src/unicast/establishment/cookie.rs | 1 - .../src/unicast/establishment/properties.rs | 132 ------------------ .../src/unicast/universal/rx.rs | 4 +- 
.../tests/multicast_compression.rs | 6 +- .../tests/multicast_transport.rs | 6 +- .../tests/unicast_compression.rs | 12 +- .../tests/unicast_concurrent.rs | 12 +- .../tests/unicast_defragmentation.rs | 16 +-- .../tests/unicast_intermittent.rs | 6 +- .../tests/unicast_priorities.rs | 4 +- io/zenoh-transport/tests/unicast_shm.rs | 12 +- .../tests/unicast_simultaneous.rs | 4 +- io/zenoh-transport/tests/unicast_transport.rs | 56 ++++---- zenoh-ext/src/subscriber_ext.rs | 8 +- zenoh/src/key_expr.rs | 4 +- zenoh/src/liveliness.rs | 6 +- zenoh/src/net/routing/dispatcher/pubsub.rs | 4 +- zenoh/src/net/routing/dispatcher/queries.rs | 29 ++-- zenoh/src/net/routing/dispatcher/resource.rs | 4 +- zenoh/src/net/routing/hat/client/pubsub.rs | 16 +-- zenoh/src/net/routing/hat/client/queries.rs | 12 +- .../net/routing/hat/linkstate_peer/network.rs | 2 +- .../net/routing/hat/linkstate_peer/pubsub.rs | 20 +-- .../net/routing/hat/linkstate_peer/queries.rs | 20 +-- zenoh/src/net/routing/hat/p2p_peer/gossip.rs | 2 +- zenoh/src/net/routing/hat/p2p_peer/pubsub.rs | 16 +-- zenoh/src/net/routing/hat/p2p_peer/queries.rs | 12 +- zenoh/src/net/routing/hat/router/network.rs | 2 +- zenoh/src/net/routing/hat/router/pubsub.rs | 36 ++--- zenoh/src/net/routing/hat/router/queries.rs | 36 ++--- zenoh/src/net/runtime/adminspace.rs | 14 +- zenoh/src/net/tests/tables.rs | 40 +++--- zenoh/src/prelude.rs | 4 +- zenoh/src/publication.rs | 6 +- zenoh/src/query.rs | 33 ++++- zenoh/src/queryable.rs | 8 +- zenoh/src/sample.rs | 40 +++++- zenoh/src/session.rs | 77 +++++----- zenoh/src/subscriber.rs | 17 --- 79 files changed, 510 insertions(+), 1125 deletions(-) delete mode 100644 commons/zenoh-codec/src/common/priority.rs delete mode 100644 commons/zenoh-codec/src/core/property.rs delete mode 100644 commons/zenoh-codec/src/zenoh/ack.rs delete mode 100644 commons/zenoh-protocol/src/zenoh/ack.rs delete mode 100644 io/zenoh-transport/src/unicast/establishment/properties.rs diff --git 
a/commons/zenoh-codec/benches/codec.rs b/commons/zenoh-codec/benches/codec.rs index 1c46a700a7..34c9313a7f 100644 --- a/commons/zenoh-codec/benches/codec.rs +++ b/commons/zenoh-codec/benches/codec.rs @@ -75,19 +75,19 @@ fn criterion_benchmark(c: &mut Criterion) { let codec = Zenoh080::new(); let frame = FrameHeader { - reliability: Reliability::default(), + reliability: Reliability::DEFAULT, sn: TransportSn::MIN, - ext_qos: zenoh_protocol::transport::frame::ext::QoSType::default(), + ext_qos: zenoh_protocol::transport::frame::ext::QoSType::DEFAULT, }; let data = Push { wire_expr: WireExpr::empty(), - ext_qos: ext::QoSType::default(), + ext_qos: ext::QoSType::DEFAULT, ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, payload: PushBody::Put(Put { timestamp: None, - encoding: Encoding::default(), + encoding: Encoding::DEFAULT, ext_sinfo: None, #[cfg(feature = "shared-memory")] ext_shm: None, @@ -121,19 +121,19 @@ fn criterion_benchmark(c: &mut Criterion) { let codec = Zenoh080::new(); let frame = FrameHeader { - reliability: Reliability::default(), + reliability: Reliability::DEFAULT, sn: TransportSn::MIN, - ext_qos: zenoh_protocol::transport::frame::ext::QoSType::default(), + ext_qos: zenoh_protocol::transport::frame::ext::QoSType::DEFAULT, }; let data = Push { wire_expr: WireExpr::empty(), - ext_qos: ext::QoSType::default(), + ext_qos: ext::QoSType::DEFAULT, ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, payload: PushBody::Put(Put { timestamp: None, - encoding: Encoding::default(), + encoding: Encoding::DEFAULT, ext_sinfo: None, #[cfg(feature = "shared-memory")] ext_shm: None, @@ -162,19 +162,19 @@ fn criterion_benchmark(c: &mut Criterion) { let codec = Zenoh080::new(); let frame = FrameHeader { - reliability: Reliability::default(), + reliability: Reliability::DEFAULT, sn: TransportSn::MIN, - ext_qos: zenoh_protocol::transport::frame::ext::QoSType::default(), + 
ext_qos: zenoh_protocol::transport::frame::ext::QoSType::DEFAULT, }; let data = Push { wire_expr: WireExpr::empty(), - ext_qos: ext::QoSType::default(), + ext_qos: ext::QoSType::DEFAULT, ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, payload: PushBody::Put(Put { timestamp: None, - encoding: Encoding::default(), + encoding: Encoding::DEFAULT, ext_sinfo: None, #[cfg(feature = "shared-memory")] ext_shm: None, @@ -210,12 +210,12 @@ fn criterion_benchmark(c: &mut Criterion) { let data = Push { wire_expr: WireExpr::empty(), - ext_qos: ext::QoSType::default(), + ext_qos: ext::QoSType::DEFAULT, ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, payload: PushBody::Put(Put { timestamp: None, - encoding: Encoding::default(), + encoding: Encoding::DEFAULT, ext_sinfo: None, #[cfg(feature = "shared-memory")] ext_shm: None, @@ -238,12 +238,12 @@ fn criterion_benchmark(c: &mut Criterion) { let data = Push { wire_expr: WireExpr::empty(), - ext_qos: ext::QoSType::default(), + ext_qos: ext::QoSType::DEFAULT, ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, payload: PushBody::Put(Put { timestamp: None, - encoding: Encoding::default(), + encoding: Encoding::DEFAULT, ext_sinfo: None, #[cfg(feature = "shared-memory")] ext_shm: None, @@ -277,12 +277,12 @@ fn criterion_benchmark(c: &mut Criterion) { let data = Push { wire_expr: WireExpr::empty(), - ext_qos: ext::QoSType::default(), + ext_qos: ext::QoSType::DEFAULT, ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, payload: PushBody::Put(Put { timestamp: None, - encoding: Encoding::default(), + encoding: Encoding::DEFAULT, ext_sinfo: None, #[cfg(feature = "shared-memory")] ext_shm: None, diff --git a/commons/zenoh-codec/src/common/mod.rs b/commons/zenoh-codec/src/common/mod.rs index 4c25c93241..f34f9872bf 100644 --- 
a/commons/zenoh-codec/src/common/mod.rs +++ b/commons/zenoh-codec/src/common/mod.rs @@ -12,4 +12,3 @@ // ZettaScale Zenoh Team, // pub mod extension; -mod priority; diff --git a/commons/zenoh-codec/src/common/priority.rs b/commons/zenoh-codec/src/common/priority.rs deleted file mode 100644 index 776229971e..0000000000 --- a/commons/zenoh-codec/src/common/priority.rs +++ /dev/null @@ -1,66 +0,0 @@ -// -// Copyright (c) 2023 ZettaScale Technology -// -// This program and the accompanying materials are made available under the -// terms of the Eclipse Public License 2.0 which is available at -// http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 -// which is available at https://www.apache.org/licenses/LICENSE-2.0. -// -// SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 -// -// Contributors: -// ZettaScale Zenoh Team, -// -use crate::{RCodec, WCodec, Zenoh080, Zenoh080Header}; -use core::convert::TryInto; -use zenoh_buffers::{ - reader::{DidntRead, Reader}, - writer::{DidntWrite, Writer}, -}; -use zenoh_protocol::{common::imsg, core::Priority}; - -impl WCodec<&Priority, &mut W> for Zenoh080 -where - W: Writer, -{ - type Output = Result<(), DidntWrite>; - - fn write(self, writer: &mut W, x: &Priority) -> Self::Output { - // Header - let header = imsg::id::PRIORITY | ((*x as u8) << imsg::HEADER_BITS); - self.write(&mut *writer, header)?; - Ok(()) - } -} - -impl RCodec for Zenoh080 -where - R: Reader, -{ - type Error = DidntRead; - - fn read(self, reader: &mut R) -> Result { - let header: u8 = self.read(&mut *reader)?; - let codec = Zenoh080Header::new(header); - - codec.read(reader) - } -} - -impl RCodec for Zenoh080Header -where - R: Reader, -{ - type Error = DidntRead; - - fn read(self, _reader: &mut R) -> Result { - if imsg::mid(self.header) != imsg::id::PRIORITY { - return Err(DidntRead); - } - - let priority: Priority = (imsg::flags(self.header) >> imsg::HEADER_BITS) - .try_into() - .map_err(|_| DidntRead)?; - Ok(priority) - } -} diff --git 
a/commons/zenoh-codec/src/core/mod.rs b/commons/zenoh-codec/src/core/mod.rs index 1f48def695..c8e19f057f 100644 --- a/commons/zenoh-codec/src/core/mod.rs +++ b/commons/zenoh-codec/src/core/mod.rs @@ -13,7 +13,6 @@ // mod encoding; mod locator; -mod property; #[cfg(feature = "shared-memory")] mod shm; mod timestamp; diff --git a/commons/zenoh-codec/src/core/property.rs b/commons/zenoh-codec/src/core/property.rs deleted file mode 100644 index bb7f760208..0000000000 --- a/commons/zenoh-codec/src/core/property.rs +++ /dev/null @@ -1,84 +0,0 @@ -// -// Copyright (c) 2023 ZettaScale Technology -// -// This program and the accompanying materials are made available under the -// terms of the Eclipse Public License 2.0 which is available at -// http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 -// which is available at https://www.apache.org/licenses/LICENSE-2.0. -// -// SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 -// -// Contributors: -// ZettaScale Zenoh Team, -// -use crate::{RCodec, WCodec, Zenoh080}; -use alloc::vec::Vec; -use zenoh_buffers::{ - reader::{DidntRead, Reader}, - writer::{DidntWrite, Writer}, -}; -use zenoh_protocol::core::Property; - -impl WCodec<&Property, &mut W> for Zenoh080 -where - W: Writer, -{ - type Output = Result<(), DidntWrite>; - - fn write(self, writer: &mut W, x: &Property) -> Self::Output { - let Property { key, value } = x; - - self.write(&mut *writer, key)?; - self.write(&mut *writer, value.as_slice())?; - Ok(()) - } -} - -impl RCodec for Zenoh080 -where - R: Reader, -{ - type Error = DidntRead; - - fn read(self, reader: &mut R) -> Result { - let key: u64 = self.read(&mut *reader)?; - let value: Vec = self.read(&mut *reader)?; - - Ok(Property { key, value }) - } -} - -impl WCodec<&[Property], &mut W> for Zenoh080 -where - W: Writer, -{ - type Output = Result<(), DidntWrite>; - - fn write(self, writer: &mut W, x: &[Property]) -> Self::Output { - self.write(&mut *writer, x.len())?; - for p in x.iter() { - 
self.write(&mut *writer, p)?; - } - - Ok(()) - } -} - -impl RCodec, &mut R> for Zenoh080 -where - R: Reader, -{ - type Error = DidntRead; - - fn read(self, reader: &mut R) -> Result, Self::Error> { - let num: usize = self.read(&mut *reader)?; - - let mut ps = Vec::with_capacity(num); - for _ in 0..num { - let p: Property = self.read(&mut *reader)?; - ps.push(p); - } - - Ok(ps) - } -} diff --git a/commons/zenoh-codec/src/core/wire_expr.rs b/commons/zenoh-codec/src/core/wire_expr.rs index 6caba6c8c7..aa6f77b379 100644 --- a/commons/zenoh-codec/src/core/wire_expr.rs +++ b/commons/zenoh-codec/src/core/wire_expr.rs @@ -65,7 +65,7 @@ where Ok(WireExpr { scope, suffix: suffix.into(), - mapping: Mapping::default(), + mapping: Mapping::DEFAULT, }) } } diff --git a/commons/zenoh-codec/src/network/declare.rs b/commons/zenoh-codec/src/network/declare.rs index 20916dc359..cf92b27c17 100644 --- a/commons/zenoh-codec/src/network/declare.rs +++ b/commons/zenoh-codec/src/network/declare.rs @@ -102,16 +102,16 @@ where // Header let mut header = id::DECLARE; - let mut n_exts = ((ext_qos != &declare::ext::QoSType::default()) as u8) + let mut n_exts = ((ext_qos != &declare::ext::QoSType::DEFAULT) as u8) + (ext_tstamp.is_some() as u8) - + ((ext_nodeid != &declare::ext::NodeIdType::default()) as u8); + + ((ext_nodeid != &declare::ext::NodeIdType::DEFAULT) as u8); if n_exts != 0 { header |= declare::flag::Z; } self.write(&mut *writer, header)?; // Extensions - if ext_qos != &declare::ext::QoSType::default() { + if ext_qos != &declare::ext::QoSType::DEFAULT { n_exts -= 1; self.write(&mut *writer, (*ext_qos, n_exts != 0))?; } @@ -119,7 +119,7 @@ where n_exts -= 1; self.write(&mut *writer, (ts, n_exts != 0))?; } - if ext_nodeid != &declare::ext::NodeIdType::default() { + if ext_nodeid != &declare::ext::NodeIdType::DEFAULT { n_exts -= 1; self.write(&mut *writer, (*ext_nodeid, n_exts != 0))?; } @@ -157,9 +157,9 @@ where } // Extensions - let mut ext_qos = declare::ext::QoSType::default(); + 
let mut ext_qos = declare::ext::QoSType::DEFAULT; let mut ext_tstamp = None; - let mut ext_nodeid = declare::ext::NodeIdType::default(); + let mut ext_nodeid = declare::ext::NodeIdType::DEFAULT; let mut has_ext = imsg::has_flag(self.header, declare::flag::Z); while has_ext { @@ -340,11 +340,11 @@ where // Header let mut header = declare::id::D_SUBSCRIBER; - let mut n_exts = (ext_info != &subscriber::ext::SubscriberInfo::default()) as u8; + let mut n_exts = (ext_info != &subscriber::ext::SubscriberInfo::DEFAULT) as u8; if n_exts != 0 { header |= subscriber::flag::Z; } - if wire_expr.mapping != Mapping::default() { + if wire_expr.mapping != Mapping::DEFAULT { header |= subscriber::flag::M; } if wire_expr.has_suffix() { @@ -357,7 +357,7 @@ where self.write(&mut *writer, wire_expr)?; // Extensions - if ext_info != &subscriber::ext::SubscriberInfo::default() { + if ext_info != &subscriber::ext::SubscriberInfo::DEFAULT { n_exts -= 1; self.write(&mut *writer, (*ext_info, n_exts != 0))?; } @@ -402,7 +402,7 @@ where }; // Extensions - let mut ext_info = subscriber::ext::SubscriberInfo::default(); + let mut ext_info = subscriber::ext::SubscriberInfo::DEFAULT; let mut has_ext = imsg::has_flag(self.header, subscriber::flag::Z); while has_ext { @@ -524,11 +524,11 @@ where // Header let mut header = declare::id::D_QUERYABLE; - let mut n_exts = (ext_info != &queryable::ext::QueryableInfo::default()) as u8; + let mut n_exts = (ext_info != &queryable::ext::QueryableInfo::DEFAULT) as u8; if n_exts != 0 { header |= subscriber::flag::Z; } - if wire_expr.mapping != Mapping::default() { + if wire_expr.mapping != Mapping::DEFAULT { header |= subscriber::flag::M; } if wire_expr.has_suffix() { @@ -539,7 +539,7 @@ where // Body self.write(&mut *writer, id)?; self.write(&mut *writer, wire_expr)?; - if ext_info != &queryable::ext::QueryableInfo::default() { + if ext_info != &queryable::ext::QueryableInfo::DEFAULT { n_exts -= 1; self.write(&mut *writer, (*ext_info, n_exts != 0))?; } @@ -584,7 
+584,7 @@ where }; // Extensions - let mut ext_info = queryable::ext::QueryableInfo::default(); + let mut ext_info = queryable::ext::QueryableInfo::DEFAULT; let mut has_ext = imsg::has_flag(self.header, queryable::flag::Z); while has_ext { @@ -699,7 +699,7 @@ where // Header let mut header = declare::id::D_TOKEN; - if wire_expr.mapping != Mapping::default() { + if wire_expr.mapping != Mapping::DEFAULT { header |= subscriber::flag::M; } if wire_expr.has_suffix() { @@ -851,7 +851,7 @@ where // Header let mut header = declare::id::D_INTEREST; - if wire_expr.mapping != Mapping::default() { + if wire_expr.mapping != Mapping::DEFAULT { header |= subscriber::flag::M; } if wire_expr.has_suffix() { diff --git a/commons/zenoh-codec/src/network/mod.rs b/commons/zenoh-codec/src/network/mod.rs index c1f2489b88..dade13d362 100644 --- a/commons/zenoh-codec/src/network/mod.rs +++ b/commons/zenoh-codec/src/network/mod.rs @@ -58,7 +58,7 @@ where type Error = DidntRead; fn read(self, reader: &mut R) -> Result { - let codec = Zenoh080Reliability::new(Reliability::default()); + let codec = Zenoh080Reliability::new(Reliability::DEFAULT); codec.read(reader) } } diff --git a/commons/zenoh-codec/src/network/oam.rs b/commons/zenoh-codec/src/network/oam.rs index ff6daeb020..9751e9952d 100644 --- a/commons/zenoh-codec/src/network/oam.rs +++ b/commons/zenoh-codec/src/network/oam.rs @@ -52,8 +52,7 @@ where header |= iext::ENC_ZBUF; } } - let mut n_exts = - ((ext_qos != &ext::QoSType::default()) as u8) + (ext_tstamp.is_some() as u8); + let mut n_exts = ((ext_qos != &ext::QoSType::DEFAULT) as u8) + (ext_tstamp.is_some() as u8); if n_exts != 0 { header |= flag::Z; } @@ -63,7 +62,7 @@ where self.write(&mut *writer, id)?; // Extensions - if ext_qos != &ext::QoSType::default() { + if ext_qos != &ext::QoSType::DEFAULT { n_exts -= 1; self.write(&mut *writer, (*ext_qos, n_exts != 0))?; } @@ -115,7 +114,7 @@ where let id: OamId = self.codec.read(&mut *reader)?; // Extensions - let mut ext_qos = 
ext::QoSType::default(); + let mut ext_qos = ext::QoSType::DEFAULT; let mut ext_tstamp = None; let mut has_ext = imsg::has_flag(self.header, flag::Z); diff --git a/commons/zenoh-codec/src/network/push.rs b/commons/zenoh-codec/src/network/push.rs index 10a8489b29..b9ec2ba5db 100644 --- a/commons/zenoh-codec/src/network/push.rs +++ b/commons/zenoh-codec/src/network/push.rs @@ -44,13 +44,13 @@ where // Header let mut header = id::PUSH; - let mut n_exts = ((ext_qos != &ext::QoSType::default()) as u8) + let mut n_exts = ((ext_qos != &ext::QoSType::DEFAULT) as u8) + (ext_tstamp.is_some() as u8) - + ((ext_nodeid != &ext::NodeIdType::default()) as u8); + + ((ext_nodeid != &ext::NodeIdType::DEFAULT) as u8); if n_exts != 0 { header |= flag::Z; } - if wire_expr.mapping != Mapping::default() { + if wire_expr.mapping != Mapping::DEFAULT { header |= flag::M; } if wire_expr.has_suffix() { @@ -62,7 +62,7 @@ where self.write(&mut *writer, wire_expr)?; // Extensions - if ext_qos != &ext::QoSType::default() { + if ext_qos != &ext::QoSType::DEFAULT { n_exts -= 1; self.write(&mut *writer, (*ext_qos, n_exts != 0))?; } @@ -70,7 +70,7 @@ where n_exts -= 1; self.write(&mut *writer, (ts, n_exts != 0))?; } - if ext_nodeid != &ext::NodeIdType::default() { + if ext_nodeid != &ext::NodeIdType::DEFAULT { n_exts -= 1; self.write(&mut *writer, (*ext_nodeid, n_exts != 0))?; } @@ -116,9 +116,9 @@ where }; // Extensions - let mut ext_qos = ext::QoSType::default(); + let mut ext_qos = ext::QoSType::DEFAULT; let mut ext_tstamp = None; - let mut ext_nodeid = ext::NodeIdType::default(); + let mut ext_nodeid = ext::NodeIdType::DEFAULT; let mut has_ext = imsg::has_flag(self.header, flag::Z); while has_ext { diff --git a/commons/zenoh-codec/src/network/request.rs b/commons/zenoh-codec/src/network/request.rs index 19711ff147..364c1af3d0 100644 --- a/commons/zenoh-codec/src/network/request.rs +++ b/commons/zenoh-codec/src/network/request.rs @@ -93,16 +93,16 @@ where // Header let mut header = id::REQUEST; - 
let mut n_exts = ((ext_qos != &ext::QoSType::default()) as u8) + let mut n_exts = ((ext_qos != &ext::QoSType::DEFAULT) as u8) + (ext_tstamp.is_some() as u8) - + ((ext_target != &ext::TargetType::default()) as u8) + + ((ext_target != &ext::TargetType::DEFAULT) as u8) + (ext_budget.is_some() as u8) + (ext_timeout.is_some() as u8) - + ((ext_nodeid != &ext::NodeIdType::default()) as u8); + + ((ext_nodeid != &ext::NodeIdType::DEFAULT) as u8); if n_exts != 0 { header |= flag::Z; } - if wire_expr.mapping != Mapping::default() { + if wire_expr.mapping != Mapping::DEFAULT { header |= flag::M; } if wire_expr.has_suffix() { @@ -115,7 +115,7 @@ where self.write(&mut *writer, wire_expr)?; // Extensions - if ext_qos != &ext::QoSType::default() { + if ext_qos != &ext::QoSType::DEFAULT { n_exts -= 1; self.write(&mut *writer, (*ext_qos, n_exts != 0))?; } @@ -123,7 +123,7 @@ where n_exts -= 1; self.write(&mut *writer, (ts, n_exts != 0))?; } - if ext_target != &ext::TargetType::default() { + if ext_target != &ext::TargetType::DEFAULT { n_exts -= 1; self.write(&mut *writer, (ext_target, n_exts != 0))?; } @@ -137,7 +137,7 @@ where let e = ext::Timeout::new(to.as_millis() as u64); self.write(&mut *writer, (&e, n_exts != 0))?; } - if ext_nodeid != &ext::NodeIdType::default() { + if ext_nodeid != &ext::NodeIdType::DEFAULT { n_exts -= 1; self.write(&mut *writer, (*ext_nodeid, n_exts != 0))?; } @@ -185,10 +185,10 @@ where }; // Extensions - let mut ext_qos = ext::QoSType::default(); + let mut ext_qos = ext::QoSType::DEFAULT; let mut ext_tstamp = None; - let mut ext_nodeid = ext::NodeIdType::default(); - let mut ext_target = ext::TargetType::default(); + let mut ext_nodeid = ext::NodeIdType::DEFAULT; + let mut ext_target = ext::TargetType::DEFAULT; let mut ext_limit = None; let mut ext_timeout = None; diff --git a/commons/zenoh-codec/src/network/response.rs b/commons/zenoh-codec/src/network/response.rs index bec7df2967..5b69e8b109 100644 --- a/commons/zenoh-codec/src/network/response.rs +++ 
b/commons/zenoh-codec/src/network/response.rs @@ -48,13 +48,13 @@ where // Header let mut header = id::RESPONSE; - let mut n_exts = ((ext_qos != &ext::QoSType::default()) as u8) + let mut n_exts = ((ext_qos != &ext::QoSType::DEFAULT) as u8) + (ext_tstamp.is_some() as u8) + (ext_respid.is_some() as u8); if n_exts != 0 { header |= flag::Z; } - if wire_expr.mapping != Mapping::default() { + if wire_expr.mapping != Mapping::DEFAULT { header |= flag::M; } if wire_expr.has_suffix() { @@ -67,7 +67,7 @@ where self.write(&mut *writer, wire_expr)?; // Extensions - if ext_qos != &ext::QoSType::default() { + if ext_qos != &ext::QoSType::DEFAULT { n_exts -= 1; self.write(&mut *writer, (*ext_qos, n_exts != 0))?; } @@ -123,7 +123,7 @@ where }; // Extensions - let mut ext_qos = ext::QoSType::default(); + let mut ext_qos = ext::QoSType::DEFAULT; let mut ext_tstamp = None; let mut ext_respid = None; @@ -183,8 +183,7 @@ where // Header let mut header = id::RESPONSE_FINAL; - let mut n_exts = - ((ext_qos != &ext::QoSType::default()) as u8) + (ext_tstamp.is_some() as u8); + let mut n_exts = ((ext_qos != &ext::QoSType::DEFAULT) as u8) + (ext_tstamp.is_some() as u8); if n_exts != 0 { header |= flag::Z; } @@ -194,7 +193,7 @@ where self.write(&mut *writer, rid)?; // Extensions - if ext_qos != &ext::QoSType::default() { + if ext_qos != &ext::QoSType::DEFAULT { n_exts -= 1; self.write(&mut *writer, (*ext_qos, n_exts != 0))?; } @@ -236,7 +235,7 @@ where let rid: RequestId = bodec.read(&mut *reader)?; // Extensions - let mut ext_qos = ext::QoSType::default(); + let mut ext_qos = ext::QoSType::DEFAULT; let mut ext_tstamp = None; let mut has_ext = imsg::has_flag(self.header, flag::Z); diff --git a/commons/zenoh-codec/src/transport/fragment.rs b/commons/zenoh-codec/src/transport/fragment.rs index b66f395df1..b01e2c2bae 100644 --- a/commons/zenoh-codec/src/transport/fragment.rs +++ b/commons/zenoh-codec/src/transport/fragment.rs @@ -48,7 +48,7 @@ where if *more { header |= flag::M; } - if ext_qos 
!= &ext::QoSType::default() { + if ext_qos != &ext::QoSType::DEFAULT { header |= flag::Z; } self.write(&mut *writer, header)?; @@ -57,7 +57,7 @@ where self.write(&mut *writer, sn)?; // Extensions - if ext_qos != &ext::QoSType::default() { + if ext_qos != &ext::QoSType::DEFAULT { self.write(&mut *writer, (*ext_qos, false))?; } @@ -97,7 +97,7 @@ where let sn: TransportSn = self.codec.read(&mut *reader)?; // Extensions - let mut ext_qos = ext::QoSType::default(); + let mut ext_qos = ext::QoSType::DEFAULT; let mut has_ext = imsg::has_flag(self.header, flag::Z); while has_ext { diff --git a/commons/zenoh-codec/src/transport/frame.rs b/commons/zenoh-codec/src/transport/frame.rs index 8d39aabcdb..ab82a024c4 100644 --- a/commons/zenoh-codec/src/transport/frame.rs +++ b/commons/zenoh-codec/src/transport/frame.rs @@ -46,7 +46,7 @@ where if let Reliability::Reliable = reliability { header |= flag::R; } - if ext_qos != &ext::QoSType::default() { + if ext_qos != &ext::QoSType::DEFAULT { header |= flag::Z; } self.write(&mut *writer, header)?; @@ -55,7 +55,7 @@ where self.write(&mut *writer, sn)?; // Extensions - if ext_qos != &ext::QoSType::default() { + if ext_qos != &ext::QoSType::DEFAULT { self.write(&mut *writer, (x.ext_qos, false))?; } @@ -94,7 +94,7 @@ where let sn: TransportSn = self.codec.read(&mut *reader)?; // Extensions - let mut ext_qos = ext::QoSType::default(); + let mut ext_qos = ext::QoSType::DEFAULT; let mut has_ext = imsg::has_flag(self.header, flag::Z); while has_ext { diff --git a/commons/zenoh-codec/src/transport/join.rs b/commons/zenoh-codec/src/transport/join.rs index 80c1663413..d87ceecc78 100644 --- a/commons/zenoh-codec/src/transport/join.rs +++ b/commons/zenoh-codec/src/transport/join.rs @@ -121,7 +121,7 @@ where let (_, more): (ZExtZBufHeader<{ ext::QoS::ID }>, bool) = self.read(&mut *reader)?; // Body - let mut ext_qos = Box::new([PrioritySn::default(); Priority::NUM]); + let mut ext_qos = Box::new([PrioritySn::DEFAULT; Priority::NUM]); for p in 
ext_qos.iter_mut() { *p = self.codec.read(&mut *reader)?; } diff --git a/commons/zenoh-codec/src/transport/oam.rs b/commons/zenoh-codec/src/transport/oam.rs index e2f905abf8..6861f638d3 100644 --- a/commons/zenoh-codec/src/transport/oam.rs +++ b/commons/zenoh-codec/src/transport/oam.rs @@ -47,7 +47,7 @@ where header |= iext::ENC_ZBUF; } } - let mut n_exts = (ext_qos != &ext::QoSType::default()) as u8; + let mut n_exts = (ext_qos != &ext::QoSType::DEFAULT) as u8; if n_exts != 0 { header |= flag::Z; } @@ -57,7 +57,7 @@ where self.write(&mut *writer, id)?; // Extensions - if ext_qos != &ext::QoSType::default() { + if ext_qos != &ext::QoSType::DEFAULT { n_exts -= 1; self.write(&mut *writer, (*ext_qos, n_exts != 0))?; } @@ -105,7 +105,7 @@ where let id: OamId = self.codec.read(&mut *reader)?; // Extensions - let mut ext_qos = ext::QoSType::default(); + let mut ext_qos = ext::QoSType::DEFAULT; let mut has_ext = imsg::has_flag(self.header, flag::Z); while has_ext { diff --git a/commons/zenoh-codec/src/zenoh/ack.rs b/commons/zenoh-codec/src/zenoh/ack.rs deleted file mode 100644 index 78cbca2987..0000000000 --- a/commons/zenoh-codec/src/zenoh/ack.rs +++ /dev/null @@ -1,129 +0,0 @@ -// -// Copyright (c) 2022 ZettaScale Technology -// -// This program and the accompanying materials are made available under the -// terms of the Eclipse Public License 2.0 which is available at -// http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 -// which is available at https://www.apache.org/licenses/LICENSE-2.0. 
-// -// SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 -// -// Contributors: -// ZettaScale Zenoh Team, -// -use crate::{common::extension, RCodec, WCodec, Zenoh080, Zenoh080Header}; -use alloc::vec::Vec; -use zenoh_buffers::{ - reader::{DidntRead, Reader}, - writer::{DidntWrite, Writer}, -}; -use zenoh_protocol::{ - common::{iext, imsg}, - zenoh::{ - ack::{ext, flag, Ack}, - id, - }, -}; - -impl WCodec<&Ack, &mut W> for Zenoh080 -where - W: Writer, -{ - type Output = Result<(), DidntWrite>; - - fn write(self, writer: &mut W, x: &Ack) -> Self::Output { - let Ack { - timestamp, - ext_sinfo, - ext_unknown, - } = x; - - // Header - let mut header = id::ACK; - if timestamp.is_some() { - header |= flag::T; - } - let mut n_exts = ((ext_sinfo.is_some()) as u8) + (ext_unknown.len() as u8); - if n_exts != 0 { - header |= flag::Z; - } - self.write(&mut *writer, header)?; - - // Body - if let Some(ts) = timestamp.as_ref() { - self.write(&mut *writer, ts)?; - } - - // Extensions - if let Some(sinfo) = ext_sinfo.as_ref() { - n_exts -= 1; - self.write(&mut *writer, (sinfo, n_exts != 0))?; - } - for u in ext_unknown.iter() { - n_exts -= 1; - self.write(&mut *writer, (u, n_exts != 0))?; - } - - Ok(()) - } -} - -impl RCodec for Zenoh080 -where - R: Reader, -{ - type Error = DidntRead; - - fn read(self, reader: &mut R) -> Result { - let header: u8 = self.read(&mut *reader)?; - let codec = Zenoh080Header::new(header); - codec.read(reader) - } -} - -impl RCodec for Zenoh080Header -where - R: Reader, -{ - type Error = DidntRead; - - fn read(self, reader: &mut R) -> Result { - if imsg::mid(self.header) != id::ACK { - return Err(DidntRead); - } - - // Body - let mut timestamp: Option = None; - if imsg::has_flag(self.header, flag::T) { - timestamp = Some(self.codec.read(&mut *reader)?); - } - - // Extensions - let mut ext_sinfo: Option = None; - let mut ext_unknown = Vec::new(); - - let mut has_ext = imsg::has_flag(self.header, flag::Z); - while has_ext { - let ext: u8 = 
self.codec.read(&mut *reader)?; - let eodec = Zenoh080Header::new(ext); - match iext::eid(ext) { - ext::SourceInfo::ID => { - let (s, ext): (ext::SourceInfoType, bool) = eodec.read(&mut *reader)?; - ext_sinfo = Some(s); - has_ext = ext; - } - _ => { - let (u, ext) = extension::read(reader, "Ack", ext)?; - ext_unknown.push(u); - has_ext = ext; - } - } - } - - Ok(Ack { - timestamp, - ext_sinfo, - ext_unknown, - }) - } -} diff --git a/commons/zenoh-codec/src/zenoh/mod.rs b/commons/zenoh-codec/src/zenoh/mod.rs index d59add9d63..fdff09be94 100644 --- a/commons/zenoh-codec/src/zenoh/mod.rs +++ b/commons/zenoh-codec/src/zenoh/mod.rs @@ -11,7 +11,6 @@ // Contributors: // ZettaScale Zenoh Team, // -pub mod ack; pub mod del; pub mod err; pub mod pull; @@ -121,7 +120,6 @@ where fn write(self, writer: &mut W, x: &ResponseBody) -> Self::Output { match x { ResponseBody::Reply(b) => self.write(&mut *writer, b), - ResponseBody::Ack(b) => self.write(&mut *writer, b), ResponseBody::Err(b) => self.write(&mut *writer, b), ResponseBody::Put(b) => self.write(&mut *writer, b), } @@ -140,7 +138,6 @@ where let codec = Zenoh080Header::new(header); let body = match imsg::mid(codec.header) { id::REPLY => ResponseBody::Reply(codec.read(&mut *reader)?), - id::ACK => ResponseBody::Ack(codec.read(&mut *reader)?), id::ERR => ResponseBody::Err(codec.read(&mut *reader)?), id::PUT => ResponseBody::Put(codec.read(&mut *reader)?), _ => return Err(DidntRead), diff --git a/commons/zenoh-codec/src/zenoh/put.rs b/commons/zenoh-codec/src/zenoh/put.rs index ebc364cf9b..4f50be4872 100644 --- a/commons/zenoh-codec/src/zenoh/put.rs +++ b/commons/zenoh-codec/src/zenoh/put.rs @@ -54,7 +54,7 @@ where if timestamp.is_some() { header |= flag::T; } - if encoding != &Encoding::default() { + if encoding != &Encoding::DEFAULT { header |= flag::E; } let mut n_exts = (ext_sinfo.is_some()) as u8 @@ -73,7 +73,7 @@ where if let Some(ts) = timestamp.as_ref() { self.write(&mut *writer, ts)?; } - if encoding != 
&Encoding::default() { + if encoding != &Encoding::DEFAULT { self.write(&mut *writer, encoding)?; } @@ -143,7 +143,7 @@ where timestamp = Some(self.codec.read(&mut *reader)?); } - let mut encoding = Encoding::default(); + let mut encoding = Encoding::DEFAULT; if imsg::has_flag(self.header, flag::E) { encoding = self.codec.read(&mut *reader)?; } diff --git a/commons/zenoh-codec/src/zenoh/query.rs b/commons/zenoh-codec/src/zenoh/query.rs index cb0506e474..55f25cd5ea 100644 --- a/commons/zenoh-codec/src/zenoh/query.rs +++ b/commons/zenoh-codec/src/zenoh/query.rs @@ -83,7 +83,7 @@ where // Header let mut header = id::QUERY; - if consolidation != &Consolidation::default() { + if consolidation != &Consolidation::DEFAULT { header |= flag::C; } if !parameters.is_empty() { @@ -99,7 +99,7 @@ where self.write(&mut *writer, header)?; // Body - if consolidation != &Consolidation::default() { + if consolidation != &Consolidation::DEFAULT { self.write(&mut *writer, *consolidation)?; } if !parameters.is_empty() { @@ -153,7 +153,7 @@ where } // Body - let mut consolidation = Consolidation::default(); + let mut consolidation = Consolidation::DEFAULT; if imsg::has_flag(self.header, flag::C) { consolidation = self.codec.read(&mut *reader)?; } diff --git a/commons/zenoh-codec/src/zenoh/reply.rs b/commons/zenoh-codec/src/zenoh/reply.rs index d54e98cc5e..308004a1c2 100644 --- a/commons/zenoh-codec/src/zenoh/reply.rs +++ b/commons/zenoh-codec/src/zenoh/reply.rs @@ -41,7 +41,7 @@ where // Header let mut header = id::REPLY; - if consolidation != &Consolidation::default() { + if consolidation != &Consolidation::DEFAULT { header |= flag::C; } let mut n_exts = ext_unknown.len() as u8; @@ -51,7 +51,7 @@ where self.write(&mut *writer, header)?; // Body - if consolidation != &Consolidation::default() { + if consolidation != &Consolidation::DEFAULT { self.write(&mut *writer, *consolidation)?; } @@ -93,7 +93,7 @@ where } // Body - let mut consolidation = Consolidation::default(); + let mut 
consolidation = Consolidation::DEFAULT; if imsg::has_flag(self.header, flag::C) { consolidation = self.codec.read(&mut *reader)?; } diff --git a/commons/zenoh-codec/tests/codec.rs b/commons/zenoh-codec/tests/codec.rs index 28201c1977..7f23214b49 100644 --- a/commons/zenoh-codec/tests/codec.rs +++ b/commons/zenoh-codec/tests/codec.rs @@ -582,11 +582,6 @@ fn codec_err() { run!(zenoh::Err, zenoh::Err::rand()); } -#[test] -fn codec_ack() { - run!(zenoh::Ack, zenoh::Ack::rand()); -} - #[test] fn codec_pull() { run!(zenoh::Pull, zenoh::Pull::rand()); diff --git a/commons/zenoh-protocol/src/common/mod.rs b/commons/zenoh-protocol/src/common/mod.rs index d11d0b0c52..ef53e5a8ac 100644 --- a/commons/zenoh-protocol/src/common/mod.rs +++ b/commons/zenoh-protocol/src/common/mod.rs @@ -19,21 +19,6 @@ pub use extension::*; /*************************************/ // Inner Message IDs pub mod imsg { - pub mod id { - // Zenoh Messages - pub const DECLARE: u8 = 0x0b; - pub const DATA: u8 = 0x0c; - pub const QUERY: u8 = 0x0d; - pub const PULL: u8 = 0x0e; - pub const UNIT: u8 = 0x0f; - pub const LINK_STATE_LIST: u8 = 0x10; - - // Message decorators - pub const PRIORITY: u8 = 0x1c; - pub const ROUTING_CONTEXT: u8 = 0x1d; - pub const REPLY_CONTEXT: u8 = 0x1e; - } - // Header mask pub const HEADER_BITS: u8 = 5; pub const HEADER_MASK: u8 = !(0xff << HEADER_BITS); diff --git a/commons/zenoh-protocol/src/core/encoding.rs b/commons/zenoh-protocol/src/core/encoding.rs index f202b8e79c..b3abae8aae 100644 --- a/commons/zenoh-protocol/src/core/encoding.rs +++ b/commons/zenoh-protocol/src/core/encoding.rs @@ -266,6 +266,8 @@ impl Default for Encoding { } impl Encoding { + pub const DEFAULT: Self = Self::EMPTY; + #[cfg(feature = "test")] pub fn rand() -> Self { use rand::{ diff --git a/commons/zenoh-protocol/src/core/locator.rs b/commons/zenoh-protocol/src/core/locator.rs index cdd3dfa64c..42379f2b65 100644 --- a/commons/zenoh-protocol/src/core/locator.rs +++ 
b/commons/zenoh-protocol/src/core/locator.rs @@ -122,67 +122,3 @@ impl Locator { EndPoint::rand().into() } } - -// pub(crate) trait HasCanonForm { -// fn is_canon(&self) -> bool; - -// type Output; -// fn canonicalize(self) -> Self::Output; -// } - -// fn cmp(this: &str, than: &str) -> core::cmp::Ordering { -// let is_longer = this.len().cmp(&than.len()); -// let this = this.chars(); -// let than = than.chars(); -// let zip = this.zip(than); -// for (this, than) in zip { -// match this.cmp(&than) { -// core::cmp::Ordering::Equal => {} -// o => return o, -// } -// } -// is_longer -// } - -// impl<'a, T: Iterator + Clone, V> HasCanonForm for T { -// fn is_canon(&self) -> bool { -// let mut iter = self.clone(); -// let mut acc = if let Some((key, _)) = iter.next() { -// key -// } else { -// return true; -// }; -// for (key, _) in iter { -// if cmp(key, acc) != core::cmp::Ordering::Greater { -// return false; -// } -// acc = key; -// } -// true -// } - -// type Output = Vec<(&'a str, V)>; -// fn canonicalize(mut self) -> Self::Output { -// let mut result = Vec::new(); -// if let Some(v) = self.next() { -// result.push(v); -// } -// 'outer: for (k, v) in self { -// for (i, (x, _)) in result.iter().enumerate() { -// match cmp(k, x) { -// core::cmp::Ordering::Less => { -// result.insert(i, (k, v)); -// continue 'outer; -// } -// core::cmp::Ordering::Equal => { -// result[i].1 = v; -// continue 'outer; -// } -// core::cmp::Ordering::Greater => {} -// } -// } -// result.push((k, v)) -// } -// result -// } -// } diff --git a/commons/zenoh-protocol/src/core/mod.rs b/commons/zenoh-protocol/src/core/mod.rs index 2547034c44..3e9315bec2 100644 --- a/commons/zenoh-protocol/src/core/mod.rs +++ b/commons/zenoh-protocol/src/core/mod.rs @@ -16,7 +16,6 @@ use alloc::{ boxed::Box, format, string::{String, ToString}, - vec::Vec, }; use core::{ convert::{From, TryFrom, TryInto}, @@ -54,43 +53,6 @@ pub use endpoint::*; pub mod resolution; pub use resolution::*; -#[derive(Debug, Clone, 
PartialEq, Eq)] -pub struct Property { - pub key: u64, - pub value: Vec, -} - -/// The kind of a `Sample`. -#[repr(u8)] -#[derive(Debug, Default, Copy, Clone, PartialEq, Eq)] -pub enum SampleKind { - /// if the `Sample` was issued by a `put` operation. - #[default] - Put = 0, - /// if the `Sample` was issued by a `delete` operation. - Delete = 1, -} - -impl fmt::Display for SampleKind { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - SampleKind::Put => write!(f, "PUT"), - SampleKind::Delete => write!(f, "DELETE"), - } - } -} - -impl TryFrom for SampleKind { - type Error = u64; - fn try_from(kind: u64) -> Result { - match kind { - 0 => Ok(SampleKind::Put), - 1 => Ok(SampleKind::Delete), - _ => Err(kind), - } - } -} - /// The global unique id of a zenoh peer. #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[repr(transparent)] @@ -314,6 +276,8 @@ pub enum Priority { } impl Priority { + /// Default + pub const DEFAULT: Self = Self::Data; /// The lowest Priority pub const MIN: Self = Self::Background; /// The highest Priority @@ -354,6 +318,8 @@ pub enum Reliability { } impl Reliability { + pub const DEFAULT: Self = Self::BestEffort; + #[cfg(feature = "test")] pub fn rand() -> Self { use rand::Rng; @@ -374,6 +340,13 @@ pub struct Channel { pub reliability: Reliability, } +impl Channel { + pub const DEFAULT: Self = Self { + priority: Priority::DEFAULT, + reliability: Reliability::DEFAULT, + }; +} + /// The kind of congestion control. #[derive(Debug, Default, Copy, Clone, PartialEq, Eq)] #[repr(u8)] @@ -383,51 +356,6 @@ pub enum CongestionControl { Block = 1, } -/// The subscription mode. 
-#[derive(Debug, Default, Copy, Clone, PartialEq, Eq)] -#[repr(u8)] -pub enum SubMode { - #[default] - Push = 0, - Pull = 1, -} - -#[derive(Debug, Clone, PartialEq, Eq, Default)] -pub struct SubInfo { - pub reliability: Reliability, - pub mode: SubMode, -} - -#[derive(Debug, Default, Clone, PartialEq, Eq, Hash)] -pub struct QueryableInfo { - pub complete: u64, // Default 0: incomplete - pub distance: u64, // Default 0: no distance -} - -/// The kind of consolidation. -#[derive(Debug, Clone, PartialEq, Eq, Copy)] -pub enum ConsolidationMode { - /// No consolidation applied: multiple samples may be received for the same key-timestamp. - None, - /// Monotonic consolidation immediately forwards samples, except if one with an equal or more recent timestamp - /// has already been sent with the same key. - /// - /// This optimizes latency while potentially reducing bandwidth. - /// - /// Note that this doesn't cause re-ordering, but drops the samples for which a more recent timestamp has already - /// been observed with the same key. - Monotonic, - /// Holds back samples to only send the set of samples that had the highest timestamp for their key. - Latest, -} - -/// The `zenoh::queryable::Queryable`s that should be target of a `zenoh::Session::get()`. 
-#[derive(Debug, Default, Clone, Copy, PartialEq, Eq)] -pub enum QueryTarget { - #[default] - BestMatching, - All, - AllComplete, - #[cfg(feature = "complete_n")] - Complete(u64), +impl CongestionControl { + pub const DEFAULT: Self = Self::Drop; } diff --git a/commons/zenoh-protocol/src/core/wire_expr.rs b/commons/zenoh-protocol/src/core/wire_expr.rs index 7b0dee7471..6d9623d6ca 100644 --- a/commons/zenoh-protocol/src/core/wire_expr.rs +++ b/commons/zenoh-protocol/src/core/wire_expr.rs @@ -257,7 +257,7 @@ impl WireExpr<'_> { WireExpr { scope, suffix: suffix.into(), - mapping: Mapping::default(), + mapping: Mapping::DEFAULT, } } } diff --git a/commons/zenoh-protocol/src/network/declare.rs b/commons/zenoh-protocol/src/network/declare.rs index 76415d52f5..1568029cc6 100644 --- a/commons/zenoh-protocol/src/network/declare.rs +++ b/commons/zenoh-protocol/src/network/declare.rs @@ -156,6 +156,8 @@ pub enum Mode { } impl Mode { + pub const DEFAULT: Self = Self::Push; + #[cfg(feature = "test")] fn rand() -> Self { use rand::Rng; @@ -344,7 +346,7 @@ pub mod subscriber { /// - if P==1 then the subscription is pull, else it is push /// - rsv: Reserved /// ``` - #[derive(Debug, Default, Clone, Copy, PartialEq, Eq)] + #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub struct SubscriberInfo { pub reliability: Reliability, pub mode: Mode, @@ -354,6 +356,11 @@ pub mod subscriber { pub const R: u64 = 1; pub const P: u64 = 1 << 1; + pub const DEFAULT: Self = Self { + reliability: Reliability::DEFAULT, + mode: Mode::DEFAULT, + }; + #[cfg(feature = "test")] pub fn rand() -> Self { let reliability = Reliability::rand(); @@ -363,6 +370,12 @@ pub mod subscriber { } } + impl Default for SubscriberInfo { + fn default() -> Self { + Self::DEFAULT + } + } + impl From for SubscriberInfo { fn from(ext: Info) -> Self { let reliability = if imsg::has_option(ext.value, SubscriberInfo::R) { @@ -502,13 +515,18 @@ pub mod queryable { /// +---------------+ /// ~ distance ~ /// +---------------+ - 
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq)] + #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub struct QueryableInfo { pub complete: u8, // Default 0: incomplete // @TODO: maybe a bitflag pub distance: u32, // Default 0: no distance } impl QueryableInfo { + pub const DEFAULT: Self = Self { + complete: 0, + distance: 0, + }; + #[cfg(feature = "test")] pub fn rand() -> Self { use rand::Rng; @@ -520,6 +538,12 @@ pub mod queryable { } } + impl Default for QueryableInfo { + fn default() -> Self { + Self::DEFAULT + } + } + impl From for QueryableInfo { fn from(ext: Info) -> Self { let complete = ext.value as u8; diff --git a/commons/zenoh-protocol/src/network/mod.rs b/commons/zenoh-protocol/src/network/mod.rs index 1be58db5cc..6807488873 100644 --- a/commons/zenoh-protocol/src/network/mod.rs +++ b/commons/zenoh-protocol/src/network/mod.rs @@ -51,6 +51,8 @@ pub enum Mapping { } impl Mapping { + pub const DEFAULT: Self = Self::Receiver; + #[cfg(feature = "test")] pub fn rand() -> Self { use rand::Rng; @@ -226,6 +228,16 @@ pub mod ext { const D_FLAG: u8 = 0b00001000; const E_FLAG: u8 = 0b00010000; + pub const DEFAULT: Self = Self::new(Priority::DEFAULT, CongestionControl::DEFAULT, false); + + pub const DECLARE: Self = Self::new(Priority::DEFAULT, CongestionControl::Block, false); + pub const PUSH: Self = Self::new(Priority::DEFAULT, CongestionControl::Drop, false); + pub const REQUEST: Self = Self::new(Priority::DEFAULT, CongestionControl::Block, false); + pub const RESPONSE: Self = Self::new(Priority::DEFAULT, CongestionControl::Block, false); + pub const RESPONSE_FINAL: Self = + Self::new(Priority::DEFAULT, CongestionControl::Block, false); + pub const OAM: Self = Self::new(Priority::DEFAULT, CongestionControl::Block, false); + pub const fn new( priority: Priority, congestion_control: CongestionControl, @@ -275,35 +287,11 @@ pub mod ext { let inner: u8 = rng.gen(); Self { inner } } - - pub fn declare_default() -> Self { - Self::new(Priority::default(), 
CongestionControl::Block, false) - } - - pub fn push_default() -> Self { - Self::new(Priority::default(), CongestionControl::Drop, false) - } - - pub fn request_default() -> Self { - Self::new(Priority::default(), CongestionControl::Block, false) - } - - pub fn response_default() -> Self { - Self::new(Priority::default(), CongestionControl::Block, false) - } - - pub fn response_final_default() -> Self { - Self::new(Priority::default(), CongestionControl::Block, false) - } - - pub fn oam_default() -> Self { - Self::new(Priority::default(), CongestionControl::Block, false) - } } impl Default for QoSType<{ ID }> { fn default() -> Self { - Self::new(Priority::default(), CongestionControl::default(), false) + Self::new(Priority::DEFAULT, CongestionControl::DEFAULT, false) } } @@ -371,6 +359,9 @@ pub mod ext { } impl NodeIdType<{ ID }> { + // node_id == 0 means the message has been generated by the node itself + pub const DEFAULT: Self = Self { node_id: 0 }; + #[cfg(feature = "test")] pub fn rand() -> Self { use rand::Rng; @@ -382,8 +373,7 @@ pub mod ext { impl Default for NodeIdType<{ ID }> { fn default() -> Self { - // node_id == 0 means the message has been generated by the node itself - Self { node_id: 0 } + Self::DEFAULT } } diff --git a/commons/zenoh-protocol/src/network/request.rs b/commons/zenoh-protocol/src/network/request.rs index 9e0137ea3a..aba6bb057a 100644 --- a/commons/zenoh-protocol/src/network/request.rs +++ b/commons/zenoh-protocol/src/network/request.rs @@ -66,7 +66,6 @@ pub struct Request { pub mod ext { use crate::{ common::{ZExtZ64, ZExtZBuf}, - core::QueryTarget, zextz64, zextzbuf, }; use core::{num::NonZeroU32, time::Duration}; @@ -88,9 +87,19 @@ pub mod ext { /// +---------------+ /// /// The `zenoh::queryable::Queryable`s that should be target of a `zenoh::Session::get()`. 
- pub type TargetType = QueryTarget; + #[derive(Debug, Default, Clone, Copy, PartialEq, Eq)] + pub enum TargetType { + #[default] + BestMatching, + All, + AllComplete, + #[cfg(feature = "complete_n")] + Complete(u64), + } impl TargetType { + pub const DEFAULT: Self = Self::BestMatching; + #[cfg(feature = "test")] pub fn rand() -> Self { use rand::prelude::*; diff --git a/commons/zenoh-protocol/src/transport/mod.rs b/commons/zenoh-protocol/src/transport/mod.rs index cdf994e5dd..307389f8c9 100644 --- a/commons/zenoh-protocol/src/transport/mod.rs +++ b/commons/zenoh-protocol/src/transport/mod.rs @@ -75,13 +75,18 @@ pub enum TransportBodyLowLatency { pub type TransportSn = u32; -#[derive(Debug, Copy, Clone, PartialEq, Eq, Default)] +#[derive(Debug, Copy, Clone, PartialEq, Eq)] pub struct PrioritySn { pub reliable: TransportSn, pub best_effort: TransportSn, } impl PrioritySn { + pub const DEFAULT: Self = Self { + reliable: TransportSn::MIN, + best_effort: TransportSn::MIN, + }; + #[cfg(feature = "test")] pub fn rand() -> Self { use rand::Rng; @@ -252,7 +257,8 @@ pub mod ext { } impl QoSType<{ ID }> { - pub const P_MASK: u8 = 0b00000111; + const P_MASK: u8 = 0b00000111; + pub const DEFAULT: Self = Self::new(Priority::DEFAULT); pub const fn new(priority: Priority) -> Self { Self { @@ -276,7 +282,7 @@ pub mod ext { impl Default for QoSType<{ ID }> { fn default() -> Self { - Self::new(Priority::default()) + Self::DEFAULT } } diff --git a/commons/zenoh-protocol/src/zenoh/ack.rs b/commons/zenoh-protocol/src/zenoh/ack.rs deleted file mode 100644 index d40bf58791..0000000000 --- a/commons/zenoh-protocol/src/zenoh/ack.rs +++ /dev/null @@ -1,84 +0,0 @@ -// -// Copyright (c) 2022 ZettaScale Technology -// -// This program and the accompanying materials are made available under the -// terms of the Eclipse Public License 2.0 which is available at -// http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 -// which is available at 
https://www.apache.org/licenses/LICENSE-2.0. -// -// SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 -// -// Contributors: -// ZettaScale Zenoh Team, -// -use crate::common::ZExtUnknown; -use alloc::vec::Vec; -use uhlc::Timestamp; - -/// # Ack message -/// -/// ```text -/// Flags: -/// - T: Timestamp If T==1 then the timestamp if present -/// - X: Reserved -/// - Z: Extension If Z==1 then at least one extension is present -/// -/// 7 6 5 4 3 2 1 0 -/// +-+-+-+-+-+-+-+-+ -/// |Z|X|T| ACK | -/// +-+-+-+---------+ -/// ~ ts: ~ if T==1 -/// +---------------+ -/// ~ [err_exts] ~ if Z==1 -/// +---------------+ -/// ``` -pub mod flag { - pub const T: u8 = 1 << 5; // 0x20 Timestamp if T==0 then the timestamp if present - // pub const X: u8 = 1 << 6; // 0x40 Reserved - pub const Z: u8 = 1 << 7; // 0x80 Extensions if Z==1 then an extension will follow -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct Ack { - pub timestamp: Option, - pub ext_sinfo: Option, - pub ext_unknown: Vec, -} - -pub mod ext { - use crate::{common::ZExtZBuf, zextzbuf}; - - /// # SourceInfo extension - /// Used to carry additional information about the source of data - pub type SourceInfo = zextzbuf!(0x1, false); - pub type SourceInfoType = crate::zenoh::ext::SourceInfoType<{ SourceInfo::ID }>; -} - -impl Ack { - #[cfg(feature = "test")] - pub fn rand() -> Self { - use crate::{common::iext, core::ZenohId}; - use rand::Rng; - let mut rng = rand::thread_rng(); - - let timestamp = rng.gen_bool(0.5).then_some({ - let time = uhlc::NTP64(rng.gen()); - let id = uhlc::ID::try_from(ZenohId::rand().to_le_bytes()).unwrap(); - Timestamp::new(time, id) - }); - let ext_sinfo = rng.gen_bool(0.5).then_some(ext::SourceInfoType::rand()); - let mut ext_unknown = Vec::new(); - for _ in 0..rng.gen_range(0..4) { - ext_unknown.push(ZExtUnknown::rand2( - iext::mid(ext::SourceInfo::ID) + 1, - false, - )); - } - - Self { - timestamp, - ext_sinfo, - ext_unknown, - } - } -} diff --git a/commons/zenoh-protocol/src/zenoh/mod.rs 
b/commons/zenoh-protocol/src/zenoh/mod.rs index a23eaa9b21..d73d8cdd06 100644 --- a/commons/zenoh-protocol/src/zenoh/mod.rs +++ b/commons/zenoh-protocol/src/zenoh/mod.rs @@ -11,7 +11,6 @@ // Contributors: // ZettaScale Zenoh Team, // -pub mod ack; pub mod del; pub mod err; pub mod pull; @@ -20,7 +19,6 @@ pub mod query; pub mod reply; use crate::core::Encoding; -pub use ack::Ack; pub use del::Del; pub use err::Err; pub use pull::Pull; @@ -35,8 +33,7 @@ pub mod id { pub const QUERY: u8 = 0x03; pub const REPLY: u8 = 0x04; pub const ERR: u8 = 0x05; - pub const ACK: u8 = 0x06; - pub const PULL: u8 = 0x07; + pub const PULL: u8 = 0x06; } // DataInfo @@ -127,7 +124,6 @@ impl From for RequestBody { #[derive(Debug, Clone, PartialEq, Eq)] pub enum ResponseBody { Reply(Reply), - Ack(Ack), Err(Err), Put(Put), } @@ -138,11 +134,10 @@ impl ResponseBody { use rand::Rng; let mut rng = rand::thread_rng(); - match rng.gen_range(0..4) { + match rng.gen_range(0..3) { 0 => ResponseBody::Reply(Reply::rand()), - 1 => ResponseBody::Ack(Ack::rand()), - 2 => ResponseBody::Err(Err::rand()), - 3 => ResponseBody::Put(Put::rand()), + 1 => ResponseBody::Err(Err::rand()), + 2 => ResponseBody::Put(Put::rand()), _ => unreachable!(), } } @@ -160,12 +155,6 @@ impl From for ResponseBody { } } -impl From for ResponseBody { - fn from(r: Ack) -> ResponseBody { - ResponseBody::Ack(r) - } -} - pub mod ext { use zenoh_buffers::ZBuf; diff --git a/commons/zenoh-protocol/src/zenoh/query.rs b/commons/zenoh-protocol/src/zenoh/query.rs index 17dfa23df8..ac53b963f5 100644 --- a/commons/zenoh-protocol/src/zenoh/query.rs +++ b/commons/zenoh-protocol/src/zenoh/query.rs @@ -11,7 +11,7 @@ // Contributors: // ZettaScale Zenoh Team, // -use crate::{common::ZExtUnknown, core::ConsolidationMode}; +use crate::common::ZExtUnknown; use alloc::{string::String, vec::Vec}; /// The kind of consolidation. 
@@ -38,6 +38,8 @@ pub enum Consolidation { } impl Consolidation { + pub const DEFAULT: Self = Self::Auto; + #[cfg(feature = "test")] pub fn rand() -> Self { use rand::prelude::SliceRandom; @@ -55,16 +57,6 @@ impl Consolidation { } } -impl From for Consolidation { - fn from(val: ConsolidationMode) -> Self { - match val { - ConsolidationMode::None => Consolidation::None, - ConsolidationMode::Monotonic => Consolidation::Monotonic, - ConsolidationMode::Latest => Consolidation::Latest, - } - } -} - /// # Query message /// /// ```text diff --git a/examples/examples/z_pub_thr.rs b/examples/examples/z_pub_thr.rs index 3e130e0608..b698cbc80b 100644 --- a/examples/examples/z_pub_thr.rs +++ b/examples/examples/z_pub_thr.rs @@ -23,7 +23,7 @@ fn main() { env_logger::init(); let args = Args::parse(); - let mut prio = Priority::default(); + let mut prio = Priority::DEFAULT; if let Some(p) = args.priority { prio = p.try_into().unwrap(); } diff --git a/io/zenoh-transport/src/common/batch.rs b/io/zenoh-transport/src/common/batch.rs index 4139a65a05..a6aad76f7b 100644 --- a/io/zenoh-transport/src/common/batch.rs +++ b/io/zenoh-transport/src/common/batch.rs @@ -574,12 +574,12 @@ mod tests { let tmsg: TransportMessage = KeepAlive.into(); let nmsg: NetworkMessage = Push { wire_expr: WireExpr::empty(), - ext_qos: ext::QoSType::new(Priority::default(), CongestionControl::Block, false), + ext_qos: ext::QoSType::new(Priority::DEFAULT, CongestionControl::Block, false), ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, payload: PushBody::Put(Put { timestamp: None, - encoding: Encoding::default(), + encoding: Encoding::DEFAULT, ext_sinfo: None, #[cfg(feature = "shared-memory")] ext_shm: None, @@ -601,7 +601,7 @@ mod tests { let mut frame = FrameHeader { reliability: Reliability::Reliable, sn: 0, - ext_qos: frame::ext::QoSType::default(), + ext_qos: frame::ext::QoSType::DEFAULT, }; // Serialize with a frame diff --git 
a/io/zenoh-transport/src/common/pipeline.rs b/io/zenoh-transport/src/common/pipeline.rs index 954c656280..eebf23abc9 100644 --- a/io/zenoh-transport/src/common/pipeline.rs +++ b/io/zenoh-transport/src/common/pipeline.rs @@ -513,7 +513,7 @@ impl TransmissionPipeline { let mut stage_in = vec![]; let mut stage_out = vec![]; - let default_queue_size = [config.queue_size[Priority::default() as usize]]; + let default_queue_size = [config.queue_size[Priority::DEFAULT as usize]]; let size_iter = if priority.len() == 1 { default_queue_size.iter() } else { @@ -602,7 +602,7 @@ impl TransmissionPipelineProducer { let priority = msg.priority(); (priority as usize, priority) } else { - (0, Priority::default()) + (0, Priority::DEFAULT) }; // Lock the channel. We are the only one that will be writing on it. let mut queue = zlock!(self.stage_in[idx]); @@ -751,10 +751,10 @@ mod tests { wire_expr: key, ext_qos: ext::QoSType::new(Priority::Control, CongestionControl::Block, false), ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, payload: PushBody::Put(Put { timestamp: None, - encoding: Encoding::default(), + encoding: Encoding::DEFAULT, ext_sinfo: None, #[cfg(feature = "shared-memory")] ext_shm: None, @@ -881,10 +881,10 @@ mod tests { wire_expr: key, ext_qos: ext::QoSType::new(Priority::Control, CongestionControl::Block, false), ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, payload: PushBody::Put(Put { timestamp: None, - encoding: Encoding::default(), + encoding: Encoding::DEFAULT, ext_sinfo: None, #[cfg(feature = "shared-memory")] ext_shm: None, @@ -993,10 +993,10 @@ mod tests { false, ), ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, payload: PushBody::Put(Put { timestamp: None, - encoding: Encoding::default(), + encoding: Encoding::DEFAULT, ext_sinfo: None, #[cfg(feature = "shared-memory")] ext_shm: None, diff --git 
a/io/zenoh-transport/src/multicast/link.rs b/io/zenoh-transport/src/multicast/link.rs index 21ed0b3fdf..b24c077c57 100644 --- a/io/zenoh-transport/src/multicast/link.rs +++ b/io/zenoh-transport/src/multicast/link.rs @@ -483,7 +483,7 @@ async fn tx_task( .collect::>(); let (next_sn, ext_qos) = if next_sns.len() == Priority::NUM { let tmp: [PrioritySn; Priority::NUM] = next_sns.try_into().unwrap(); - (PrioritySn::default(), Some(Box::new(tmp))) + (PrioritySn::DEFAULT, Some(Box::new(tmp))) } else { (next_sns[0], None) }; diff --git a/io/zenoh-transport/src/multicast/rx.rs b/io/zenoh-transport/src/multicast/rx.rs index 14f2fd619c..dedef2149c 100644 --- a/io/zenoh-transport/src/multicast/rx.rs +++ b/io/zenoh-transport/src/multicast/rx.rs @@ -145,7 +145,7 @@ impl TransportMulticastInner { let priority = ext_qos.priority(); let c = if self.is_qos() { &peer.priority_rx[priority as usize] - } else if priority == Priority::default() { + } else if priority == Priority::DEFAULT { &peer.priority_rx[0] } else { bail!( @@ -181,7 +181,7 @@ impl TransportMulticastInner { let priority = ext_qos.priority(); let c = if self.is_qos() { &peer.priority_rx[priority as usize] - } else if priority == Priority::default() { + } else if priority == Priority::DEFAULT { &peer.priority_rx[0] } else { bail!( diff --git a/io/zenoh-transport/src/shm.rs b/io/zenoh-transport/src/shm.rs index 8b0e93f494..6f98cafc14 100644 --- a/io/zenoh-transport/src/shm.rs +++ b/io/zenoh-transport/src/shm.rs @@ -167,7 +167,6 @@ pub fn map_zmsg_to_shminfo(msg: &mut NetworkMessage) -> ZResult { ResponseBody::Reply(b) => b.map_to_shminfo(), ResponseBody::Put(b) => b.map_to_shminfo(), ResponseBody::Err(b) => b.map_to_shminfo(), - ResponseBody::Ack(_) => Ok(false), }, NetworkBody::ResponseFinal(_) | NetworkBody::Declare(_) | NetworkBody::OAM(_) => Ok(false), } @@ -222,7 +221,6 @@ pub fn map_zmsg_to_shmbuf( ResponseBody::Put(b) => b.map_to_shmbuf(shmr), ResponseBody::Err(b) => b.map_to_shmbuf(shmr), ResponseBody::Reply(b) 
=> b.map_to_shmbuf(shmr), - ResponseBody::Ack(_) => Ok(false), }, NetworkBody::ResponseFinal(_) | NetworkBody::Declare(_) | NetworkBody::OAM(_) => Ok(false), } diff --git a/io/zenoh-transport/src/unicast/establishment/cookie.rs b/io/zenoh-transport/src/unicast/establishment/cookie.rs index e9916be7e6..0db9e1c93a 100644 --- a/io/zenoh-transport/src/unicast/establishment/cookie.rs +++ b/io/zenoh-transport/src/unicast/establishment/cookie.rs @@ -11,7 +11,6 @@ // Contributors: // ZettaScale Zenoh Team, // -// use super::properties::EstablishmentProperties; use crate::unicast::establishment::ext; use std::convert::TryFrom; use zenoh_buffers::{ diff --git a/io/zenoh-transport/src/unicast/establishment/properties.rs b/io/zenoh-transport/src/unicast/establishment/properties.rs deleted file mode 100644 index e259b650ab..0000000000 --- a/io/zenoh-transport/src/unicast/establishment/properties.rs +++ /dev/null @@ -1,132 +0,0 @@ -// -// Copyright (c) 2023 ZettaScale Technology -// -// This program and the accompanying materials are made available under the -// terms of the Eclipse Public License 2.0 which is available at -// http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 -// which is available at https://www.apache.org/licenses/LICENSE-2.0. 
-// -// SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 -// -// Contributors: -// ZettaScale Zenoh Team, -// -use std::{ - convert::TryFrom, - ops::{Deref, DerefMut}, -}; -use zenoh_buffers::{reader::HasReader, writer::HasWriter, ZBuf}; -use zenoh_codec::{RCodec, WCodec, Zenoh080}; -use zenoh_protocol::core::Property; -use zenoh_result::{bail, zerror, Error as ZError, ZResult}; - -#[derive(Clone, Debug, PartialEq, Eq)] -pub struct EstablishmentProperties(Vec); - -impl Deref for EstablishmentProperties { - type Target = Vec; - - fn deref(&self) -> &Self::Target { - &self.0 - } -} - -impl DerefMut for EstablishmentProperties { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} - -impl EstablishmentProperties { - pub(super) fn new() -> Self { - EstablishmentProperties(vec![]) - } - - pub(super) fn insert(&mut self, p: Property) -> ZResult<()> { - if self.0.iter().any(|x| x.key == p.key) { - bail!("Property {} already exists", p.key) - } - self.0.push(p); - Ok(()) - } - - pub(super) fn remove(&mut self, key: u64) -> Option { - self.0 - .iter() - .position(|x| x.key == key) - .map(|i| self.0.remove(i)) - } -} - -impl TryFrom<&EstablishmentProperties> for Attachment { - type Error = ZError; - - fn try_from(eps: &EstablishmentProperties) -> Result { - if eps.is_empty() { - bail!("Can not create an attachment with zero properties") - } - - let mut zbuf = ZBuf::empty(); - let mut writer = zbuf.writer(); - let codec = Zenoh080::new(); - - codec - .write(&mut writer, eps.0.as_slice()) - .map_err(|_| zerror!(""))?; - - let attachment = Attachment::new(zbuf); - Ok(attachment) - } -} - -impl TryFrom> for EstablishmentProperties { - type Error = ZError; - - fn try_from(mut ps: Vec) -> Result { - let mut eps = EstablishmentProperties::new(); - for p in ps.drain(..) 
{ - eps.insert(p)?; - } - - Ok(eps) - } -} - -impl TryFrom<&Attachment> for EstablishmentProperties { - type Error = ZError; - - fn try_from(att: &Attachment) -> Result { - let mut reader = att.buffer.reader(); - let codec = Zenoh080::new(); - - let ps: Vec = codec.read(&mut reader).map_err(|_| zerror!(""))?; - EstablishmentProperties::try_from(ps) - } -} - -impl EstablishmentProperties { - #[cfg(test)] - pub fn rand() -> Self { - use rand::Rng; - - const MIN: usize = 1; - const MAX: usize = 8; - - let mut rng = rand::thread_rng(); - - let mut eps = EstablishmentProperties::new(); - for _ in MIN..=MAX { - loop { - let key: u64 = rng.gen(); - let mut value = vec![0u8; rng.gen_range(MIN..=MAX)]; - rng.fill(&mut value[..]); - let p = Property { key, value }; - if eps.insert(p).is_ok() { - break; - } - } - } - - eps - } -} diff --git a/io/zenoh-transport/src/unicast/universal/rx.rs b/io/zenoh-transport/src/unicast/universal/rx.rs index 935a1814b0..04af432aef 100644 --- a/io/zenoh-transport/src/unicast/universal/rx.rs +++ b/io/zenoh-transport/src/unicast/universal/rx.rs @@ -81,7 +81,7 @@ impl TransportUnicastUniversal { let priority = ext_qos.priority(); let c = if self.is_qos() { &self.priority_rx[priority as usize] - } else if priority == Priority::default() { + } else if priority == Priority::DEFAULT { &self.priority_rx[0] } else { bail!( @@ -124,7 +124,7 @@ impl TransportUnicastUniversal { let c = if self.is_qos() { &self.priority_rx[qos.priority() as usize] - } else if qos.priority() == Priority::default() { + } else if qos.priority() == Priority::DEFAULT { &self.priority_rx[0] } else { bail!( diff --git a/io/zenoh-transport/tests/multicast_compression.rs b/io/zenoh-transport/tests/multicast_compression.rs index f8e56a5484..4d1196e10f 100644 --- a/io/zenoh-transport/tests/multicast_compression.rs +++ b/io/zenoh-transport/tests/multicast_compression.rs @@ -269,11 +269,11 @@ mod tests { wire_expr: "test".into(), ext_qos: QoSType::new(channel.priority, 
CongestionControl::Block, false), ext_tstamp: None, - ext_nodeid: NodeIdType::default(), + ext_nodeid: NodeIdType::DEFAULT, payload: Put { payload: vec![0u8; msg_size].into(), timestamp: None, - encoding: Encoding::default(), + encoding: Encoding::DEFAULT, ext_sinfo: None, #[cfg(feature = "shared-memory")] ext_shm: None, @@ -363,7 +363,7 @@ mod tests { // Define the reliability and congestion control let channel = [ Channel { - priority: Priority::default(), + priority: Priority::DEFAULT, reliability: Reliability::BestEffort, }, Channel { diff --git a/io/zenoh-transport/tests/multicast_transport.rs b/io/zenoh-transport/tests/multicast_transport.rs index ebb290af1e..fe5a44b7ee 100644 --- a/io/zenoh-transport/tests/multicast_transport.rs +++ b/io/zenoh-transport/tests/multicast_transport.rs @@ -265,11 +265,11 @@ mod tests { wire_expr: "test".into(), ext_qos: QoSType::new(channel.priority, CongestionControl::Block, false), ext_tstamp: None, - ext_nodeid: NodeIdType::default(), + ext_nodeid: NodeIdType::DEFAULT, payload: Put { payload: vec![0u8; msg_size].into(), timestamp: None, - encoding: Encoding::default(), + encoding: Encoding::DEFAULT, ext_sinfo: None, #[cfg(feature = "shared-memory")] ext_shm: None, @@ -359,7 +359,7 @@ mod tests { // Define the reliability and congestion control let channel = [ Channel { - priority: Priority::default(), + priority: Priority::DEFAULT, reliability: Reliability::BestEffort, }, Channel { diff --git a/io/zenoh-transport/tests/unicast_compression.rs b/io/zenoh-transport/tests/unicast_compression.rs index 323c6f529e..dd4f55b5f5 100644 --- a/io/zenoh-transport/tests/unicast_compression.rs +++ b/io/zenoh-transport/tests/unicast_compression.rs @@ -297,11 +297,11 @@ mod tests { wire_expr: "test".into(), ext_qos: QoSType::new(channel.priority, cctrl, false), ext_tstamp: None, - ext_nodeid: NodeIdType::default(), + ext_nodeid: NodeIdType::DEFAULT, payload: Put { payload: vec![0u8; msg_size].into(), timestamp: None, - encoding: 
Encoding::default(), + encoding: Encoding::DEFAULT, ext_sinfo: None, #[cfg(feature = "shared-memory")] ext_shm: None, @@ -442,7 +442,7 @@ mod tests { // Define the reliability and congestion control let channel = [ Channel { - priority: Priority::default(), + priority: Priority::DEFAULT, reliability: Reliability::Reliable, }, Channel { @@ -472,7 +472,7 @@ mod tests { // Define the reliability and congestion control let channel = [ Channel { - priority: Priority::default(), + priority: Priority::DEFAULT, reliability: Reliability::Reliable, }, Channel { @@ -505,7 +505,7 @@ mod tests { // Define the reliability and congestion control let channel = [ Channel { - priority: Priority::default(), + priority: Priority::DEFAULT, reliability: Reliability::BestEffort, }, Channel { @@ -535,7 +535,7 @@ mod tests { // Define the reliability and congestion control let channel = [ Channel { - priority: Priority::default(), + priority: Priority::DEFAULT, reliability: Reliability::BestEffort, }, Channel { diff --git a/io/zenoh-transport/tests/unicast_concurrent.rs b/io/zenoh-transport/tests/unicast_concurrent.rs index d13f763b68..4e90432193 100644 --- a/io/zenoh-transport/tests/unicast_concurrent.rs +++ b/io/zenoh-transport/tests/unicast_concurrent.rs @@ -194,13 +194,13 @@ async fn transport_concurrent(endpoint01: Vec, endpoint02: Vec, endpoint02: Vec, client_transport: TransportUn wire_expr: "test".into(), ext_qos: QoSType::new(*p, CongestionControl::Block, false), ext_tstamp: None, - ext_nodeid: NodeIdType::default(), + ext_nodeid: NodeIdType::DEFAULT, payload: Put { payload: vec![0u8; *ms].into(), timestamp: None, - encoding: Encoding::default(), + encoding: Encoding::DEFAULT, ext_sinfo: None, #[cfg(feature = "shared-memory")] ext_shm: None, diff --git a/io/zenoh-transport/tests/unicast_shm.rs b/io/zenoh-transport/tests/unicast_shm.rs index f9180849af..d12a9db7dc 100644 --- a/io/zenoh-transport/tests/unicast_shm.rs +++ b/io/zenoh-transport/tests/unicast_shm.rs @@ -271,13 +271,13 
@@ mod tests { let message: NetworkMessage = Push { wire_expr: "test".into(), - ext_qos: QoSType::new(Priority::default(), CongestionControl::Block, false), + ext_qos: QoSType::new(Priority::DEFAULT, CongestionControl::Block, false), ext_tstamp: None, - ext_nodeid: NodeIdType::default(), + ext_nodeid: NodeIdType::DEFAULT, payload: Put { payload: sbuf.into(), timestamp: None, - encoding: Encoding::default(), + encoding: Encoding::DEFAULT, ext_sinfo: None, ext_shm: None, ext_attachment: None, @@ -319,13 +319,13 @@ mod tests { let message: NetworkMessage = Push { wire_expr: "test".into(), - ext_qos: QoSType::new(Priority::default(), CongestionControl::Block, false), + ext_qos: QoSType::new(Priority::DEFAULT, CongestionControl::Block, false), ext_tstamp: None, - ext_nodeid: NodeIdType::default(), + ext_nodeid: NodeIdType::DEFAULT, payload: Put { payload: sbuf.into(), timestamp: None, - encoding: Encoding::default(), + encoding: Encoding::DEFAULT, ext_sinfo: None, ext_shm: None, ext_attachment: None, diff --git a/io/zenoh-transport/tests/unicast_simultaneous.rs b/io/zenoh-transport/tests/unicast_simultaneous.rs index 19380eb49e..db73e99480 100644 --- a/io/zenoh-transport/tests/unicast_simultaneous.rs +++ b/io/zenoh-transport/tests/unicast_simultaneous.rs @@ -78,11 +78,11 @@ mod tests { wire_expr: "test".into(), ext_qos: QoSType::new(Priority::Control, CongestionControl::Block, false), ext_tstamp: None, - ext_nodeid: NodeIdType::default(), + ext_nodeid: NodeIdType::DEFAULT, payload: Put { payload: vec![0u8; MSG_SIZE].into(), timestamp: None, - encoding: Encoding::default(), + encoding: Encoding::DEFAULT, ext_sinfo: None, #[cfg(feature = "shared-memory")] ext_shm: None, diff --git a/io/zenoh-transport/tests/unicast_transport.rs b/io/zenoh-transport/tests/unicast_transport.rs index 11839aef2a..795ea90b41 100644 --- a/io/zenoh-transport/tests/unicast_transport.rs +++ b/io/zenoh-transport/tests/unicast_transport.rs @@ -468,11 +468,11 @@ async fn test_transport( wire_expr: 
"test".into(), ext_qos: QoSType::new(channel.priority, cctrl, false), ext_tstamp: None, - ext_nodeid: NodeIdType::default(), + ext_nodeid: NodeIdType::DEFAULT, payload: Put { payload: vec![0u8; msg_size].into(), timestamp: None, - encoding: Encoding::default(), + encoding: Encoding::DEFAULT, ext_sinfo: None, #[cfg(feature = "shared-memory")] ext_shm: None, @@ -614,7 +614,7 @@ fn transport_unicast_tcp_only() { // Define the reliability and congestion control let channel = [ Channel { - priority: Priority::default(), + priority: Priority::DEFAULT, reliability: Reliability::Reliable, }, Channel { @@ -644,7 +644,7 @@ fn transport_unicast_tcp_only_with_lowlatency_transport() { // Define the reliability and congestion control let channel = [ Channel { - priority: Priority::default(), + priority: Priority::DEFAULT, reliability: Reliability::Reliable, }, Channel { @@ -677,7 +677,7 @@ fn transport_unicast_udp_only() { // Define the reliability and congestion control let channel = [ Channel { - priority: Priority::default(), + priority: Priority::DEFAULT, reliability: Reliability::BestEffort, }, Channel { @@ -707,7 +707,7 @@ fn transport_unicast_udp_only_with_lowlatency_transport() { // Define the reliability and congestion control let channel = [ Channel { - priority: Priority::default(), + priority: Priority::DEFAULT, reliability: Reliability::BestEffort, }, Channel { @@ -739,7 +739,7 @@ fn transport_unicast_unix_only() { // Define the reliability and congestion control let channel = [ Channel { - priority: Priority::default(), + priority: Priority::DEFAULT, reliability: Reliability::BestEffort, }, Channel { @@ -773,7 +773,7 @@ fn transport_unicast_unix_only_with_lowlatency_transport() { // Define the reliability and congestion control let channel = [ Channel { - priority: Priority::default(), + priority: Priority::DEFAULT, reliability: Reliability::BestEffort, }, Channel { @@ -808,11 +808,11 @@ fn transport_unicast_ws_only() { // Define the reliability and congestion 
control let channel = [ Channel { - priority: Priority::default(), + priority: Priority::DEFAULT, reliability: Reliability::Reliable, }, Channel { - priority: Priority::default(), + priority: Priority::DEFAULT, reliability: Reliability::BestEffort, }, Channel { @@ -846,11 +846,11 @@ fn transport_unicast_ws_only_with_lowlatency_transport() { // Define the reliability and congestion control let channel = [ Channel { - priority: Priority::default(), + priority: Priority::DEFAULT, reliability: Reliability::Reliable, }, Channel { - priority: Priority::default(), + priority: Priority::DEFAULT, reliability: Reliability::BestEffort, }, Channel { @@ -887,7 +887,7 @@ fn transport_unicast_unixpipe_only() { // Define the reliability and congestion control let channel = [ Channel { - priority: Priority::default(), + priority: Priority::DEFAULT, reliability: Reliability::Reliable, }, Channel { @@ -921,7 +921,7 @@ fn transport_unicast_unixpipe_only_with_lowlatency_transport() { // Define the reliability and congestion control let channel = [ Channel { - priority: Priority::default(), + priority: Priority::DEFAULT, reliability: Reliability::Reliable, }, Channel { @@ -956,7 +956,7 @@ fn transport_unicast_tcp_udp() { // Define the reliability and congestion control let channel = [ Channel { - priority: Priority::default(), + priority: Priority::DEFAULT, reliability: Reliability::BestEffort, }, Channel { @@ -996,7 +996,7 @@ fn transport_unicast_tcp_unix() { // Define the reliability and congestion control let channel = [ Channel { - priority: Priority::default(), + priority: Priority::DEFAULT, reliability: Reliability::BestEffort, }, Channel { @@ -1038,7 +1038,7 @@ fn transport_unicast_udp_unix() { // Define the reliability and congestion control let channel = [ Channel { - priority: Priority::default(), + priority: Priority::DEFAULT, reliability: Reliability::BestEffort, }, Channel { @@ -1083,7 +1083,7 @@ fn transport_unicast_tcp_udp_unix() { // Define the reliability and congestion 
control let channel = [ Channel { - priority: Priority::default(), + priority: Priority::DEFAULT, reliability: Reliability::BestEffort, }, Channel { @@ -1130,11 +1130,11 @@ fn transport_unicast_tls_only_server() { // Define the reliability and congestion control let channel = [ Channel { - priority: Priority::default(), + priority: Priority::DEFAULT, reliability: Reliability::Reliable, }, Channel { - priority: Priority::default(), + priority: Priority::DEFAULT, reliability: Reliability::BestEffort, }, Channel { @@ -1184,11 +1184,11 @@ fn transport_unicast_quic_only_server() { // Define the reliability and congestion control let channel = [ Channel { - priority: Priority::default(), + priority: Priority::DEFAULT, reliability: Reliability::Reliable, }, Channel { - priority: Priority::default(), + priority: Priority::DEFAULT, reliability: Reliability::BestEffort, }, Channel { @@ -1256,11 +1256,11 @@ fn transport_unicast_tls_only_mutual_success() { // Define the reliability and congestion control let channel = [ Channel { - priority: Priority::default(), + priority: Priority::DEFAULT, reliability: Reliability::Reliable, }, Channel { - priority: Priority::default(), + priority: Priority::DEFAULT, reliability: Reliability::BestEffort, }, Channel { @@ -1323,11 +1323,11 @@ fn transport_unicast_tls_only_mutual_no_client_certs_failure() { // Define the reliability and congestion control let channel = [ Channel { - priority: Priority::default(), + priority: Priority::DEFAULT, reliability: Reliability::Reliable, }, Channel { - priority: Priority::default(), + priority: Priority::DEFAULT, reliability: Reliability::BestEffort, }, Channel { @@ -1403,11 +1403,11 @@ fn transport_unicast_tls_only_mutual_wrong_client_certs_failure() { // Define the reliability and congestion control let channel = [ Channel { - priority: Priority::default(), + priority: Priority::DEFAULT, reliability: Reliability::Reliable, }, Channel { - priority: Priority::default(), + priority: Priority::DEFAULT, 
reliability: Reliability::BestEffort, }, Channel { diff --git a/zenoh-ext/src/subscriber_ext.rs b/zenoh-ext/src/subscriber_ext.rs index a2987f8833..83de47779c 100644 --- a/zenoh-ext/src/subscriber_ext.rs +++ b/zenoh-ext/src/subscriber_ext.rs @@ -290,7 +290,7 @@ impl<'a, 'b, Handler> SubscriberBuilderExt<'a, 'b, Handler> session: self.session, key_expr: self.key_expr, key_space: crate::LivelinessSpace, - reliability: Reliability::default(), + reliability: Reliability::DEFAULT, origin: Locality::default(), fetch, handler: self.handler, @@ -334,11 +334,11 @@ impl<'a, 'b, Handler> SubscriberBuilderExt<'a, 'b, Handler> session: self.session, key_expr: self.key_expr, key_space: crate::LivelinessSpace, - reliability: Reliability::default(), + reliability: Reliability::DEFAULT, origin: Locality::default(), query_selector: None, - query_target: QueryTarget::default(), - query_consolidation: QueryConsolidation::default(), + query_target: QueryTarget::DEFAULT, + query_consolidation: QueryConsolidation::DEFAULT, query_accept_replies: ReplyKeyExpr::MatchingQuery, query_timeout: Duration::from_secs(10), handler: self.handler, diff --git a/zenoh/src/key_expr.rs b/zenoh/src/key_expr.rs index d2295f9798..36c696000a 100644 --- a/zenoh/src/key_expr.rs +++ b/zenoh/src/key_expr.rs @@ -633,9 +633,9 @@ impl SyncResolve for KeyExprUndeclaration<'_> { let primitives = state.primitives.as_ref().unwrap().clone(); drop(state); primitives.send_declare(zenoh_protocol::network::Declare { - ext_qos: declare::ext::QoSType::declare_default(), + ext_qos: declare::ext::QoSType::DECLARE, ext_tstamp: None, - ext_nodeid: declare::ext::NodeIdType::default(), + ext_nodeid: declare::ext::NodeIdType::DEFAULT, body: DeclareBody::UndeclareKeyExpr(UndeclareKeyExpr { id: expr_id }), }); diff --git a/zenoh/src/liveliness.rs b/zenoh/src/liveliness.rs index 0883041bb7..26a803fa43 100644 --- a/zenoh/src/liveliness.rs +++ b/zenoh/src/liveliness.rs @@ -549,7 +549,7 @@ where 
&Some(KeyExpr::from(*KE_PREFIX_LIVELINESS)), Locality::default(), callback, - &SubscriberInfo::default(), + &SubscriberInfo::DEFAULT, ) .map(|sub_state| Subscriber { subscriber: SubscriberInner { @@ -747,8 +747,8 @@ where .query( &self.key_expr?.into(), &Some(KeyExpr::from(*KE_PREFIX_LIVELINESS)), - QueryTarget::default(), - QueryConsolidation::default(), + QueryTarget::DEFAULT, + QueryConsolidation::DEFAULT, Locality::default(), self.timeout, None, diff --git a/zenoh/src/net/routing/dispatcher/pubsub.rs b/zenoh/src/net/routing/dispatcher/pubsub.rs index ffe2d3ccca..d6497a80b3 100644 --- a/zenoh/src/net/routing/dispatcher/pubsub.rs +++ b/zenoh/src/net/routing/dispatcher/pubsub.rs @@ -593,9 +593,9 @@ pub fn pull_data(tables_ref: &RwLock, face: &Arc, expr: WireE for (key_expr, payload) in route { face.primitives.send_push(Push { wire_expr: key_expr, - ext_qos: ext::QoSType::push_default(), + ext_qos: ext::QoSType::PUSH, ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, payload, }); } diff --git a/zenoh/src/net/routing/dispatcher/queries.rs b/zenoh/src/net/routing/dispatcher/queries.rs index a6748650ab..e8e84395f8 100644 --- a/zenoh/src/net/routing/dispatcher/queries.rs +++ b/zenoh/src/net/routing/dispatcher/queries.rs @@ -494,7 +494,6 @@ macro_rules! 
inc_res_stats { e.ext_body.as_ref().map(|b| b.payload.len()).unwrap_or(0), ); } - ResponseBody::Ack(_) => (), } } } @@ -555,14 +554,14 @@ pub fn route_query( for (wexpr, payload) in local_replies { let payload = ResponseBody::Reply(Reply { - consolidation: Consolidation::default(), // @TODO: handle Del case - ext_unknown: vec![], // @TODO: handle unknown extensions + consolidation: Consolidation::DEFAULT, // @TODO: handle Del case + ext_unknown: vec![], // @TODO: handle unknown extensions payload: ReplyBody::Put(Put { // @TODO: handle Del case - timestamp: None, // @TODO: handle timestamp - encoding: Encoding::default(), // @TODO: handle encoding - ext_sinfo: None, // @TODO: handle source info - ext_attachment: None, // @TODO: expose it in the API + timestamp: None, // @TODO: handle timestamp + encoding: Encoding::DEFAULT, // @TODO: handle encoding + ext_sinfo: None, // @TODO: handle source info + ext_attachment: None, // @TODO: expose it in the API #[cfg(feature = "shared-memory")] ext_shm: None, ext_unknown: vec![], // @TODO: handle unknown extensions @@ -583,7 +582,7 @@ pub fn route_query( rid: qid, wire_expr: wexpr, payload, - ext_qos: response::ext::QoSType::declare_default(), + ext_qos: response::ext::QoSType::DECLARE, ext_tstamp: None, ext_respid: Some(response::ext::ResponderIdType { zid, @@ -605,7 +604,7 @@ pub fn route_query( .send_response_final(RoutingContext::with_expr( ResponseFinal { rid: qid, - ext_qos: response::ext::QoSType::response_final_default(), + ext_qos: response::ext::QoSType::RESPONSE_FINAL, ext_tstamp: None, }, expr.full_expr().to_string(), @@ -636,7 +635,7 @@ pub fn route_query( Request { id: *qid, wire_expr: key_expr.into(), - ext_qos: ext::QoSType::request_default(), + ext_qos: ext::QoSType::REQUEST, ext_tstamp: None, ext_nodeid: ext::NodeIdType { node_id: *context }, ext_target: *t, @@ -672,7 +671,7 @@ pub fn route_query( Request { id: *qid, wire_expr: key_expr.into(), - ext_qos: ext::QoSType::request_default(), + ext_qos: 
ext::QoSType::REQUEST, ext_tstamp: None, ext_nodeid: ext::NodeIdType { node_id: *context }, ext_target: target, @@ -693,7 +692,7 @@ pub fn route_query( .send_response_final(RoutingContext::with_expr( ResponseFinal { rid: qid, - ext_qos: response::ext::QoSType::response_final_default(), + ext_qos: response::ext::QoSType::RESPONSE_FINAL, ext_tstamp: None, }, expr.full_expr().to_string(), @@ -711,7 +710,7 @@ pub fn route_query( .send_response_final(RoutingContext::with_expr( ResponseFinal { rid: qid, - ext_qos: response::ext::QoSType::response_final_default(), + ext_qos: response::ext::QoSType::RESPONSE_FINAL, ext_tstamp: None, }, "".to_string(), @@ -758,7 +757,7 @@ pub(crate) fn route_send_response( rid: query.src_qid, wire_expr: key_expr.to_owned(), payload: body, - ext_qos: response::ext::QoSType::response_default(), + ext_qos: response::ext::QoSType::RESPONSE, ext_tstamp: None, ext_respid, }, @@ -818,7 +817,7 @@ pub(crate) fn finalize_pending_query(query: Arc) { .send_response_final(RoutingContext::with_expr( ResponseFinal { rid: query.src_qid, - ext_qos: response::ext::QoSType::response_final_default(), + ext_qos: response::ext::QoSType::RESPONSE_FINAL, ext_tstamp: None, }, "".to_string(), diff --git a/zenoh/src/net/routing/dispatcher/resource.rs b/zenoh/src/net/routing/dispatcher/resource.rs index 7fc71c623d..fb4dec4ad5 100644 --- a/zenoh/src/net/routing/dispatcher/resource.rs +++ b/zenoh/src/net/routing/dispatcher/resource.rs @@ -466,9 +466,9 @@ impl Resource { .insert(expr_id, nonwild_prefix.clone()); face.primitives.send_declare(RoutingContext::with_expr( Declare { - ext_qos: ext::QoSType::declare_default(), + ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareKeyExpr(DeclareKeyExpr { id: expr_id, wire_expr: nonwild_prefix.expr().into(), diff --git a/zenoh/src/net/routing/hat/client/pubsub.rs b/zenoh/src/net/routing/hat/client/pubsub.rs index 
7becff4b4d..6f71ef443a 100644 --- a/zenoh/src/net/routing/hat/client/pubsub.rs +++ b/zenoh/src/net/routing/hat/client/pubsub.rs @@ -49,9 +49,9 @@ fn propagate_simple_subscription_to( let key_expr = Resource::decl_key(res, dst_face); dst_face.primitives.send_declare(RoutingContext::with_expr( Declare { - ext_qos: ext::QoSType::declare_default(), + ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareSubscriber(DeclareSubscriber { id: 0, // @TODO use proper SubscriberId (#703) wire_expr: key_expr, @@ -137,9 +137,9 @@ fn declare_client_subscription( .primitives .send_declare(RoutingContext::with_expr( Declare { - ext_qos: ext::QoSType::declare_default(), + ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareSubscriber(DeclareSubscriber { id: 0, // @TODO use proper SubscriberId (#703) wire_expr: res.expr().into(), @@ -171,9 +171,9 @@ fn propagate_forget_simple_subscription(tables: &mut Tables, res: &Arc let wire_expr = Resource::get_best_key(res, "", face.id); face.primitives.send_declare(RoutingContext::with_expr( Declare { - ext_qos: ext::QoSType::declare_default(), + ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::UndeclareSubscriber(UndeclareSubscriber { id: 0, // @TODO use proper SubscriberId (#703) ext_wire_expr: WireExprType { wire_expr }, @@ -209,9 +209,9 @@ pub(super) fn undeclare_client_subscription( let wire_expr = Resource::get_best_key(res, "", face.id); face.primitives.send_declare(RoutingContext::with_expr( Declare { - ext_qos: ext::QoSType::declare_default(), + ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, body: 
DeclareBody::UndeclareSubscriber(UndeclareSubscriber { id: 0, // @TODO use proper SubscriberId (#703) ext_wire_expr: WireExprType { wire_expr }, diff --git a/zenoh/src/net/routing/hat/client/queries.rs b/zenoh/src/net/routing/hat/client/queries.rs index 35a10557dc..667ff63c0e 100644 --- a/zenoh/src/net/routing/hat/client/queries.rs +++ b/zenoh/src/net/routing/hat/client/queries.rs @@ -96,9 +96,9 @@ fn propagate_simple_queryable( let key_expr = Resource::decl_key(res, &mut dst_face); dst_face.primitives.send_declare(RoutingContext::with_expr( Declare { - ext_qos: ext::QoSType::declare_default(), + ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareQueryable(DeclareQueryable { id: 0, // @TODO use proper QueryableId (#703) wire_expr: key_expr, @@ -166,9 +166,9 @@ fn propagate_forget_simple_queryable(tables: &mut Tables, res: &mut Arc let wire_expr = Resource::get_best_key(res, "", face.id); face.primitives.send_declare(RoutingContext::with_expr( Declare { - ext_qos: ext::QoSType::declare_default(), + ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::UndeclareSubscriber(UndeclareSubscriber { id: 0, // TODO ext_wire_expr: WireExprType { wire_expr }, @@ -431,9 +431,9 @@ pub(super) fn undeclare_client_subscription( let wire_expr = Resource::get_best_key(res, "", face.id); face.primitives.send_declare(RoutingContext::with_expr( Declare { - ext_qos: ext::QoSType::declare_default(), + ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::UndeclareSubscriber(UndeclareSubscriber { id: 0, // TODO ext_wire_expr: WireExprType { wire_expr }, @@ -467,9 +467,9 @@ pub(super) fn pubsub_new_face(tables: &mut Tables, face: &mut Arc) { let key_expr = Resource::decl_key(sub, face); 
face.primitives.send_declare(RoutingContext::with_expr( Declare { - ext_qos: ext::QoSType::declare_default(), + ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareSubscriber(DeclareSubscriber { id: 0, // TODO wire_expr: key_expr, diff --git a/zenoh/src/net/routing/hat/linkstate_peer/queries.rs b/zenoh/src/net/routing/hat/linkstate_peer/queries.rs index 6281993c93..03a1e11e67 100644 --- a/zenoh/src/net/routing/hat/linkstate_peer/queries.rs +++ b/zenoh/src/net/routing/hat/linkstate_peer/queries.rs @@ -137,7 +137,7 @@ fn send_sourced_queryable_to_net_childs( someface.primitives.send_declare(RoutingContext::with_expr( Declare { - ext_qos: ext::QoSType::declare_default(), + ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType { node_id: routing_context, @@ -177,9 +177,9 @@ fn propagate_simple_queryable( let key_expr = Resource::decl_key(res, &mut dst_face); dst_face.primitives.send_declare(RoutingContext::with_expr( Declare { - ext_qos: ext::QoSType::declare_default(), + ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareQueryable(DeclareQueryable { id: 0, // @TODO use proper QueryableId (#703) wire_expr: key_expr, @@ -347,7 +347,7 @@ fn send_forget_sourced_queryable_to_net_childs( someface.primitives.send_declare(RoutingContext::with_expr( Declare { - ext_qos: ext::QoSType::declare_default(), + ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType { node_id: routing_context, @@ -373,9 +373,9 @@ fn propagate_forget_simple_queryable(tables: &mut Tables, res: &mut Arc) { let key_expr = Resource::decl_key(qabl, face); face.primitives.send_declare(RoutingContext::with_expr( Declare { - ext_qos: ext::QoSType::declare_default(), + ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, - ext_nodeid: 
ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareQueryable(DeclareQueryable { id: 0, // @TODO use proper QueryableId (#703) wire_expr: key_expr, diff --git a/zenoh/src/net/routing/hat/p2p_peer/gossip.rs b/zenoh/src/net/routing/hat/p2p_peer/gossip.rs index ae3fda51a7..cf4d201867 100644 --- a/zenoh/src/net/routing/hat/p2p_peer/gossip.rs +++ b/zenoh/src/net/routing/hat/p2p_peer/gossip.rs @@ -214,7 +214,7 @@ impl Network { Ok(NetworkBody::OAM(Oam { id: OAM_LINKSTATE, body: ZExtBody::ZBuf(buf), - ext_qos: oam::ext::QoSType::oam_default(), + ext_qos: oam::ext::QoSType::OAM, ext_tstamp: None, }) .into()) diff --git a/zenoh/src/net/routing/hat/p2p_peer/pubsub.rs b/zenoh/src/net/routing/hat/p2p_peer/pubsub.rs index 8f91335f0a..97677893aa 100644 --- a/zenoh/src/net/routing/hat/p2p_peer/pubsub.rs +++ b/zenoh/src/net/routing/hat/p2p_peer/pubsub.rs @@ -49,9 +49,9 @@ fn propagate_simple_subscription_to( let key_expr = Resource::decl_key(res, dst_face); dst_face.primitives.send_declare(RoutingContext::with_expr( Declare { - ext_qos: ext::QoSType::declare_default(), + ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareSubscriber(DeclareSubscriber { id: 0, // @TODO use proper SubscriberId (#703) wire_expr: key_expr, @@ -137,9 +137,9 @@ fn declare_client_subscription( .primitives .send_declare(RoutingContext::with_expr( Declare { - ext_qos: ext::QoSType::declare_default(), + ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareSubscriber(DeclareSubscriber { id: 0, // @TODO use proper SubscriberId (#703) wire_expr: res.expr().into(), @@ -171,9 +171,9 @@ fn propagate_forget_simple_subscription(tables: &mut Tables, res: &Arc let wire_expr = Resource::get_best_key(res, "", face.id); 
face.primitives.send_declare(RoutingContext::with_expr( Declare { - ext_qos: ext::QoSType::declare_default(), + ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::UndeclareSubscriber(UndeclareSubscriber { id: 0, // @TODO use proper SubscriberId (#703) ext_wire_expr: WireExprType { wire_expr }, @@ -209,9 +209,9 @@ pub(super) fn undeclare_client_subscription( let wire_expr = Resource::get_best_key(res, "", face.id); face.primitives.send_declare(RoutingContext::with_expr( Declare { - ext_qos: ext::QoSType::declare_default(), + ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::UndeclareSubscriber(UndeclareSubscriber { id: 0, // @TODO use proper SubscriberId (#703) ext_wire_expr: WireExprType { wire_expr }, diff --git a/zenoh/src/net/routing/hat/p2p_peer/queries.rs b/zenoh/src/net/routing/hat/p2p_peer/queries.rs index 35a10557dc..667ff63c0e 100644 --- a/zenoh/src/net/routing/hat/p2p_peer/queries.rs +++ b/zenoh/src/net/routing/hat/p2p_peer/queries.rs @@ -96,9 +96,9 @@ fn propagate_simple_queryable( let key_expr = Resource::decl_key(res, &mut dst_face); dst_face.primitives.send_declare(RoutingContext::with_expr( Declare { - ext_qos: ext::QoSType::declare_default(), + ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareQueryable(DeclareQueryable { id: 0, // @TODO use proper QueryableId (#703) wire_expr: key_expr, @@ -166,9 +166,9 @@ fn propagate_forget_simple_queryable(tables: &mut Tables, res: &mut Arc let wire_expr = Resource::get_best_key(res, "", face.id); face.primitives.send_declare(RoutingContext::with_expr( Declare { - ext_qos: ext::QoSType::declare_default(), + ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + 
ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::UndeclareSubscriber(UndeclareSubscriber { id: 0, // @TODO use proper SubscriberId (#703) ext_wire_expr: WireExprType { wire_expr }, @@ -422,9 +422,9 @@ fn propagate_forget_simple_subscription_to_peers(tables: &mut Tables, res: &Arc< let wire_expr = Resource::get_best_key(res, "", face.id); face.primitives.send_declare(RoutingContext::with_expr( Declare { - ext_qos: ext::QoSType::declare_default(), + ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::UndeclareSubscriber(UndeclareSubscriber { id: 0, // @TODO use proper SubscriberId (#703) ext_wire_expr: WireExprType { wire_expr }, @@ -587,9 +587,9 @@ pub(super) fn undeclare_client_subscription( let wire_expr = Resource::get_best_key(res, "", face.id); face.primitives.send_declare(RoutingContext::with_expr( Declare { - ext_qos: ext::QoSType::declare_default(), + ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::UndeclareSubscriber(UndeclareSubscriber { id: 0, // @TODO use proper SubscriberId (#703) ext_wire_expr: WireExprType { wire_expr }, @@ -623,9 +623,9 @@ pub(super) fn pubsub_new_face(tables: &mut Tables, face: &mut Arc) { let key_expr = Resource::decl_key(sub, face); face.primitives.send_declare(RoutingContext::with_expr( Declare { - ext_qos: ext::QoSType::declare_default(), + ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareSubscriber(DeclareSubscriber { id: 0, // @TODO use proper SubscriberId (#703) wire_expr: key_expr, @@ -650,9 +650,9 @@ pub(super) fn pubsub_new_face(tables: &mut Tables, face: &mut Arc) { let key_expr = Resource::decl_key(sub, face); face.primitives.send_declare(RoutingContext::with_expr( Declare { - ext_qos: 
ext::QoSType::declare_default(), + ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareSubscriber(DeclareSubscriber { id: 0, // @TODO use proper SubscriberId (#703) wire_expr: key_expr, @@ -790,9 +790,9 @@ pub(super) fn pubsub_linkstate_change(tables: &mut Tables, zid: &ZenohId, links: let wire_expr = Resource::get_best_key(res, "", dst_face.id); dst_face.primitives.send_declare(RoutingContext::with_expr( Declare { - ext_qos: ext::QoSType::declare_default(), + ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::UndeclareSubscriber( UndeclareSubscriber { id: 0, // @TODO use proper SubscriberId (#703) @@ -815,9 +815,9 @@ pub(super) fn pubsub_linkstate_change(tables: &mut Tables, zid: &ZenohId, links: }; dst_face.primitives.send_declare(RoutingContext::with_expr( Declare { - ext_qos: ext::QoSType::declare_default(), + ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareSubscriber(DeclareSubscriber { id: 0, // @TODO use proper SubscriberId (#703) wire_expr: key_expr, diff --git a/zenoh/src/net/routing/hat/router/queries.rs b/zenoh/src/net/routing/hat/router/queries.rs index 90944a524f..dfffe42e0d 100644 --- a/zenoh/src/net/routing/hat/router/queries.rs +++ b/zenoh/src/net/routing/hat/router/queries.rs @@ -208,7 +208,7 @@ fn send_sourced_queryable_to_net_childs( someface.primitives.send_declare(RoutingContext::with_expr( Declare { - ext_qos: ext::QoSType::declare_default(), + ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType { node_id: routing_context, @@ -258,9 +258,9 @@ fn propagate_simple_queryable( let key_expr = Resource::decl_key(res, &mut dst_face); dst_face.primitives.send_declare(RoutingContext::with_expr( Declare { - ext_qos: 
ext::QoSType::declare_default(), + ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareQueryable(DeclareQueryable { id: 0, // @TODO use proper QueryableId (#703) wire_expr: key_expr, @@ -488,7 +488,7 @@ fn send_forget_sourced_queryable_to_net_childs( someface.primitives.send_declare(RoutingContext::with_expr( Declare { - ext_qos: ext::QoSType::declare_default(), + ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType { node_id: routing_context, @@ -514,9 +514,9 @@ fn propagate_forget_simple_queryable(tables: &mut Tables, res: &mut Arc) { let key_expr = Resource::decl_key(qabl, face); face.primitives.send_declare(RoutingContext::with_expr( Declare { - ext_qos: ext::QoSType::declare_default(), + ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareQueryable(DeclareQueryable { id: 0, // @TODO use proper QueryableId (#703) wire_expr: key_expr, @@ -785,9 +785,9 @@ pub(super) fn queries_new_face(tables: &mut Tables, face: &mut Arc) { let key_expr = Resource::decl_key(qabl, face); face.primitives.send_declare(RoutingContext::with_expr( Declare { - ext_qos: ext::QoSType::declare_default(), + ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareQueryable(DeclareQueryable { id: 0, // @TODO use proper QueryableId (#703) wire_expr: key_expr, @@ -884,9 +884,9 @@ pub(super) fn queries_linkstate_change(tables: &mut Tables, zid: &ZenohId, links let wire_expr = Resource::get_best_key(res, "", dst_face.id); dst_face.primitives.send_declare(RoutingContext::with_expr( Declare { - ext_qos: ext::QoSType::declare_default(), + ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: 
ext::NodeIdType::DEFAULT, body: DeclareBody::UndeclareQueryable( UndeclareQueryable { id: 0, // @TODO use proper QueryableId (#703) @@ -908,9 +908,9 @@ pub(super) fn queries_linkstate_change(tables: &mut Tables, zid: &ZenohId, links let key_expr = Resource::decl_key(res, dst_face); dst_face.primitives.send_declare(RoutingContext::with_expr( Declare { - ext_qos: ext::QoSType::declare_default(), + ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareQueryable(DeclareQueryable { id: 0, // @TODO use proper QueryableId (#703) wire_expr: key_expr, diff --git a/zenoh/src/net/runtime/adminspace.rs b/zenoh/src/net/runtime/adminspace.rs index f6fb13e76e..227dd035f4 100644 --- a/zenoh/src/net/runtime/adminspace.rs +++ b/zenoh/src/net/runtime/adminspace.rs @@ -270,9 +270,9 @@ impl AdminSpace { zlock!(admin.primitives).replace(primitives.clone()); primitives.send_declare(Declare { - ext_qos: ext::QoSType::declare_default(), + ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareQueryable(DeclareQueryable { id: 0, // @TODO use proper QueryableId (#703) wire_expr: [&root_key, "/**"].concat().into(), @@ -284,13 +284,13 @@ impl AdminSpace { }); primitives.send_declare(Declare { - ext_qos: ext::QoSType::declare_default(), + ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareSubscriber(DeclareSubscriber { id: 0, // @TODO use proper SubscriberId (#703) wire_expr: [&root_key, "/config/**"].concat().into(), - ext_info: SubscriberInfo::default(), + ext_info: SubscriberInfo::DEFAULT, }), }); } @@ -392,7 +392,7 @@ impl Primitives for AdminSpace { ); primitives.send_response_final(ResponseFinal { rid: msg.id, - ext_qos: ext::QoSType::response_final_default(), + ext_qos: 
ext::QoSType::RESPONSE_FINAL, ext_tstamp: None, }); return; @@ -405,7 +405,7 @@ impl Primitives for AdminSpace { log::error!("Unknown KeyExpr: {}", e); primitives.send_response_final(ResponseFinal { rid: msg.id, - ext_qos: ext::QoSType::response_final_default(), + ext_qos: ext::QoSType::RESPONSE_FINAL, ext_tstamp: None, }); return; diff --git a/zenoh/src/net/tests/tables.rs b/zenoh/src/net/tests/tables.rs index 363803f682..57f6a6dcbc 100644 --- a/zenoh/src/net/tests/tables.rs +++ b/zenoh/src/net/tests/tables.rs @@ -497,9 +497,9 @@ fn client_test() { Primitives::send_declare( primitives0.as_ref(), Declare { - ext_qos: ext::QoSType::declare_default(), + ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareKeyExpr(DeclareKeyExpr { id: 11, wire_expr: "test/client".into(), @@ -523,9 +523,9 @@ fn client_test() { Primitives::send_declare( primitives0.as_ref(), Declare { - ext_qos: ext::QoSType::declare_default(), + ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareKeyExpr(DeclareKeyExpr { id: 12, wire_expr: WireExpr::from(11).with_suffix("/z1_pub1"), @@ -544,9 +544,9 @@ fn client_test() { Primitives::send_declare( primitives1.as_ref(), Declare { - ext_qos: ext::QoSType::declare_default(), + ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareKeyExpr(DeclareKeyExpr { id: 21, wire_expr: "test/client".into(), @@ -570,9 +570,9 @@ fn client_test() { Primitives::send_declare( primitives1.as_ref(), Declare { - ext_qos: ext::QoSType::declare_default(), + ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareKeyExpr(DeclareKeyExpr { id: 22, wire_expr: 
WireExpr::from(21).with_suffix("/z2_pub1"), @@ -591,9 +591,9 @@ fn client_test() { Primitives::send_declare( primitives2.as_ref(), Declare { - ext_qos: ext::QoSType::declare_default(), + ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareKeyExpr(DeclareKeyExpr { id: 31, wire_expr: "test/client".into(), @@ -617,10 +617,10 @@ fn client_test() { &tables, &face0.upgrade().unwrap(), &"test/client/z1_wr1".into(), - ext::QoSType::default(), + ext::QoSType::DEFAULT, PushBody::Put(Put { timestamp: None, - encoding: Encoding::default(), + encoding: Encoding::DEFAULT, ext_sinfo: None, #[cfg(feature = "shared-memory")] ext_shm: None, @@ -650,10 +650,10 @@ fn client_test() { &router.tables, &face0.upgrade().unwrap(), &WireExpr::from(11).with_suffix("/z1_wr2"), - ext::QoSType::default(), + ext::QoSType::DEFAULT, PushBody::Put(Put { timestamp: None, - encoding: Encoding::default(), + encoding: Encoding::DEFAULT, ext_sinfo: None, #[cfg(feature = "shared-memory")] ext_shm: None, @@ -683,10 +683,10 @@ fn client_test() { &router.tables, &face1.upgrade().unwrap(), &"test/client/**".into(), - ext::QoSType::default(), + ext::QoSType::DEFAULT, PushBody::Put(Put { timestamp: None, - encoding: Encoding::default(), + encoding: Encoding::DEFAULT, ext_sinfo: None, #[cfg(feature = "shared-memory")] ext_shm: None, @@ -716,10 +716,10 @@ fn client_test() { &router.tables, &face0.upgrade().unwrap(), &12.into(), - ext::QoSType::default(), + ext::QoSType::DEFAULT, PushBody::Put(Put { timestamp: None, - encoding: Encoding::default(), + encoding: Encoding::DEFAULT, ext_sinfo: None, #[cfg(feature = "shared-memory")] ext_shm: None, @@ -749,10 +749,10 @@ fn client_test() { &router.tables, &face1.upgrade().unwrap(), &22.into(), - ext::QoSType::default(), + ext::QoSType::DEFAULT, PushBody::Put(Put { timestamp: None, - encoding: Encoding::default(), + encoding: Encoding::DEFAULT, ext_sinfo: None, 
#[cfg(feature = "shared-memory")] ext_shm: None, diff --git a/zenoh/src/prelude.rs b/zenoh/src/prelude.rs index 36a841d1ef..ad28470f63 100644 --- a/zenoh/src/prelude.rs +++ b/zenoh/src/prelude.rs @@ -50,9 +50,7 @@ pub(crate) mod common { pub use crate::sample::Locality; #[cfg(not(feature = "unstable"))] pub(crate) use crate::sample::Locality; - pub use crate::sample::Sample; - - pub use zenoh_protocol::core::SampleKind; + pub use crate::sample::{Sample, SampleKind}; pub use crate::publication::Priority; #[zenoh_macros::unstable] diff --git a/zenoh/src/publication.rs b/zenoh/src/publication.rs index 843190ad45..58c7c5c367 100644 --- a/zenoh/src/publication.rs +++ b/zenoh/src/publication.rs @@ -811,7 +811,7 @@ fn resolve_put( false, ), ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, payload: match kind { SampleKind::Put => { #[allow(unused_mut)] @@ -887,6 +887,8 @@ pub enum Priority { } impl Priority { + /// Default + pub const DEFAULT: Self = Self::Data; /// The lowest Priority pub const MIN: Self = Self::Background; /// The highest Priority @@ -1328,7 +1330,6 @@ mod tests { #[test] fn sample_kind_integrity_in_publication() { use crate::{open, prelude::sync::*}; - use zenoh_protocol::core::SampleKind; const KEY_EXPR: &str = "test/sample_kind_integrity/publication"; const VALUE: &str = "zenoh"; @@ -1351,7 +1352,6 @@ mod tests { #[test] fn sample_kind_integrity_in_put_builder() { use crate::{open, prelude::sync::*}; - use zenoh_protocol::core::SampleKind; const KEY_EXPR: &str = "test/sample_kind_integrity/put_builder"; const VALUE: &str = "zenoh"; diff --git a/zenoh/src/query.rs b/zenoh/src/query.rs index c4f3fb35e9..7a7a867cd8 100644 --- a/zenoh/src/query.rs +++ b/zenoh/src/query.rs @@ -13,7 +13,6 @@ // //! Query primitives. 
- use crate::handlers::{locked, Callback, DefaultHandler}; use crate::prelude::*; #[zenoh_macros::unstable] @@ -23,13 +22,38 @@ use std::collections::HashMap; use std::future::Ready; use std::time::Duration; use zenoh_core::{AsyncResolve, Resolvable, SyncResolve}; +use zenoh_protocol::zenoh::query::Consolidation; use zenoh_result::ZResult; /// The [`Queryable`](crate::queryable::Queryable)s that should be target of a [`get`](Session::get). -pub use zenoh_protocol::core::QueryTarget; +pub type QueryTarget = zenoh_protocol::network::request::ext::TargetType; /// The kind of consolidation. -pub use zenoh_protocol::core::ConsolidationMode; +#[derive(Debug, Clone, PartialEq, Eq, Copy)] +pub enum ConsolidationMode { + /// No consolidation applied: multiple samples may be received for the same key-timestamp. + None, + /// Monotonic consolidation immediately forwards samples, except if one with an equal or more recent timestamp + /// has already been sent with the same key. + /// + /// This optimizes latency while potentially reducing bandwidth. + /// + /// Note that this doesn't cause re-ordering, but drops the samples for which a more recent timestamp has already + /// been observed with the same key. + Monotonic, + /// Holds back samples to only send the set of samples that had the highest timestamp for their key. + Latest, +} + +impl From for Consolidation { + fn from(val: ConsolidationMode) -> Self { + match val { + ConsolidationMode::None => Consolidation::None, + ConsolidationMode::Monotonic => Consolidation::Monotonic, + ConsolidationMode::Latest => Consolidation::Latest, + } + } +} /// The operation: either manual or automatic. #[derive(Clone, Copy, Debug, PartialEq, Eq)] @@ -45,6 +69,7 @@ pub struct QueryConsolidation { } impl QueryConsolidation { + pub const DEFAULT: Self = Self::AUTO; /// Automatic query consolidation strategy selection. 
pub const AUTO: Self = Self { mode: Mode::Auto }; @@ -72,7 +97,7 @@ impl From for QueryConsolidation { impl Default for QueryConsolidation { fn default() -> Self { - QueryConsolidation::AUTO + Self::DEFAULT } } diff --git a/zenoh/src/queryable.rs b/zenoh/src/queryable.rs index 4e9f4914dd..d0ce99b512 100644 --- a/zenoh/src/queryable.rs +++ b/zenoh/src/queryable.rs @@ -56,7 +56,7 @@ impl Drop for QueryInner { fn drop(&mut self) { self.primitives.send_response_final(ResponseFinal { rid: self.qid, - ext_qos: response::ext::QoSType::response_final_default(), + ext_qos: response::ext::QoSType::RESPONSE_FINAL, ext_tstamp: None, }); } @@ -241,7 +241,7 @@ impl SyncResolve for ReplyBuilder<'_> { mapping: Mapping::Sender, }, payload: ResponseBody::Reply(zenoh::Reply { - consolidation: zenoh::Consolidation::default(), + consolidation: zenoh::Consolidation::DEFAULT, ext_unknown: vec![], payload: match kind { SampleKind::Put => ReplyBody::Put(Put { @@ -262,7 +262,7 @@ impl SyncResolve for ReplyBuilder<'_> { }), }, }), - ext_qos: response::ext::QoSType::response_default(), + ext_qos: response::ext::QoSType::RESPONSE, ext_tstamp: None, ext_respid: Some(response::ext::ResponderIdType { zid: self.query.inner.zid, @@ -292,7 +292,7 @@ impl SyncResolve for ReplyBuilder<'_> { }), code: 0, // TODO }), - ext_qos: response::ext::QoSType::response_default(), + ext_qos: response::ext::QoSType::RESPONSE, ext_tstamp: None, ext_respid: Some(response::ext::ResponderIdType { zid: self.query.inner.zid, diff --git a/zenoh/src/sample.rs b/zenoh/src/sample.rs index 5d707e5936..d41e8c83a1 100644 --- a/zenoh/src/sample.rs +++ b/zenoh/src/sample.rs @@ -14,13 +14,15 @@ //! 
Sample primitives use crate::buffers::ZBuf; -use crate::prelude::ZenohId; -use crate::prelude::{KeyExpr, SampleKind, Value}; +use crate::prelude::{KeyExpr, Value, ZenohId}; use crate::query::Reply; use crate::time::{new_reception_timestamp, Timestamp}; #[zenoh_macros::unstable] use serde::Serialize; -use std::convert::{TryFrom, TryInto}; +use std::{ + convert::{TryFrom, TryInto}, + fmt, +}; use zenoh_protocol::core::Encoding; pub type SourceSn = u64; @@ -311,6 +313,38 @@ mod attachment { } } } + +/// The kind of a `Sample`. +#[repr(u8)] +#[derive(Debug, Default, Copy, Clone, PartialEq, Eq)] +pub enum SampleKind { + /// if the `Sample` was issued by a `put` operation. + #[default] + Put = 0, + /// if the `Sample` was issued by a `delete` operation. + Delete = 1, +} + +impl fmt::Display for SampleKind { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + SampleKind::Put => write!(f, "PUT"), + SampleKind::Delete => write!(f, "DELETE"), + } + } +} + +impl TryFrom for SampleKind { + type Error = u64; + fn try_from(kind: u64) -> Result { + match kind { + 0 => Ok(SampleKind::Put), + 1 => Ok(SampleKind::Delete), + _ => Err(kind), + } + } +} + #[zenoh_macros::unstable] pub use attachment::{Attachment, AttachmentBuilder, AttachmentIterator}; diff --git a/zenoh/src/session.rs b/zenoh/src/session.rs index 46cfd5e499..329e44e43f 100644 --- a/zenoh/src/session.rs +++ b/zenoh/src/session.rs @@ -296,7 +296,7 @@ impl<'s, 'a> SessionDeclarations<'s, 'a> for SessionRef<'a> { SubscriberBuilder { session: self.clone(), key_expr: TryIntoKeyExpr::try_into(key_expr).map_err(Into::into), - reliability: Reliability::default(), + reliability: Reliability::DEFAULT, mode: PushMode, origin: Locality::default(), handler: DefaultHandler, @@ -329,8 +329,8 @@ impl<'s, 'a> SessionDeclarations<'s, 'a> for SessionRef<'a> { PublisherBuilder { session: self.clone(), key_expr: key_expr.try_into().map_err(Into::into), - congestion_control: CongestionControl::default(), - 
priority: Priority::default(), + congestion_control: CongestionControl::DEFAULT, + priority: Priority::DEFAULT, destination: Locality::default(), } } @@ -775,8 +775,8 @@ impl Session { session: self, selector, scope: Ok(None), - target: QueryTarget::default(), - consolidation: QueryConsolidation::default(), + target: QueryTarget::DEFAULT, + consolidation: QueryConsolidation::DEFAULT, destination: Locality::default(), timeout, value: None, @@ -858,9 +858,9 @@ impl Session { let primitives = state.primitives.as_ref().unwrap().clone(); drop(state); primitives.send_declare(Declare { - ext_qos: declare::ext::QoSType::declare_default(), + ext_qos: declare::ext::QoSType::DECLARE, ext_tstamp: None, - ext_nodeid: declare::ext::NodeIdType::default(), + ext_nodeid: declare::ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareKeyExpr(DeclareKeyExpr { id: expr_id, wire_expr: WireExpr { @@ -1059,9 +1059,9 @@ impl Session { // }; primitives.send_declare(Declare { - ext_qos: declare::ext::QoSType::declare_default(), + ext_qos: declare::ext::QoSType::DECLARE, ext_tstamp: None, - ext_nodeid: declare::ext::NodeIdType::default(), + ext_nodeid: declare::ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareSubscriber(DeclareSubscriber { id: id as u32, wire_expr: key_expr.to_wire(self).to_owned(), @@ -1124,9 +1124,9 @@ impl Session { let wire_expr = WireExpr::from(join_sub).to_owned(); drop(state); primitives.send_declare(Declare { - ext_qos: ext::QoSType::declare_default(), + ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::UndeclareSubscriber(UndeclareSubscriber { id: 0, // @TODO use proper SubscriberId (#703) ext_wire_expr: WireExprType { wire_expr }, @@ -1149,9 +1149,9 @@ impl Session { let primitives = state.primitives.as_ref().unwrap().clone(); drop(state); primitives.send_declare(Declare { - ext_qos: ext::QoSType::declare_default(), + ext_qos: ext::QoSType::DECLARE, ext_tstamp: 
None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::UndeclareSubscriber(UndeclareSubscriber { id: 0, // @TODO use proper SubscriberId (#703) ext_wire_expr: WireExprType { @@ -1205,9 +1205,9 @@ impl Session { distance: 0, }; primitives.send_declare(Declare { - ext_qos: declare::ext::QoSType::declare_default(), + ext_qos: declare::ext::QoSType::DECLARE, ext_tstamp: None, - ext_nodeid: declare::ext::NodeIdType::default(), + ext_nodeid: declare::ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareQueryable(DeclareQueryable { id: id as u32, wire_expr: key_expr.to_owned(), @@ -1233,9 +1233,9 @@ impl Session { distance: 0, }; primitives.send_declare(Declare { - ext_qos: declare::ext::QoSType::declare_default(), + ext_qos: declare::ext::QoSType::DECLARE, ext_tstamp: None, - ext_nodeid: declare::ext::NodeIdType::default(), + ext_nodeid: declare::ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareQueryable(DeclareQueryable { id: id as u32, wire_expr: key_expr.to_owned(), @@ -1298,9 +1298,9 @@ impl Session { distance: 0, }; primitives.send_declare(Declare { - ext_qos: declare::ext::QoSType::declare_default(), + ext_qos: declare::ext::QoSType::DECLARE, ext_tstamp: None, - ext_nodeid: declare::ext::NodeIdType::default(), + ext_nodeid: declare::ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareQueryable(DeclareQueryable { id: 0, // @TODO use proper QueryableId (#703) wire_expr: qable_state.key_expr.clone(), @@ -1317,9 +1317,9 @@ impl Session { distance: 0, }; primitives.send_declare(Declare { - ext_qos: declare::ext::QoSType::declare_default(), + ext_qos: declare::ext::QoSType::DECLARE, ext_tstamp: None, - ext_nodeid: declare::ext::NodeIdType::default(), + ext_nodeid: declare::ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareQueryable(DeclareQueryable { id: 0, // @TODO use proper QueryableId (#703) wire_expr: qable_state.key_expr.clone(), @@ -1333,9 +1333,9 @@ impl Session { // There are no more Queryables on the same 
KeyExpr. drop(state); primitives.send_declare(Declare { - ext_qos: declare::ext::QoSType::declare_default(), + ext_qos: declare::ext::QoSType::DECLARE, ext_tstamp: None, - ext_nodeid: declare::ext::NodeIdType::default(), + ext_nodeid: declare::ext::NodeIdType::DEFAULT, body: DeclareBody::UndeclareQueryable(UndeclareQueryable { id: 0, // @TODO use proper QueryableId (#703) ext_wire_expr: WireExprType { @@ -1369,13 +1369,13 @@ impl Session { let primitives = state.primitives.as_ref().unwrap().clone(); drop(state); primitives.send_declare(Declare { - ext_qos: declare::ext::QoSType::declare_default(), + ext_qos: declare::ext::QoSType::DECLARE, ext_tstamp: None, - ext_nodeid: declare::ext::NodeIdType::default(), + ext_nodeid: declare::ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareSubscriber(DeclareSubscriber { id: id as u32, wire_expr: key_expr.to_wire(self).to_owned(), - ext_info: SubscriberInfo::default(), + ext_info: SubscriberInfo::DEFAULT, }), }); Ok(tok_state) @@ -1393,9 +1393,9 @@ impl Session { let primitives = state.primitives.as_ref().unwrap().clone(); drop(state); primitives.send_declare(Declare { - ext_qos: ext::QoSType::declare_default(), + ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::UndeclareSubscriber(UndeclareSubscriber { id: 0, // @TODO use proper SubscriberId (#703) ext_wire_expr: WireExprType { @@ -1698,10 +1698,10 @@ impl Session { primitives.send_request(Request { id: 0, // @TODO compute a proper request ID wire_expr: key_expr.to_wire(self).to_owned(), - ext_qos: ext::QoSType::request_default(), + ext_qos: ext::QoSType::REQUEST, ext_tstamp: None, - ext_nodeid: ext::NodeIdType::default(), - ext_target: request::ext::TargetType::default(), + ext_nodeid: ext::NodeIdType::DEFAULT, + ext_target: request::ext::TargetType::DEFAULT, ext_budget: None, ext_timeout: None, payload: RequestBody::Pull(Pull { @@ -1801,9 +1801,9 @@ impl Session { 
primitives.send_request(Request { id: qid, wire_expr: wexpr.clone(), - ext_qos: request::ext::QoSType::request_default(), + ext_qos: request::ext::QoSType::REQUEST, ext_tstamp: None, - ext_nodeid: request::ext::NodeIdType::default(), + ext_nodeid: request::ext::NodeIdType::DEFAULT, ext_target: target, ext_budget: None, ext_timeout: Some(timeout), @@ -1959,7 +1959,7 @@ impl<'s> SessionDeclarations<'s, 'static> for Arc { SubscriberBuilder { session: SessionRef::Shared(self.clone()), key_expr: key_expr.try_into().map_err(Into::into), - reliability: Reliability::default(), + reliability: Reliability::DEFAULT, mode: PushMode, origin: Locality::default(), handler: DefaultHandler, @@ -2040,8 +2040,8 @@ impl<'s> SessionDeclarations<'s, 'static> for Arc { PublisherBuilder { session: SessionRef::Shared(self.clone()), key_expr: key_expr.try_into().map_err(Into::into), - congestion_control: CongestionControl::default(), - priority: Priority::default(), + congestion_control: CongestionControl::DEFAULT, + priority: Priority::DEFAULT, destination: Locality::default(), } } @@ -2247,11 +2247,6 @@ impl Primitives for Session { fn send_response(&self, msg: Response) { trace!("recv Response {:?}", msg); match msg.payload { - ResponseBody::Ack(_) => { - log::warn!( - "Received a ResponseBody::Ack, but this isn't supported yet. Dropping message." - ) - } ResponseBody::Put(_) => { log::warn!( "Received a ResponseBody::Put, but this isn't supported yet. Dropping message." diff --git a/zenoh/src/subscriber.rs b/zenoh/src/subscriber.rs index 7258833d28..fe2236076f 100644 --- a/zenoh/src/subscriber.rs +++ b/zenoh/src/subscriber.rs @@ -25,9 +25,6 @@ use std::sync::Arc; use zenoh_core::{AsyncResolve, Resolvable, Resolve, SyncResolve}; use zenoh_protocol::network::declare::{subscriber::ext::SubscriberInfo, Mode}; -/// The subscription mode. -pub use zenoh_protocol::core::SubMode; - /// The kind of reliability. 
pub use zenoh_protocol::core::Reliability; @@ -117,7 +114,6 @@ impl<'a> PullSubscriberInner<'a> { /// ``` /// # async_std::task::block_on(async { /// use zenoh::prelude::r#async::*; - /// use zenoh::subscriber::SubMode; /// /// let session = zenoh::open(config::peer()).res().await.unwrap(); /// let subscriber = session @@ -252,12 +248,6 @@ impl Drop for SubscriberInner<'_> { #[derive(Debug, Clone, Copy)] pub struct PullMode; -impl From for SubMode { - fn from(_: PullMode) -> Self { - SubMode::Pull - } -} - impl From for Mode { fn from(_: PullMode) -> Self { Mode::Pull @@ -269,12 +259,6 @@ impl From for Mode { #[derive(Debug, Clone, Copy)] pub struct PushMode; -impl From for SubMode { - fn from(_: PushMode) -> Self { - SubMode::Push - } -} - impl From for Mode { fn from(_: PushMode) -> Self { Mode::Push @@ -712,7 +696,6 @@ impl<'a, Receiver> PullSubscriber<'a, Receiver> { /// ``` /// # async_std::task::block_on(async { /// use zenoh::prelude::r#async::*; - /// use zenoh::subscriber::SubMode; /// /// let session = zenoh::open(config::peer()).res().await.unwrap(); /// let subscriber = session From cc8d4a1f93f358ef3a951e0ae0fe27c5b3e41171 Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Mon, 26 Feb 2024 12:30:09 +0100 Subject: [PATCH 003/124] Allow DeclareInterest for any keyexpr (#739) * Allow to DeclareInterest for any keyexpr * Remove forgotten println --- commons/zenoh-codec/src/network/declare.rs | 41 +-- commons/zenoh-protocol/src/network/declare.rs | 283 ++++++++++++++---- zenoh/src/net/routing/mod.rs | 2 +- 3 files changed, 249 insertions(+), 77 deletions(-) diff --git a/commons/zenoh-codec/src/network/declare.rs b/commons/zenoh-codec/src/network/declare.rs index cf92b27c17..6df25a8d2a 100644 --- a/commons/zenoh-codec/src/network/declare.rs +++ b/commons/zenoh-codec/src/network/declare.rs @@ -24,6 +24,7 @@ use zenoh_protocol::{ network::{ declare::{ self, common, interest, keyexpr, queryable, subscriber, token, Declare, DeclareBody, + Interest, }, id, 
Mapping, }, @@ -845,24 +846,20 @@ where fn write(self, writer: &mut W, x: &interest::DeclareInterest) -> Self::Output { let interest::DeclareInterest { id, + interest: _, wire_expr, - interest, } = x; // Header - let mut header = declare::id::D_INTEREST; - if wire_expr.mapping != Mapping::DEFAULT { - header |= subscriber::flag::M; - } - if wire_expr.has_suffix() { - header |= subscriber::flag::N; - } + let header = declare::id::D_INTEREST | x.flags(); self.write(&mut *writer, header)?; // Body self.write(&mut *writer, id)?; - self.write(&mut *writer, wire_expr)?; - self.write(&mut *writer, interest.as_u8())?; + self.write(&mut *writer, x.options())?; + if let Some(we) = wire_expr.as_ref() { + self.write(&mut *writer, we)?; + } Ok(()) } @@ -894,14 +891,20 @@ where // Body let id: interest::InterestId = self.codec.read(&mut *reader)?; - let ccond = Zenoh080Condition::new(imsg::has_flag(self.header, token::flag::N)); - let mut wire_expr: WireExpr<'static> = ccond.read(&mut *reader)?; - wire_expr.mapping = if imsg::has_flag(self.header, token::flag::M) { - Mapping::Sender - } else { - Mapping::Receiver - }; - let interest: u8 = self.codec.read(&mut *reader)?; + let options: u8 = self.codec.read(&mut *reader)?; + let interest = Interest::from((imsg::flags(self.header), options)); + + let mut wire_expr = None; + if interest.restricted() { + let ccond = Zenoh080Condition::new(interest.named()); + let mut we: WireExpr<'static> = ccond.read(&mut *reader)?; + we.mapping = if interest.mapping() { + Mapping::Sender + } else { + Mapping::Receiver + }; + wire_expr = Some(we); + } // Extensions let has_ext = imsg::has_flag(self.header, token::flag::Z); @@ -911,8 +914,8 @@ where Ok(interest::DeclareInterest { id, + interest, wire_expr, - interest: interest.into(), }) } } diff --git a/commons/zenoh-protocol/src/network/declare.rs b/commons/zenoh-protocol/src/network/declare.rs index 1568029cc6..8164d9440d 100644 --- a/commons/zenoh-protocol/src/network/declare.rs +++ 
b/commons/zenoh-protocol/src/network/declare.rs @@ -18,7 +18,6 @@ use crate::{ zextz64, zextzbuf, }; use alloc::borrow::Cow; -use core::ops::BitOr; pub use interest::*; pub use keyexpr::*; pub use queryable::*; @@ -703,13 +702,18 @@ pub mod token { } pub mod interest { + use core::{ + fmt::{self, Debug}, + ops::{Add, AddAssign, Sub, SubAssign}, + }; + use super::*; pub type InterestId = u32; pub mod flag { - pub const N: u8 = 1 << 5; // 0x20 Named if N==1 then the key expr has name/suffix - pub const M: u8 = 1 << 6; // 0x40 Mapping if M==1 then key expr mapping is the one declared by the sender, else it is the one declared by the receiver + pub const C: u8 = 1 << 5; // 0x20 Current if C==1 then the interest refers to the current declarations. + pub const F: u8 = 1 << 6; // 0x40 Future if F==1 then the interest refers to the future declarations. pub const Z: u8 = 1 << 7; // 0x80 Extensions if Z==1 then an extension will follow } @@ -753,21 +757,23 @@ pub mod interest { /// /// ```text /// Flags: - /// - N: Named If N==1 then the key expr has name/suffix - /// - M: Mapping if M==1 then key expr mapping is the one declared by the sender, else it is the one declared by the receiver + /// - C: Current if C==1 then the interest refers to the current declarations. + /// - F: Future if F==1 then the interest refers to the future declarations. Note that if F==0 then: + /// - Declarations SHOULD NOT be sent after the FinalInterest; + /// - UndeclareInterest SHOULD NOT be sent after the FinalInterest. 
/// - Z: Extension If Z==1 then at least one extension is present /// /// 7 6 5 4 3 2 1 0 /// +-+-+-+-+-+-+-+-+ - /// |Z|M|N| D_INT | + /// |Z|F|C| D_INT | /// +---------------+ /// ~ intst_id:z32 ~ /// +---------------+ - /// ~ key_scope:z16 ~ + /// |A|M|N|R|T|Q|S|K| (*) /// +---------------+ - /// ~ key_suffix ~ if N==1 -- + /// ~ key_scope:z16 ~ if R==1 /// +---------------+ - /// |A|F|C|X|T|Q|S|K| (*) + /// ~ key_suffix ~ if R==1 && N==1 -- /// +---------------+ /// ~ [decl_exts] ~ if Z==1 /// +---------------+ @@ -776,63 +782,141 @@ pub mod interest { /// - if S==1 then the interest refers to subscribers /// - if Q==1 then the interest refers to queryables /// - if T==1 then the interest refers to tokens - /// - if C==1 then the interest refers to the current declarations. - /// - if F==1 then the interest refers to the future declarations. Note that if F==0 then: - /// - replies SHOULD NOT be sent after the FinalInterest; - /// - UndeclareInterest SHOULD NOT be sent after the FinalInterest. + /// - if R==1 then the interest is restricted to the matching key expression, else it is for all key expressions. + /// - if N==1 then the key expr has name/suffix. If R==0 then N should be set to 0. + /// - if M==1 then key expr mapping is the one declared by the sender, else it is the one declared by the receiver. + /// If R==0 then M should be set to 0. 
/// - if A==1 then the replies SHOULD be aggregated /// ``` #[derive(Debug, Clone, PartialEq, Eq)] pub struct DeclareInterest { pub id: InterestId, - pub wire_expr: WireExpr<'static>, pub interest: Interest, + pub wire_expr: Option>, } - #[repr(transparent)] - #[derive(Debug, Clone, PartialEq, Eq)] - pub struct Interest(u8); + impl DeclareInterest { + pub fn flags(&self) -> u8 { + let mut interest = self.interest; + if self.interest.current() { + interest += Interest::CURRENT; + } + if self.interest.future() { + interest += Interest::FUTURE; + } + interest.flags + } + + pub fn options(&self) -> u8 { + let mut interest = self.interest; + if let Some(we) = self.wire_expr.as_ref() { + interest += Interest::RESTRICTED; + if we.has_suffix() { + interest += Interest::NAMED; + } + if let Mapping::Sender = we.mapping { + interest += Interest::MAPPING; + } + } + interest.options + } + + #[cfg(feature = "test")] + pub fn rand() -> Self { + use rand::Rng; + let mut rng = rand::thread_rng(); + + let id: InterestId = rng.gen(); + let wire_expr = rng.gen_bool(0.5).then_some(WireExpr::rand()); + let interest = Interest::rand(); + + Self { + id, + wire_expr, + interest, + } + } + } + + #[derive(Clone, Copy)] + pub struct Interest { + flags: u8, + options: u8, + } impl Interest { - pub const KEYEXPRS: Interest = Interest(1); - pub const SUBSCRIBERS: Interest = Interest(1 << 1); - pub const QUERYABLES: Interest = Interest(1 << 2); - pub const TOKENS: Interest = Interest(1 << 3); - // pub const X: Interest = Interest(1 << 4); - pub const CURRENT: Interest = Interest(1 << 5); - pub const FUTURE: Interest = Interest(1 << 6); - pub const AGGREGATE: Interest = Interest(1 << 7); + // Header + pub const CURRENT: Interest = Interest::flags(interest::flag::C); + pub const FUTURE: Interest = Interest::flags(interest::flag::F); + // Flags + pub const KEYEXPRS: Interest = Interest::options(1); + pub const SUBSCRIBERS: Interest = Interest::options(1 << 1); + pub const QUERYABLES: Interest = 
Interest::options(1 << 2); + pub const TOKENS: Interest = Interest::options(1 << 3); + const RESTRICTED: Interest = Interest::options(1 << 4); + const NAMED: Interest = Interest::options(1 << 5); + const MAPPING: Interest = Interest::options(1 << 6); + pub const AGGREGATE: Interest = Interest::options(1 << 7); + pub const ALL: Interest = Interest::options( + Interest::KEYEXPRS.options + | Interest::SUBSCRIBERS.options + | Interest::QUERYABLES.options + | Interest::TOKENS.options, + ); + + const fn flags(flags: u8) -> Self { + Self { flags, options: 0 } + } + + const fn options(options: u8) -> Self { + Self { flags: 0, options } + } + + pub const fn empty() -> Self { + Self { + flags: 0, + options: 0, + } + } + + pub const fn current(&self) -> bool { + imsg::has_flag(self.flags, Self::CURRENT.flags) + } + + pub const fn future(&self) -> bool { + imsg::has_flag(self.flags, Self::FUTURE.flags) + } pub const fn keyexprs(&self) -> bool { - imsg::has_flag(self.0, Self::KEYEXPRS.0) + imsg::has_flag(self.options, Self::KEYEXPRS.options) } pub const fn subscribers(&self) -> bool { - imsg::has_flag(self.0, Self::SUBSCRIBERS.0) + imsg::has_flag(self.options, Self::SUBSCRIBERS.options) } pub const fn queryables(&self) -> bool { - imsg::has_flag(self.0, Self::QUERYABLES.0) + imsg::has_flag(self.options, Self::QUERYABLES.options) } pub const fn tokens(&self) -> bool { - imsg::has_flag(self.0, Self::TOKENS.0) + imsg::has_flag(self.options, Self::TOKENS.options) } - pub const fn current(&self) -> bool { - imsg::has_flag(self.0, Self::CURRENT.0) + pub const fn restricted(&self) -> bool { + imsg::has_flag(self.options, Self::RESTRICTED.options) } - pub const fn future(&self) -> bool { - imsg::has_flag(self.0, Self::FUTURE.0) + pub const fn named(&self) -> bool { + imsg::has_flag(self.options, Self::NAMED.options) } - pub const fn aggregate(&self) -> bool { - imsg::has_flag(self.0, Self::AGGREGATE.0) + pub const fn mapping(&self) -> bool { + imsg::has_flag(self.options, 
Self::MAPPING.options) } - pub const fn as_u8(&self) -> u8 { - self.0 + pub const fn aggregate(&self) -> bool { + imsg::has_flag(self.options, Self::AGGREGATE.options) } #[cfg(feature = "test")] @@ -840,44 +924,129 @@ pub mod interest { use rand::Rng; let mut rng = rand::thread_rng(); - let inner: u8 = rng.gen(); + let mut s = Self::empty(); + if rng.gen_bool(0.5) { + s += Interest::CURRENT; + } + if rng.gen_bool(0.5) { + s += Interest::FUTURE; + } + if rng.gen_bool(0.5) { + s += Interest::KEYEXPRS; + } + if rng.gen_bool(0.5) { + s += Interest::SUBSCRIBERS; + } + if rng.gen_bool(0.5) { + s += Interest::TOKENS; + } + if rng.gen_bool(0.5) { + s += Interest::AGGREGATE; + } + s + } + } - Self(inner) + impl PartialEq for Interest { + fn eq(&self, other: &Self) -> bool { + self.current() == other.current() + && self.future() == other.future() + && self.keyexprs() == other.keyexprs() + && self.subscribers() == other.subscribers() + && self.queryables() == other.queryables() + && self.tokens() == other.tokens() + && self.aggregate() == other.aggregate() } } - impl BitOr for Interest { + impl Debug for Interest { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "Interest {{ ")?; + if self.current() { + write!(f, "C:Y, ")?; + } else { + write!(f, "C:N, ")?; + } + if self.future() { + write!(f, "F:Y, ")?; + } else { + write!(f, "F:N, ")?; + } + if self.keyexprs() { + write!(f, "K:Y, ")?; + } else { + write!(f, "K:N, ")?; + } + if self.subscribers() { + write!(f, "S:Y, ")?; + } else { + write!(f, "S:N, ")?; + } + if self.queryables() { + write!(f, "Q:Y, ")?; + } else { + write!(f, "Q:N, ")?; + } + if self.tokens() { + write!(f, "T:Y, ")?; + } else { + write!(f, "T:N, ")?; + } + if self.aggregate() { + write!(f, "A:Y")?; + } else { + write!(f, "A:N")?; + } + write!(f, " }}")?; + Ok(()) + } + } + + impl Eq for Interest {} + + impl Add for Interest { type Output = Self; - fn bitor(self, rhs: Self) -> Self::Output { - Self(self.0 | rhs.0) + fn add(self, 
rhs: Self) -> Self::Output { + Self { + flags: self.flags | rhs.flags, + options: self.options | rhs.options, + } } } - impl From for Interest { - fn from(v: u8) -> Self { - Self(v) + impl AddAssign for Interest { + fn add_assign(&mut self, rhs: Self) { + self.flags |= rhs.flags; + self.options |= rhs.options; } } - impl DeclareInterest { - #[cfg(feature = "test")] - pub fn rand() -> Self { - use rand::Rng; - let mut rng = rand::thread_rng(); - - let id: InterestId = rng.gen(); - let wire_expr = WireExpr::rand(); - let interest = Interest::rand(); + impl Sub for Interest { + type Output = Self; + fn sub(self, rhs: Self) -> Self::Output { Self { - id, - wire_expr, - interest, + flags: self.flags & !rhs.flags, + options: self.options & !rhs.options, } } } + impl SubAssign for Interest { + fn sub_assign(&mut self, rhs: Self) { + self.flags &= !rhs.flags; + self.options &= !rhs.options; + } + } + + impl From<(u8, u8)> for Interest { + fn from(value: (u8, u8)) -> Self { + let (flags, options) = value; + Self { flags, options } + } + } + /// ```text /// Flags: /// - X: Reserved diff --git a/zenoh/src/net/routing/mod.rs b/zenoh/src/net/routing/mod.rs index 0b069c1337..8147cca31c 100644 --- a/zenoh/src/net/routing/mod.rs +++ b/zenoh/src/net/routing/mod.rs @@ -115,7 +115,7 @@ impl RoutingContext { DeclareBody::UndeclareQueryable(m) => Some(&m.ext_wire_expr.wire_expr), DeclareBody::DeclareToken(m) => Some(&m.wire_expr), DeclareBody::UndeclareToken(m) => Some(&m.ext_wire_expr.wire_expr), - DeclareBody::DeclareInterest(m) => Some(&m.wire_expr), + DeclareBody::DeclareInterest(m) => m.wire_expr.as_ref(), DeclareBody::FinalInterest(_) => None, DeclareBody::UndeclareInterest(m) => Some(&m.ext_wire_expr.wire_expr), }, From 24e5ef573f3454f7bfea2eb86467b28113ffc6dc Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Wed, 28 Feb 2024 10:31:45 +0100 Subject: [PATCH 004/124] ConsolidationMode can be Auto (#738) * ConsolidationMode rework * Fix QueryConsolidation::DEFAULT --- 
commons/zenoh-codec/src/zenoh/query.rs | 2 - commons/zenoh-protocol/src/zenoh/query.rs | 16 +++----- zenoh/src/query.rs | 45 ++++------------------- zenoh/src/session.rs | 10 ++--- 4 files changed, 18 insertions(+), 55 deletions(-) diff --git a/commons/zenoh-codec/src/zenoh/query.rs b/commons/zenoh-codec/src/zenoh/query.rs index 55f25cd5ea..efac7b5671 100644 --- a/commons/zenoh-codec/src/zenoh/query.rs +++ b/commons/zenoh-codec/src/zenoh/query.rs @@ -39,7 +39,6 @@ where Consolidation::None => 1, Consolidation::Monotonic => 2, Consolidation::Latest => 3, - Consolidation::Unique => 4, }; self.write(&mut *writer, v) } @@ -58,7 +57,6 @@ where 1 => Consolidation::None, 2 => Consolidation::Monotonic, 3 => Consolidation::Latest, - 4 => Consolidation::Unique, _ => Consolidation::Auto, // Fallback on Auto if Consolidation is unknown }; Ok(c) diff --git a/commons/zenoh-protocol/src/zenoh/query.rs b/commons/zenoh-protocol/src/zenoh/query.rs index ac53b963f5..f1baaebe20 100644 --- a/commons/zenoh-protocol/src/zenoh/query.rs +++ b/commons/zenoh-protocol/src/zenoh/query.rs @@ -33,8 +33,8 @@ pub enum Consolidation { Monotonic, /// Holds back samples to only send the set of samples that had the highest timestamp for their key. Latest, - /// Remove the duplicates of any samples based on the their timestamp. - Unique, + // Remove the duplicates of any samples based on their timestamp.
+ // Unique, } impl Consolidation { @@ -45,15 +45,9 @@ impl Consolidation { use rand::prelude::SliceRandom; let mut rng = rand::thread_rng(); - *[ - Self::None, - Self::Monotonic, - Self::Latest, - Self::Unique, - Self::Auto, - ] - .choose(&mut rng) - .unwrap() + *[Self::None, Self::Monotonic, Self::Latest, Self::Auto] + .choose(&mut rng) + .unwrap() } } diff --git a/zenoh/src/query.rs b/zenoh/src/query.rs index 7a7a867cd8..a848913c7a 100644 --- a/zenoh/src/query.rs +++ b/zenoh/src/query.rs @@ -22,38 +22,13 @@ use std::collections::HashMap; use std::future::Ready; use std::time::Duration; use zenoh_core::{AsyncResolve, Resolvable, SyncResolve}; -use zenoh_protocol::zenoh::query::Consolidation; use zenoh_result::ZResult; /// The [`Queryable`](crate::queryable::Queryable)s that should be target of a [`get`](Session::get). pub type QueryTarget = zenoh_protocol::network::request::ext::TargetType; /// The kind of consolidation. -#[derive(Debug, Clone, PartialEq, Eq, Copy)] -pub enum ConsolidationMode { - /// No consolidation applied: multiple samples may be received for the same key-timestamp. - None, - /// Monotonic consolidation immediately forwards samples, except if one with an equal or more recent timestamp - /// has already been sent with the same key. - /// - /// This optimizes latency while potentially reducing bandwidth. - /// - /// Note that this doesn't cause re-ordering, but drops the samples for which a more recent timestamp has already - /// been observed with the same key. - Monotonic, - /// Holds back samples to only send the set of samples that had the highest timestamp for their key. 
- Latest, -} - -impl From for Consolidation { - fn from(val: ConsolidationMode) -> Self { - match val { - ConsolidationMode::None => Consolidation::None, - ConsolidationMode::Monotonic => Consolidation::Monotonic, - ConsolidationMode::Latest => Consolidation::Latest, - } - } -} +pub type ConsolidationMode = zenoh_protocol::zenoh::query::Consolidation; /// The operation: either manual or automatic. #[derive(Clone, Copy, Debug, PartialEq, Eq)] @@ -65,30 +40,26 @@ pub enum Mode { /// The replies consolidation strategy to apply on replies to a [`get`](Session::get). #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub struct QueryConsolidation { - pub(crate) mode: Mode, + pub(crate) mode: ConsolidationMode, } impl QueryConsolidation { pub const DEFAULT: Self = Self::AUTO; /// Automatic query consolidation strategy selection. - pub const AUTO: Self = Self { mode: Mode::Auto }; + pub const AUTO: Self = Self { + mode: ConsolidationMode::Auto, + }; pub(crate) const fn from_mode(mode: ConsolidationMode) -> Self { - Self { - mode: Mode::Manual(mode), - } + Self { mode } } /// Returns the requested [`ConsolidationMode`]. 
- pub fn mode(&self) -> Mode { + pub fn mode(&self) -> ConsolidationMode { self.mode } } -impl From> for QueryConsolidation { - fn from(mode: Mode) -> Self { - Self { mode } - } -} + impl From for QueryConsolidation { fn from(mode: ConsolidationMode) -> Self { Self::from_mode(mode) diff --git a/zenoh/src/session.rs b/zenoh/src/session.rs index 329e44e43f..efb7756ba4 100644 --- a/zenoh/src/session.rs +++ b/zenoh/src/session.rs @@ -1728,14 +1728,14 @@ impl Session { log::trace!("get({}, {:?}, {:?})", selector, target, consolidation); let mut state = zwrite!(self.state); let consolidation = match consolidation.mode { - Mode::Auto => { + ConsolidationMode::Auto => { if selector.decode().any(|(k, _)| k.as_ref() == TIME_RANGE_KEY) { ConsolidationMode::None } else { ConsolidationMode::Latest } } - Mode::Manual(mode) => mode, + mode => mode, }; let qid = state.qid_counter.fetch_add(1, Ordering::SeqCst); let nb_final = match destination { @@ -1808,7 +1808,7 @@ impl Session { ext_budget: None, ext_timeout: Some(timeout), payload: RequestBody::Query(zenoh_protocol::zenoh::Query { - consolidation: consolidation.into(), + consolidation, parameters: selector.parameters().to_string(), ext_sinfo: None, ext_body: value.as_ref().map(|v| query::ext::QueryBodyType { @@ -1829,7 +1829,7 @@ impl Session { selector.parameters(), qid, target, - consolidation.into(), + consolidation, value.as_ref().map(|v| query::ext::QueryBodyType { #[cfg(feature = "shared-memory")] ext_shm: None, @@ -2441,7 +2441,7 @@ impl Primitives for Session { } } } - ConsolidationMode::Latest => { + Consolidation::Auto | ConsolidationMode::Latest => { match query.replies.as_ref().unwrap().get( new_reply.sample.as_ref().unwrap().key_expr.as_keyexpr(), ) { From e41f768b2b32d0893839807d0c2208e96d71709a Mon Sep 17 00:00:00 2001 From: OlivierHecart Date: Wed, 28 Feb 2024 12:14:02 +0100 Subject: [PATCH 005/124] Fix bug building reply --- zenoh/src/queryable.rs | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) 
diff --git a/zenoh/src/queryable.rs b/zenoh/src/queryable.rs index d0ce99b512..c802c29689 100644 --- a/zenoh/src/queryable.rs +++ b/zenoh/src/queryable.rs @@ -214,8 +214,6 @@ impl SyncResolve for ReplyBuilder<'_> { let mut ext_attachment = None; #[cfg(feature = "unstable")] { - data_info.source_id = source_info.source_id; - data_info.source_sn = source_info.source_sn; if let Some(attachment) = attachment { ext_attachment = Some(attachment.into()); } @@ -224,6 +222,11 @@ impl SyncResolve for ReplyBuilder<'_> { }}; } + #[cfg(feature = "unstable")] + { + data_info.source_id = source_info.source_id; + data_info.source_sn = source_info.source_sn; + } let ext_sinfo = if data_info.source_id.is_some() || data_info.source_sn.is_some() { Some(zenoh::put::ext::SourceInfoType { zid: data_info.source_id.unwrap_or_default(), From a8cdbbe802b0c307961c5e731d27cb5cf835e0f2 Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Tue, 12 Mar 2024 09:39:25 +0100 Subject: [PATCH 006/124] Revised Encoding API and wire format (#764) * Remove KnownEncoding enum and replace it with consts * Fix no_std * Encoding encoder (#746) * Encoding contains a mapping * Add forgotten file * Provide default encoder * Refine encoder * Fix encoding codec * Do not change the protocol representation * Accept Cow<'static, str> in EncodingMapping trait * Improve Value::Display * Fix doctests * Bump EncodingPrefix to u16. Add IANA encoding mapping. 
* Improve doc * Remove generic from Encoding::starts_with * Remove Display impl for Encoding * Improve doc * Improve doc * Improve encoding parsing * Improve comments * Improve doc * Encoding suffix bitflag * Encoder/Decoder traits take self * Rename encoding() to with_encoding() * Make Value, ZBuf, SingleOrVec empty() const * Derive Encoder for &mut u* and i* * Integers are encoded as le_bytes are not as string * Integers are encoded as le_bytes are not as string * Fix doctest * Refine default encoding mapping * IANA mapping starts from 1024 * Move IANA encoding to zenoh-ext * Improve docs * Improve DefaultEncoding * Add From for ZBuf * Remove Value and Sample Display trait impl * Encoder/Decoder operate on ZBuf * Payload type. Put takes Into. * Flat sample Value to Payload and Encoding fields * Add payload.rs * Polish up Publication * Add serde_cbor::Value as supported DefaultSerializer supported types * Add serde_pickle::Value as supported DefaultSerializer supported types * Add serde_yaml::Value as supported DefaultSerializer supported types * Impl TryFrom for Payload * Remove encoding folder * Polish up Value and Encoding * Fix doctest * Fix some erroneous prelude usage * Fix wrong typedef in publication * Encoding Id and Schema * Encoding Id and Schema * Fix encoding w_len * Wrapper type for Encoding * Add forgotten file * Expand Encoding consts and add doc * Polish doc * Polishing up Payload * Add EncodingMapping trait * Improve docs * Add deserialize in examples * Use deserialize in examples * Remove encoding from zenoh-ext * Add repr(transparent) to Payload * Improve encoding doc --- Cargo.lock | 140 ++- Cargo.toml | 7 +- commons/zenoh-buffers/src/lib.rs | 3 +- commons/zenoh-buffers/src/zbuf.rs | 9 +- commons/zenoh-buffers/src/zslice.rs | 31 +- commons/zenoh-codec/benches/codec.rs | 14 +- commons/zenoh-codec/src/core/encoding.rs | 44 +- commons/zenoh-codec/src/zenoh/put.rs | 6 +- .../zenoh-collections/src/single_or_vec.rs | 10 +-
commons/zenoh-protocol/src/core/cowstr.rs | 2 +- commons/zenoh-protocol/src/core/encoding.rs | 292 +----- commons/zenoh-protocol/src/core/mod.rs | 4 +- commons/zenoh-protocol/src/zenoh/mod.rs | 4 +- examples/examples/z_get.rs | 25 +- examples/examples/z_get_liveliness.rs | 9 +- examples/examples/z_ping.rs | 2 +- examples/examples/z_pong.rs | 2 +- examples/examples/z_pub_thr.rs | 2 +- examples/examples/z_pull.rs | 6 +- examples/examples/z_queryable.rs | 5 +- examples/examples/z_storage.rs | 4 +- examples/examples/z_sub.rs | 5 +- io/zenoh-transport/src/common/batch.rs | 4 +- io/zenoh-transport/src/common/pipeline.rs | 6 +- io/zenoh-transport/src/multicast/link.rs | 2 +- io/zenoh-transport/src/unicast/link.rs | 2 +- .../src/unicast/lowlatency/link.rs | 4 +- .../tests/multicast_compression.rs | 2 +- .../tests/multicast_transport.rs | 2 +- .../tests/unicast_compression.rs | 2 +- .../tests/unicast_concurrent.rs | 4 +- .../tests/unicast_defragmentation.rs | 2 +- .../tests/unicast_intermittent.rs | 2 +- .../tests/unicast_priorities.rs | 2 +- io/zenoh-transport/tests/unicast_shm.rs | 4 +- .../tests/unicast_simultaneous.rs | 2 +- io/zenoh-transport/tests/unicast_transport.rs | 2 +- plugins/zenoh-plugin-example/src/lib.rs | 3 +- .../zenoh-plugin-rest/examples/z_serve_sse.rs | 6 +- plugins/zenoh-plugin-rest/src/lib.rs | 71 +- .../src/replica/align_queryable.rs | 15 +- .../src/replica/aligner.rs | 29 +- .../src/replica/mod.rs | 5 +- .../src/replica/storage.rs | 43 +- .../tests/operations.rs | 7 +- .../tests/wildcard.rs | 11 +- zenoh-ext/Cargo.toml | 3 + zenoh-ext/examples/z_query_sub.rs | 8 +- zenoh-ext/src/group.rs | 4 +- zenoh-ext/src/lib.rs | 13 + zenoh-ext/src/querying_subscriber.rs | 48 +- zenoh-ext/src/subscriber_ext.rs | 12 +- zenoh/Cargo.toml | 4 + zenoh/src/admin.rs | 26 +- zenoh/src/encoding.rs | 850 ++++++++++++++++++ zenoh/src/lib.rs | 25 +- zenoh/src/liveliness.rs | 13 +- zenoh/src/net/routing/dispatcher/queries.rs | 2 +- zenoh/src/net/runtime/adminspace.rs | 87 +- 
zenoh/src/net/tests/tables.rs | 10 +- zenoh/src/payload.rs | 673 ++++++++++++++ zenoh/src/prelude.rs | 20 +- zenoh/src/publication.rs | 110 ++- zenoh/src/queryable.rs | 19 +- zenoh/src/sample.rs | 115 +-- zenoh/src/session.rs | 53 +- zenoh/src/subscriber.rs | 18 +- zenoh/src/value.rs | 696 +------------- zenoh/tests/attachments.rs | 2 +- zenoh/tests/routing.rs | 8 +- zenoh/tests/session.rs | 6 +- zenoh/tests/unicity.rs | 6 +- 72 files changed, 2255 insertions(+), 1434 deletions(-) create mode 100644 zenoh/src/encoding.rs create mode 100644 zenoh/src/payload.rs diff --git a/Cargo.lock b/Cargo.lock index 1d5fab2365..53f2600071 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -446,7 +446,7 @@ checksum = "bc00ceb34980c03614e35a3a4e218276a0a824e911d07651cd0d858a51e8c0f0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.33", + "syn 2.0.52", ] [[package]] @@ -739,7 +739,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.33", + "syn 2.0.52", ] [[package]] @@ -1025,7 +1025,7 @@ checksum = "d150dea618e920167e5973d70ae6ece4385b7164e0d799fe7c122dd0a5d912ad" dependencies = [ "proc-macro2", "quote", - "syn 2.0.33", + "syn 2.0.52", ] [[package]] @@ -1337,7 +1337,7 @@ checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" dependencies = [ "proc-macro2", "quote", - "syn 2.0.33", + "syn 2.0.52", ] [[package]] @@ -1703,6 +1703,12 @@ dependencies = [ "nom", ] +[[package]] +name = "iter-read" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c397ca3ea05ad509c4ec451fea28b4771236a376ca1c69fd5143aae0cf8f93c4" + [[package]] name = "itertools" version = "0.10.5" @@ -2110,9 +2116,9 @@ dependencies = [ [[package]] name = "num-traits" -version = "0.2.16" +version = "0.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f30b0abd723be7e2ffca1272140fac1a2f084c77ec3e123c192b66af1ee9e6c2" +checksum = "da0df0e5185db44f69b44f26786fe401b6c293d1907744beaa7fa62b2e5a517a" dependencies = [ 
"autocfg", "libm", @@ -2267,7 +2273,7 @@ dependencies = [ "pest_meta", "proc-macro2", "quote", - "syn 2.0.33", + "syn 2.0.52", ] [[package]] @@ -2291,6 +2297,48 @@ dependencies = [ "indexmap", ] +[[package]] +name = "phf" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ade2d8b8f33c7333b51bcf0428d37e217e9f32192ae4772156f65063b8ce03dc" +dependencies = [ + "phf_macros", + "phf_shared", +] + +[[package]] +name = "phf_generator" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48e4cc64c2ad9ebe670cb8fd69dd50ae301650392e81c05f9bfcb2d5bdbc24b0" +dependencies = [ + "phf_shared", + "rand 0.8.5", +] + +[[package]] +name = "phf_macros" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3444646e286606587e49f3bcf1679b8cef1dc2c5ecc29ddacaffc305180d464b" +dependencies = [ + "phf_generator", + "phf_shared", + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "phf_shared" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90fcb95eef784c2ac79119d1dd819e162b5da872ce6f3c3abe1e8ca1c082f72b" +dependencies = [ + "siphasher", +] + [[package]] name = "pin-project" version = "1.1.3" @@ -2308,7 +2356,7 @@ checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405" dependencies = [ "proc-macro2", "quote", - "syn 2.0.33", + "syn 2.0.52", ] [[package]] @@ -2451,9 +2499,9 @@ checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068" [[package]] name = "proc-macro2" -version = "1.0.67" +version = "1.0.78" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d433d9f1a3e8c1263d9456598b16fec66f4acc9a74dacffd35c7bb09b3a1328" +checksum = "e2422ad645d89c99f8f3e6b88a9fdeca7fabeac836b1002371c4367c8f984aae" dependencies = [ "unicode-ident", ] @@ -2508,9 +2556,9 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.33" 
+version = "1.0.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae" +checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef" dependencies = [ "proc-macro2", ] @@ -3005,22 +3053,45 @@ checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" [[package]] name = "serde" -version = "1.0.188" +version = "1.0.197" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf9e0fcba69a370eed61bcf2b728575f726b50b55cba78064753d708ddc7549e" +checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2" dependencies = [ "serde_derive", ] +[[package]] +name = "serde-pickle" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c762ad136a26407c6a80825813600ceeab5e613660d93d79a41f0ec877171e71" +dependencies = [ + "byteorder", + "iter-read", + "num-bigint", + "num-traits", + "serde", +] + +[[package]] +name = "serde_cbor" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2bef2ebfde456fb76bbcf9f59315333decc4fda0b2b44b420243c11e0f5ec1f5" +dependencies = [ + "half", + "serde", +] + [[package]] name = "serde_derive" -version = "1.0.188" +version = "1.0.197" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4eca7ac642d82aa35b60049a6eccb4be6be75e599bd2e9adb5f875a737654af2" +checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.33", + "syn 2.0.52", ] [[package]] @@ -3045,9 +3116,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.107" +version = "1.0.114" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b420ce6e3d8bd882e9b243c6eed35dbc9a6110c9769e74b584e0d68d1f20c65" +checksum = "c5f09b1bd632ef549eaa9f60a1f8de742bdbc698e6cee2095fc84dde5f549ae0" dependencies = [ "itoa", "ryu", 
@@ -3227,6 +3298,12 @@ dependencies = [ "event-listener 2.5.3", ] +[[package]] +name = "siphasher" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" + [[package]] name = "slab" version = "0.4.9" @@ -3456,9 +3533,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.33" +version = "2.0.52" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9caece70c63bfba29ec2fed841a09851b14a235c60010fa4de58089b6c025668" +checksum = "b699d15b36d1f02c3e7c69f8ffef53de37aefae075d8488d4ba1a7788d574a07" dependencies = [ "proc-macro2", "quote", @@ -3491,7 +3568,7 @@ checksum = "49922ecae66cc8a249b77e68d1d0623c1b2c514f0060c27cdc68bd62a1219d35" dependencies = [ "proc-macro2", "quote", - "syn 2.0.33", + "syn 2.0.52", ] [[package]] @@ -3641,7 +3718,7 @@ checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.33", + "syn 2.0.52", ] [[package]] @@ -3690,7 +3767,7 @@ checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab" dependencies = [ "proc-macro2", "quote", - "syn 2.0.33", + "syn 2.0.52", ] [[package]] @@ -3995,7 +4072,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.33", + "syn 2.0.52", "wasm-bindgen-shared", ] @@ -4029,7 +4106,7 @@ checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.33", + "syn 2.0.52", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -4325,11 +4402,15 @@ dependencies = [ "ordered-float", "paste", "petgraph", + "phf", "rand 0.8.5", "regex", "rustc_version 0.4.0", "serde", + "serde-pickle", + "serde_cbor", "serde_json", + "serde_yaml", "socket2 0.5.4", "stop-token", "uhlc", @@ -4467,7 +4548,10 @@ dependencies = [ "flume", "futures", "log", + "phf", "serde", + "serde_cbor", + "serde_json", "zenoh", "zenoh-core", "zenoh-macros", @@ 
-4695,7 +4779,7 @@ version = "0.11.0-dev" dependencies = [ "proc-macro2", "quote", - "syn 2.0.33", + "syn 2.0.52", "zenoh-keyexpr", ] @@ -4939,7 +5023,7 @@ checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.33", + "syn 2.0.52", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index d7210ebc0e..9830b56490 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -110,11 +110,12 @@ libloading = "0.8" log = "0.4.17" lz4_flex = "0.11" nix = { version = "0.27", features = ["fs"] } -num_cpus = "1.15.0" +num_cpus = "1.16.0" ordered-float = "4.1.1" panic-message = "0.3.0" paste = "1.0.12" petgraph = "0.6.3" +phf = { version = "0.11.2", features = ["macros"] } pnet = "0.34" pnet_datalink = "0.34" proc-macro2 = "1.0.51" @@ -136,7 +137,9 @@ secrecy = { version = "0.8.0", features = ["serde", "alloc"] } serde = { version = "1.0.154", default-features = false, features = [ "derive", ] } # Default features are disabled due to usage in no_std crates -serde_json = "1.0.94" +serde_cbor = "0.11.2" +serde_json = "1.0.114" +serde-pickle = "1.1.1" serde_yaml = "0.9.19" sha3 = "0.10.6" shared_memory = "0.12.4" diff --git a/commons/zenoh-buffers/src/lib.rs b/commons/zenoh-buffers/src/lib.rs index 4dee599ea7..eae7f1715c 100644 --- a/commons/zenoh-buffers/src/lib.rs +++ b/commons/zenoh-buffers/src/lib.rs @@ -101,7 +101,8 @@ pub mod buffer { let mut slices = self.slices(); match slices.len() { 0 => Cow::Borrowed(b""), - 1 => Cow::Borrowed(slices.next().unwrap()), + // SAFETY: it's safe to use unwrap_unchecked() because we are explicitly checking the length is 1.
+ 1 => Cow::Borrowed(unsafe { slices.next().unwrap_unchecked() }), _ => Cow::Owned(slices.fold(Vec::new(), |mut acc, it| { acc.extend(it); acc diff --git a/commons/zenoh-buffers/src/zbuf.rs b/commons/zenoh-buffers/src/zbuf.rs index 1365397966..fd86f454af 100644 --- a/commons/zenoh-buffers/src/zbuf.rs +++ b/commons/zenoh-buffers/src/zbuf.rs @@ -34,8 +34,10 @@ pub struct ZBuf { impl ZBuf { #[must_use] - pub fn empty() -> Self { - Self::default() + pub const fn empty() -> Self { + Self { + slices: SingleOrVec::empty(), + } } pub fn clear(&mut self) { @@ -72,6 +74,7 @@ impl ZBuf { } self.insert(start, replacement); } + fn remove(&mut self, mut start: usize, mut end: usize) { assert!(start <= end); assert!(end <= self.len()); @@ -100,6 +103,7 @@ impl ZBuf { let drain_end = end_slice_idx + (end_slice.start >= end_slice.end) as usize; self.slices.drain(drain_start..drain_end); } + fn insert(&mut self, mut at: usize, slice: &[u8]) { if slice.is_empty() { return; @@ -206,6 +210,7 @@ where zbuf } } + // Reader #[derive(Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] pub struct ZBufPos { diff --git a/commons/zenoh-buffers/src/zslice.rs b/commons/zenoh-buffers/src/zslice.rs index e53e6f3334..c15cbc6828 100644 --- a/commons/zenoh-buffers/src/zslice.rs +++ b/commons/zenoh-buffers/src/zslice.rs @@ -92,24 +92,41 @@ pub struct ZSlice { } impl ZSlice { + #[deprecated(note = "use `new` instead")] pub fn make( buf: Arc, start: usize, end: usize, + ) -> Result> { + Self::new(buf, start, end) + } + + pub fn new( + buf: Arc, + start: usize, + end: usize, ) -> Result> { if start <= end && end <= buf.as_slice().len() { - Ok(ZSlice { - buf, - start, - end, - #[cfg(feature = "shared-memory")] - kind: ZSliceKind::Raw, - }) + // unsafe: this operation is safe because we just checked the slice boundaries + Ok(unsafe { ZSlice::new_unchecked(buf, start, end) }) } else { Err(buf) } } + /// # Safety + /// This function does not verify whether the `start` and `end` indexes are within
the buffer boundaries. + /// If a [`ZSlice`] is built via this constructor, a later access may panic if `start` and `end` indexes are out-of-bound. + pub unsafe fn new_unchecked(buf: Arc, start: usize, end: usize) -> Self { + ZSlice { + buf, + start, + end, + #[cfg(feature = "shared-memory")] + kind: ZSliceKind::Raw, + } + } + #[inline] #[must_use] pub fn downcast_ref(&self) -> Option<&T> diff --git a/commons/zenoh-codec/benches/codec.rs b/commons/zenoh-codec/benches/codec.rs index 34c9313a7f..d897038f91 100644 --- a/commons/zenoh-codec/benches/codec.rs +++ b/commons/zenoh-codec/benches/codec.rs @@ -87,7 +87,7 @@ fn criterion_benchmark(c: &mut Criterion) { ext_nodeid: ext::NodeIdType::DEFAULT, payload: PushBody::Put(Put { timestamp: None, - encoding: Encoding::DEFAULT, + encoding: Encoding::empty(), ext_sinfo: None, #[cfg(feature = "shared-memory")] ext_shm: None, @@ -133,7 +133,7 @@ fn criterion_benchmark(c: &mut Criterion) { ext_nodeid: ext::NodeIdType::DEFAULT, payload: PushBody::Put(Put { timestamp: None, - encoding: Encoding::DEFAULT, + encoding: Encoding::empty(), ext_sinfo: None, #[cfg(feature = "shared-memory")] ext_shm: None, @@ -174,7 +174,7 @@ fn criterion_benchmark(c: &mut Criterion) { ext_nodeid: ext::NodeIdType::DEFAULT, payload: PushBody::Put(Put { timestamp: None, - encoding: Encoding::DEFAULT, + encoding: Encoding::empty(), ext_sinfo: None, #[cfg(feature = "shared-memory")] ext_shm: None, @@ -215,7 +215,7 @@ fn criterion_benchmark(c: &mut Criterion) { ext_nodeid: ext::NodeIdType::DEFAULT, payload: PushBody::Put(Put { timestamp: None, - encoding: Encoding::DEFAULT, + encoding: Encoding::empty(), ext_sinfo: None, #[cfg(feature = "shared-memory")] ext_shm: None, @@ -243,7 +243,7 @@ fn criterion_benchmark(c: &mut Criterion) { ext_nodeid: ext::NodeIdType::DEFAULT, payload: PushBody::Put(Put { timestamp: None, - encoding: Encoding::DEFAULT, + encoding: Encoding::empty(), ext_sinfo: None, #[cfg(feature = "shared-memory")] ext_shm: None, @@ -282,7 +282,7 
@@ fn criterion_benchmark(c: &mut Criterion) { ext_nodeid: ext::NodeIdType::DEFAULT, payload: PushBody::Put(Put { timestamp: None, - encoding: Encoding::DEFAULT, + encoding: Encoding::empty(), ext_sinfo: None, #[cfg(feature = "shared-memory")] ext_shm: None, @@ -305,7 +305,7 @@ fn criterion_benchmark(c: &mut Criterion) { let mut idx = 0; while idx < zslice.len() { let len = (zslice.len() - idx).min(chunk); - zbuf.push_zslice(ZSlice::make(buff.clone(), idx, idx + len).unwrap()); + zbuf.push_zslice(ZSlice::new(buff.clone(), idx, idx + len).unwrap()); idx += len; } diff --git a/commons/zenoh-codec/src/core/encoding.rs b/commons/zenoh-codec/src/core/encoding.rs index 478bcf1cd8..cfbe0084ba 100644 --- a/commons/zenoh-codec/src/core/encoding.rs +++ b/commons/zenoh-codec/src/core/encoding.rs @@ -12,16 +12,22 @@ // ZettaScale Zenoh Team, // use crate::{LCodec, RCodec, WCodec, Zenoh080, Zenoh080Bounded}; -use alloc::string::String; use zenoh_buffers::{ reader::{DidntRead, Reader}, writer::{DidntWrite, Writer}, }; -use zenoh_protocol::core::Encoding; +use zenoh_protocol::{ + common::imsg, + core::encoding::{flag, Encoding, EncodingId}, +}; impl LCodec<&Encoding> for Zenoh080 { fn w_len(self, x: &Encoding) -> usize { - 1 + self.w_len(x.suffix()) + let mut len = self.w_len((x.id as u32) << 1); + if let Some(schema) = x.schema.as_ref() { + len += self.w_len(schema.as_slice()); + } + len } } @@ -32,9 +38,17 @@ where type Output = Result<(), DidntWrite>; fn write(self, writer: &mut W, x: &Encoding) -> Self::Output { - let zodec = Zenoh080Bounded::::new(); - zodec.write(&mut *writer, *x.prefix() as u8)?; - zodec.write(&mut *writer, x.suffix())?; + let mut id = (x.id as u32) << 1; + + if x.schema.is_some() { + id |= flag::S; + } + let zodec = Zenoh080Bounded::::new(); + zodec.write(&mut *writer, id)?; + if let Some(schema) = x.schema.as_ref() { + let zodec = Zenoh080Bounded::::new(); + zodec.write(&mut *writer, schema)?; + } Ok(()) } } @@ -46,10 +60,20 @@ where type Error = 
DidntRead; fn read(self, reader: &mut R) -> Result { - let zodec = Zenoh080Bounded::::new(); - let prefix: u8 = zodec.read(&mut *reader)?; - let suffix: String = zodec.read(&mut *reader)?; - let encoding = Encoding::new(prefix, suffix).map_err(|_| DidntRead)?; + let zodec = Zenoh080Bounded::::new(); + let id: u32 = zodec.read(&mut *reader)?; + let (id, has_suffix) = ( + (id >> 1) as EncodingId, + imsg::has_flag(id as u8, flag::S as u8), + ); + + let mut schema = None; + if has_suffix { + let zodec = Zenoh080Bounded::::new(); + schema = Some(zodec.read(&mut *reader)?); + } + + let encoding = Encoding { id, schema }; Ok(encoding) } } diff --git a/commons/zenoh-codec/src/zenoh/put.rs b/commons/zenoh-codec/src/zenoh/put.rs index 4f50be4872..776b47245f 100644 --- a/commons/zenoh-codec/src/zenoh/put.rs +++ b/commons/zenoh-codec/src/zenoh/put.rs @@ -54,7 +54,7 @@ where if timestamp.is_some() { header |= flag::T; } - if encoding != &Encoding::DEFAULT { + if encoding != &Encoding::empty() { header |= flag::E; } let mut n_exts = (ext_sinfo.is_some()) as u8 @@ -73,7 +73,7 @@ where if let Some(ts) = timestamp.as_ref() { self.write(&mut *writer, ts)?; } - if encoding != &Encoding::DEFAULT { + if encoding != &Encoding::empty() { self.write(&mut *writer, encoding)?; } @@ -143,7 +143,7 @@ where timestamp = Some(self.codec.read(&mut *reader)?); } - let mut encoding = Encoding::DEFAULT; + let mut encoding = Encoding::empty(); if imsg::has_flag(self.header, flag::E) { encoding = self.codec.read(&mut *reader)?; } diff --git a/commons/zenoh-collections/src/single_or_vec.rs b/commons/zenoh-collections/src/single_or_vec.rs index c68ac6d8ff..ceb43e4025 100644 --- a/commons/zenoh-collections/src/single_or_vec.rs +++ b/commons/zenoh-collections/src/single_or_vec.rs @@ -30,6 +30,10 @@ enum SingleOrVecInner { } impl SingleOrVecInner { + const fn empty() -> Self { + SingleOrVecInner::Vec(Vec::new()) + } + fn push(&mut self, value: T) { match self { SingleOrVecInner::Vec(vec) if vec.capacity() 
== 0 => *self = Self::Single(value), @@ -53,7 +57,7 @@ where impl Default for SingleOrVecInner { fn default() -> Self { - SingleOrVecInner::Vec(Vec::new()) + Self::empty() } } @@ -88,6 +92,10 @@ where pub struct SingleOrVec(SingleOrVecInner); impl SingleOrVec { + pub const fn empty() -> Self { + Self(SingleOrVecInner::empty()) + } + pub fn push(&mut self, value: T) { self.0.push(value); } diff --git a/commons/zenoh-protocol/src/core/cowstr.rs b/commons/zenoh-protocol/src/core/cowstr.rs index 33dac4524f..209d020f40 100644 --- a/commons/zenoh-protocol/src/core/cowstr.rs +++ b/commons/zenoh-protocol/src/core/cowstr.rs @@ -21,7 +21,7 @@ enum CowStrInner<'a> { } pub struct CowStr<'a>(CowStrInner<'a>); impl<'a> CowStr<'a> { - pub(crate) fn borrowed(s: &'a str) -> Self { + pub(crate) const fn borrowed(s: &'a str) -> Self { Self(CowStrInner::Borrowed(s)) } pub fn as_str(&self) -> &str { diff --git a/commons/zenoh-protocol/src/core/encoding.rs b/commons/zenoh-protocol/src/core/encoding.rs index b3abae8aae..9b9aa5bf2f 100644 --- a/commons/zenoh-protocol/src/core/encoding.rs +++ b/commons/zenoh-protocol/src/core/encoding.rs @@ -11,282 +11,68 @@ // Contributors: // ZettaScale Zenoh Team, // -use crate::core::CowStr; -use alloc::{borrow::Cow, string::String}; -use core::{ - convert::TryFrom, - fmt::{self, Debug}, - mem, -}; -use zenoh_result::{bail, zerror, ZError, ZResult}; - -mod consts { - pub(super) const MIMES: [&str; 21] = [ - /* 0 */ "", - /* 1 */ "application/octet-stream", - /* 2 */ "application/custom", // non iana standard - /* 3 */ "text/plain", - /* 4 */ "application/properties", // non iana standard - /* 5 */ "application/json", // if not readable from casual users - /* 6 */ "application/sql", - /* 7 */ "application/integer", // non iana standard - /* 8 */ "application/float", // non iana standard - /* 9 */ - "application/xml", // if not readable from casual users (RFC 3023, sec 3) - /* 10 */ "application/xhtml+xml", - /* 11 */ "application/x-www-form-urlencoded", 
- /* 12 */ "text/json", // non iana standard - if readable from casual users - /* 13 */ "text/html", - /* 14 */ "text/xml", // if readable from casual users (RFC 3023, section 3) - /* 15 */ "text/css", - /* 16 */ "text/csv", - /* 17 */ "text/javascript", - /* 18 */ "image/jpeg", - /* 19 */ "image/png", - /* 20 */ "image/gif", - ]; -} - -#[repr(u8)] -#[derive(Clone, Copy, Debug, PartialEq, Eq)] -pub enum KnownEncoding { - Empty = 0, - AppOctetStream = 1, - AppCustom = 2, - TextPlain = 3, - AppProperties = 4, - AppJson = 5, - AppSql = 6, - AppInteger = 7, - AppFloat = 8, - AppXml = 9, - AppXhtmlXml = 10, - AppXWwwFormUrlencoded = 11, - TextJson = 12, - TextHtml = 13, - TextXml = 14, - TextCss = 15, - TextCsv = 16, - TextJavascript = 17, - ImageJpeg = 18, - ImagePng = 19, - ImageGif = 20, -} - -impl From for u8 { - fn from(val: KnownEncoding) -> Self { - val as u8 - } -} - -impl From for &str { - fn from(val: KnownEncoding) -> Self { - consts::MIMES[u8::from(val) as usize] - } -} - -impl TryFrom for KnownEncoding { - type Error = ZError; - fn try_from(value: u8) -> Result { - if value < consts::MIMES.len() as u8 + 1 { - Ok(unsafe { mem::transmute(value) }) - } else { - Err(zerror!("Unknown encoding")) - } - } -} - -impl AsRef for KnownEncoding { - fn as_ref(&self) -> &str { - consts::MIMES[u8::from(*self) as usize] - } -} - -/// The encoding of a zenoh `zenoh::Value`. -/// -/// A zenoh encoding is a HTTP Mime type represented, for wire efficiency, -/// as an integer prefix (that maps to a string) and a string suffix. +use core::fmt::Debug; +use zenoh_buffers::ZSlice; + +pub type EncodingId = u16; + +/// [`Encoding`] is a metadata that indicates how the data payload should be interpreted. +/// For wire-efficiency and extensibility purposes, Zenoh defines an [`Encoding`] as +/// composed of an unsigned integer prefix and a string suffix. The actual meaning of the +/// prefix and suffix are out-of-scope of the protocol definition. 
Therefore, Zenoh does not +/// impose any encoding mapping and users are free to use any mapping they like. +/// Nevertheless, it is worth highlighting that Zenoh still provides a default mapping as part +/// of the API for user convenience. That mapping has no impact on the Zenoh protocol definition. #[derive(Clone, Debug, PartialEq, Eq)] -pub enum Encoding { - Exact(KnownEncoding), - WithSuffix(KnownEncoding, CowStr<'static>), +pub struct Encoding { + pub id: EncodingId, + pub schema: Option, } -impl Encoding { - pub fn new(prefix: u8, suffix: IntoCowStr) -> ZResult - where - IntoCowStr: Into> + AsRef, - { - let prefix = KnownEncoding::try_from(prefix)?; - let suffix = suffix.into(); - if suffix.as_bytes().len() > u8::MAX as usize { - bail!("Suffix length is limited to 255 characters") - } - if suffix.as_ref().is_empty() { - Ok(Encoding::Exact(prefix)) - } else { - Ok(Encoding::WithSuffix(prefix, suffix.into())) - } - } - - /// Sets the suffix of this encoding. - pub fn with_suffix(self, suffix: IntoCowStr) -> ZResult - where - IntoCowStr: Into> + AsRef, - { - match self { - Encoding::Exact(e) => Encoding::new(e as u8, suffix), - Encoding::WithSuffix(e, s) => Encoding::new(e as u8, s + suffix.as_ref()), - } - } - - pub fn as_ref<'a, T>(&'a self) -> T - where - &'a Self: Into, - { - self.into() - } - - /// Returns `true`if the string representation of this encoding starts with - /// the string representation of ther given encoding.
- pub fn starts_with(&self, with: T) -> bool - where - T: Into, - { - let with: Encoding = with.into(); - self.prefix() == with.prefix() && self.suffix().starts_with(with.suffix()) - } - - pub const fn prefix(&self) -> &KnownEncoding { - match self { - Encoding::Exact(e) | Encoding::WithSuffix(e, _) => e, - } - } - - pub fn suffix(&self) -> &str { - match self { - Encoding::Exact(_) => "", - Encoding::WithSuffix(_, s) => s.as_ref(), - } - } +/// # Encoding field +/// +/// ```text +/// 7 6 5 4 3 2 1 0 +/// +-+-+-+-+-+-+-+-+ +/// ~ id: z16 |S~ +/// +---------------+ +/// ~schema: ~ -- if S==1 +/// +---------------+ +/// ``` +pub mod flag { + pub const S: u32 = 1; // 0x01 Suffix if S==1 then suffix is present } impl Encoding { - pub const EMPTY: Encoding = Encoding::Exact(KnownEncoding::Empty); - pub const APP_OCTET_STREAM: Encoding = Encoding::Exact(KnownEncoding::AppOctetStream); - pub const APP_CUSTOM: Encoding = Encoding::Exact(KnownEncoding::AppCustom); - pub const TEXT_PLAIN: Encoding = Encoding::Exact(KnownEncoding::TextPlain); - pub const APP_PROPERTIES: Encoding = Encoding::Exact(KnownEncoding::AppProperties); - pub const APP_JSON: Encoding = Encoding::Exact(KnownEncoding::AppJson); - pub const APP_SQL: Encoding = Encoding::Exact(KnownEncoding::AppSql); - pub const APP_INTEGER: Encoding = Encoding::Exact(KnownEncoding::AppInteger); - pub const APP_FLOAT: Encoding = Encoding::Exact(KnownEncoding::AppFloat); - pub const APP_XML: Encoding = Encoding::Exact(KnownEncoding::AppXml); - pub const APP_XHTML_XML: Encoding = Encoding::Exact(KnownEncoding::AppXhtmlXml); - pub const APP_XWWW_FORM_URLENCODED: Encoding = - Encoding::Exact(KnownEncoding::AppXWwwFormUrlencoded); - pub const TEXT_JSON: Encoding = Encoding::Exact(KnownEncoding::TextJson); - pub const TEXT_HTML: Encoding = Encoding::Exact(KnownEncoding::TextHtml); - pub const TEXT_XML: Encoding = Encoding::Exact(KnownEncoding::TextXml); - pub const TEXT_CSS: Encoding = Encoding::Exact(KnownEncoding::TextCss); - 
pub const TEXT_CSV: Encoding = Encoding::Exact(KnownEncoding::TextCsv); - pub const TEXT_JAVASCRIPT: Encoding = Encoding::Exact(KnownEncoding::TextJavascript); - pub const IMAGE_JPEG: Encoding = Encoding::Exact(KnownEncoding::ImageJpeg); - pub const IMAGE_PNG: Encoding = Encoding::Exact(KnownEncoding::ImagePng); - pub const IMAGE_GIF: Encoding = Encoding::Exact(KnownEncoding::ImageGif); -} - -impl fmt::Display for Encoding { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - Encoding::Exact(e) => f.write_str(e.as_ref()), - Encoding::WithSuffix(e, s) => { - f.write_str(e.as_ref())?; - f.write_str(s) - } - } - } -} - -impl From<&'static str> for Encoding { - fn from(s: &'static str) -> Self { - for (i, v) in consts::MIMES.iter().enumerate().skip(1) { - if let Some(suffix) = s.strip_prefix(v) { - if suffix.is_empty() { - return Encoding::Exact(unsafe { mem::transmute(i as u8) }); - } else { - return Encoding::WithSuffix(unsafe { mem::transmute(i as u8) }, suffix.into()); - } - } - } - if s.is_empty() { - Encoding::Exact(KnownEncoding::Empty) - } else { - Encoding::WithSuffix(KnownEncoding::Empty, s.into()) - } - } -} - -impl From for Encoding { - fn from(mut s: String) -> Self { - for (i, v) in consts::MIMES.iter().enumerate().skip(1) { - if s.starts_with(v) { - s.replace_range(..v.len(), ""); - if s.is_empty() { - return Encoding::Exact(unsafe { mem::transmute(i as u8) }); - } else { - return Encoding::WithSuffix(unsafe { mem::transmute(i as u8) }, s.into()); - } - } - } - if s.is_empty() { - Encoding::Exact(KnownEncoding::Empty) - } else { - Encoding::WithSuffix(KnownEncoding::Empty, s.into()) + /// Returns a new [`Encoding`] object with default empty prefix ID. 
+ pub const fn empty() -> Self { + Self { + id: 0, + schema: None, } } } -impl From<&KnownEncoding> for Encoding { - fn from(e: &KnownEncoding) -> Encoding { - Encoding::Exact(*e) - } -} - -impl From for Encoding { - fn from(e: KnownEncoding) -> Encoding { - Encoding::Exact(e) - } -} - impl Default for Encoding { fn default() -> Self { - KnownEncoding::Empty.into() + Self::empty() } } impl Encoding { - pub const DEFAULT: Self = Self::EMPTY; - #[cfg(feature = "test")] pub fn rand() -> Self { - use rand::{ - distributions::{Alphanumeric, DistString}, - Rng, - }; + use rand::Rng; const MIN: usize = 2; const MAX: usize = 16; let mut rng = rand::thread_rng(); - let prefix: u8 = rng.gen_range(0..20); - let suffix: String = if rng.gen_bool(0.5) { - let len = rng.gen_range(MIN..MAX); - Alphanumeric.sample_string(&mut rng, len) - } else { - String::new() - }; - Encoding::new(prefix, suffix).unwrap() + let id: EncodingId = rng.gen(); + let schema = rng + .gen_bool(0.5) + .then_some(ZSlice::rand(rng.gen_range(MIN..MAX))); + Encoding { id, schema } } } diff --git a/commons/zenoh-protocol/src/core/mod.rs b/commons/zenoh-protocol/src/core/mod.rs index 3e9315bec2..82658db2fd 100644 --- a/commons/zenoh-protocol/src/core/mod.rs +++ b/commons/zenoh-protocol/src/core/mod.rs @@ -41,8 +41,8 @@ pub use wire_expr::*; mod cowstr; pub use cowstr::CowStr; -mod encoding; -pub use encoding::{Encoding, KnownEncoding}; +pub mod encoding; +pub use encoding::{Encoding, EncodingId}; pub mod locator; pub use locator::*; diff --git a/commons/zenoh-protocol/src/zenoh/mod.rs b/commons/zenoh-protocol/src/zenoh/mod.rs index d73d8cdd06..4c8458885b 100644 --- a/commons/zenoh-protocol/src/zenoh/mod.rs +++ b/commons/zenoh-protocol/src/zenoh/mod.rs @@ -209,12 +209,14 @@ pub mod ext { } } + /// ```text /// 7 6 5 4 3 2 1 0 /// +-+-+-+-+-+-+-+-+ /// ~ encoding ~ /// +---------------+ - /// ~ pl: [u8;z32] ~ -- Payload + /// ~ pl: ~ -- Payload /// +---------------+ + /// ``` #[derive(Debug, Clone, PartialEq, Eq)] 
pub struct ValueType { #[cfg(feature = "shared-memory")] diff --git a/examples/examples/z_get.rs b/examples/examples/z_get.rs index 57c36c2e62..0fff95c250 100644 --- a/examples/examples/z_get.rs +++ b/examples/examples/z_get.rs @@ -12,7 +12,6 @@ // ZettaScale Zenoh Team, // use clap::Parser; -use std::convert::TryFrom; use std::time::Duration; use zenoh::config::Config; use zenoh::prelude::r#async::*; @@ -40,12 +39,24 @@ async fn main() { .unwrap(); while let Ok(reply) = replies.recv_async().await { match reply.sample { - Ok(sample) => println!( - ">> Received ('{}': '{}')", - sample.key_expr.as_str(), - sample.value, - ), - Err(err) => println!(">> Received (ERROR: '{}')", String::try_from(&err).unwrap()), + Ok(sample) => { + let payload = sample + .payload + .deserialize::() + .unwrap_or_else(|e| format!("{}", e)); + println!( + ">> Received ('{}': '{}')", + sample.key_expr.as_str(), + payload, + ); + } + Err(err) => { + let payload = err + .payload + .deserialize::() + .unwrap_or_else(|e| format!("{}", e)); + println!(">> Received (ERROR: '{}')", payload); + } } } } diff --git a/examples/examples/z_get_liveliness.rs b/examples/examples/z_get_liveliness.rs index e0aaf8cd23..036dc0ab98 100644 --- a/examples/examples/z_get_liveliness.rs +++ b/examples/examples/z_get_liveliness.rs @@ -12,7 +12,6 @@ // ZettaScale Zenoh Team, // use clap::Parser; -use std::convert::TryFrom; use std::time::Duration; use zenoh::config::Config; use zenoh::prelude::r#async::*; @@ -39,7 +38,13 @@ async fn main() { while let Ok(reply) = replies.recv_async().await { match reply.sample { Ok(sample) => println!(">> Alive token ('{}')", sample.key_expr.as_str(),), - Err(err) => println!(">> Received (ERROR: '{}')", String::try_from(&err).unwrap()), + Err(err) => { + let payload = err + .payload + .deserialize::() + .unwrap_or_else(|e| format!("{}", e)); + println!(">> Received (ERROR: '{}')", payload); + } } } } diff --git a/examples/examples/z_ping.rs b/examples/examples/z_ping.rs index 
fe5ed4d46b..cb6fecd81a 100644 --- a/examples/examples/z_ping.rs +++ b/examples/examples/z_ping.rs @@ -38,7 +38,7 @@ fn main() { .res() .unwrap(); - let data: Value = (0usize..size) + let data: Payload = (0usize..size) .map(|i| (i % 10) as u8) .collect::>() .into(); diff --git a/examples/examples/z_pong.rs b/examples/examples/z_pong.rs index f057075434..1f06c7abb9 100644 --- a/examples/examples/z_pong.rs +++ b/examples/examples/z_pong.rs @@ -41,7 +41,7 @@ fn main() { let _sub = session .declare_subscriber(key_expr_ping) - .callback(move |sample| publisher.put(sample.value).res().unwrap()) + .callback(move |sample| publisher.put(sample.payload).res().unwrap()) .res() .unwrap(); for _ in stdin().bytes().take_while(|b| !matches!(b, Ok(b'q'))) {} diff --git a/examples/examples/z_pub_thr.rs b/examples/examples/z_pub_thr.rs index b698cbc80b..7a3e90f627 100644 --- a/examples/examples/z_pub_thr.rs +++ b/examples/examples/z_pub_thr.rs @@ -30,7 +30,7 @@ fn main() { let payload_size = args.payload_size; - let data: Value = (0..payload_size) + let data: Payload = (0..payload_size) .map(|i| (i % 10) as u8) .collect::>() .into(); diff --git a/examples/examples/z_pull.rs b/examples/examples/z_pull.rs index 812c47294e..ed2a90f1a6 100644 --- a/examples/examples/z_pull.rs +++ b/examples/examples/z_pull.rs @@ -44,11 +44,15 @@ async fn main() { // Define the future to handle incoming samples of the subscription. 
let subs = async { while let Ok(sample) = subscriber.recv_async().await { + let payload = sample + .payload + .deserialize::() + .unwrap_or_else(|e| format!("{}", e)); println!( ">> [Subscriber] Received {} ('{}': '{}')", sample.kind, sample.key_expr.as_str(), - sample.value, + payload, ); } }; diff --git a/examples/examples/z_queryable.rs b/examples/examples/z_queryable.rs index 54b9858cf0..d7376835b7 100644 --- a/examples/examples/z_queryable.rs +++ b/examples/examples/z_queryable.rs @@ -49,7 +49,10 @@ async fn main() { let query = query.unwrap(); match query.value() { None => println!(">> [Queryable ] Received Query '{}'", query.selector()), - Some(value) => println!(">> [Queryable ] Received Query '{}' with value '{}'", query.selector(), value), + Some(value) => { + let payload = value.payload.deserialize::().unwrap_or_else(|e| format!("{}", e)); + println!(">> [Queryable ] Received Query '{}' with value '{}'", query.selector(), payload); + }, } let reply = if send_errors.swap(false, Relaxed) { println!( diff --git a/examples/examples/z_storage.rs b/examples/examples/z_storage.rs index 79164c914a..5e0eaabd44 100644 --- a/examples/examples/z_storage.rs +++ b/examples/examples/z_storage.rs @@ -53,8 +53,8 @@ async fn main() { select!( sample = subscriber.recv_async() => { let sample = sample.unwrap(); - println!(">> [Subscriber] Received {} ('{}': '{}')", - sample.kind, sample.key_expr.as_str(), sample.value); + let payload = sample.payload.deserialize::().unwrap_or_else(|e| format!("{}", e)); + println!(">> [Subscriber] Received {} ('{}': '{}')", sample.kind, sample.key_expr.as_str(),payload); if sample.kind == SampleKind::Delete { stored.remove(&sample.key_expr.to_string()); } else { diff --git a/examples/examples/z_sub.rs b/examples/examples/z_sub.rs index 0542f85870..195e2f7640 100644 --- a/examples/examples/z_sub.rs +++ b/examples/examples/z_sub.rs @@ -46,10 +46,9 @@ async fn main() { select!( sample = subscriber.recv_async() => { let sample = 
sample.unwrap(); - println!(">> [Subscriber] Received {} ('{}': '{}')", - sample.kind, sample.key_expr.as_str(), sample.value); + let payload = sample.payload.deserialize::().unwrap_or_else(|e| format!("{}", e)); + println!(">> [Subscriber] Received {} ('{}': '{}')", sample.kind, sample.key_expr.as_str(), payload); }, - _ = stdin.read_exact(&mut input).fuse() => { match input[0] { b'q' => break, diff --git a/io/zenoh-transport/src/common/batch.rs b/io/zenoh-transport/src/common/batch.rs index a6aad76f7b..e923a7e1af 100644 --- a/io/zenoh-transport/src/common/batch.rs +++ b/io/zenoh-transport/src/common/batch.rs @@ -460,7 +460,7 @@ impl RBatch { let mut into = (buff)(); let n = lz4_flex::block::decompress_into(payload, into.as_mut_slice()) .map_err(|_| zerror!("Decompression error"))?; - let zslice = ZSlice::make(Arc::new(into), 0, n) + let zslice = ZSlice::new(Arc::new(into), 0, n) .map_err(|_| zerror!("Invalid decompression buffer length"))?; Ok(zslice) } @@ -579,7 +579,7 @@ mod tests { ext_nodeid: ext::NodeIdType::DEFAULT, payload: PushBody::Put(Put { timestamp: None, - encoding: Encoding::DEFAULT, + encoding: Encoding::empty(), ext_sinfo: None, #[cfg(feature = "shared-memory")] ext_shm: None, diff --git a/io/zenoh-transport/src/common/pipeline.rs b/io/zenoh-transport/src/common/pipeline.rs index eebf23abc9..3968eabdf5 100644 --- a/io/zenoh-transport/src/common/pipeline.rs +++ b/io/zenoh-transport/src/common/pipeline.rs @@ -754,7 +754,7 @@ mod tests { ext_nodeid: ext::NodeIdType::DEFAULT, payload: PushBody::Put(Put { timestamp: None, - encoding: Encoding::DEFAULT, + encoding: Encoding::empty(), ext_sinfo: None, #[cfg(feature = "shared-memory")] ext_shm: None, @@ -884,7 +884,7 @@ mod tests { ext_nodeid: ext::NodeIdType::DEFAULT, payload: PushBody::Put(Put { timestamp: None, - encoding: Encoding::DEFAULT, + encoding: Encoding::empty(), ext_sinfo: None, #[cfg(feature = "shared-memory")] ext_shm: None, @@ -996,7 +996,7 @@ mod tests { ext_nodeid: 
ext::NodeIdType::DEFAULT, payload: PushBody::Put(Put { timestamp: None, - encoding: Encoding::DEFAULT, + encoding: Encoding::empty(), ext_sinfo: None, #[cfg(feature = "shared-memory")] ext_shm: None, diff --git a/io/zenoh-transport/src/multicast/link.rs b/io/zenoh-transport/src/multicast/link.rs index b24c077c57..0172902935 100644 --- a/io/zenoh-transport/src/multicast/link.rs +++ b/io/zenoh-transport/src/multicast/link.rs @@ -216,7 +216,7 @@ impl TransportLinkMulticastRx { let mut into = (buff)(); let (n, locator) = self.inner.link.read(into.as_mut_slice()).await?; - let buffer = ZSlice::make(Arc::new(into), 0, n).map_err(|_| zerror!("Error"))?; + let buffer = ZSlice::new(Arc::new(into), 0, n).map_err(|_| zerror!("Error"))?; let mut batch = RBatch::new(self.inner.config.batch, buffer); batch.initialize(buff).map_err(|_| zerror!("{ERR}{self}"))?; Ok((batch, locator.into_owned())) diff --git a/io/zenoh-transport/src/unicast/link.rs b/io/zenoh-transport/src/unicast/link.rs index bd756d6396..daa6c3e5a5 100644 --- a/io/zenoh-transport/src/unicast/link.rs +++ b/io/zenoh-transport/src/unicast/link.rs @@ -232,7 +232,7 @@ impl TransportLinkUnicastRx { // log::trace!("RBytes: {:02x?}", &into.as_slice()[0..end]); - let buffer = ZSlice::make(Arc::new(into), 0, end) + let buffer = ZSlice::new(Arc::new(into), 0, end) .map_err(|_| zerror!("{ERR}{self}. 
ZSlice index(es) out of bounds"))?; let mut batch = RBatch::new(self.batch, buffer); batch diff --git a/io/zenoh-transport/src/unicast/lowlatency/link.rs b/io/zenoh-transport/src/unicast/lowlatency/link.rs index 6a382f5960..3c290ac89e 100644 --- a/io/zenoh-transport/src/unicast/lowlatency/link.rs +++ b/io/zenoh-transport/src/unicast/lowlatency/link.rs @@ -239,7 +239,7 @@ async fn rx_task_stream( transport.stats.inc_rx_bytes(2 + bytes); // Account for the batch len encoding (16 bits) // Deserialize all the messages from the current ZBuf - let zslice = ZSlice::make(Arc::new(buffer), 0, bytes).unwrap(); + let zslice = ZSlice::new(Arc::new(buffer), 0, bytes).unwrap(); transport.read_messages(zslice, &link.link).await?; } } @@ -274,7 +274,7 @@ async fn rx_task_dgram( transport.stats.inc_rx_bytes(bytes); // Deserialize all the messages from the current ZBuf - let zslice = ZSlice::make(Arc::new(buffer), 0, bytes).unwrap(); + let zslice = ZSlice::new(Arc::new(buffer), 0, bytes).unwrap(); transport.read_messages(zslice, &link.link).await?; } } diff --git a/io/zenoh-transport/tests/multicast_compression.rs b/io/zenoh-transport/tests/multicast_compression.rs index 4d1196e10f..5301b967f6 100644 --- a/io/zenoh-transport/tests/multicast_compression.rs +++ b/io/zenoh-transport/tests/multicast_compression.rs @@ -273,7 +273,7 @@ mod tests { payload: Put { payload: vec![0u8; msg_size].into(), timestamp: None, - encoding: Encoding::DEFAULT, + encoding: Encoding::empty(), ext_sinfo: None, #[cfg(feature = "shared-memory")] ext_shm: None, diff --git a/io/zenoh-transport/tests/multicast_transport.rs b/io/zenoh-transport/tests/multicast_transport.rs index fe5a44b7ee..69c1decd83 100644 --- a/io/zenoh-transport/tests/multicast_transport.rs +++ b/io/zenoh-transport/tests/multicast_transport.rs @@ -269,7 +269,7 @@ mod tests { payload: Put { payload: vec![0u8; msg_size].into(), timestamp: None, - encoding: Encoding::DEFAULT, + encoding: Encoding::empty(), ext_sinfo: None, #[cfg(feature = 
"shared-memory")] ext_shm: None, diff --git a/io/zenoh-transport/tests/unicast_compression.rs b/io/zenoh-transport/tests/unicast_compression.rs index dd4f55b5f5..a9c10e1a9e 100644 --- a/io/zenoh-transport/tests/unicast_compression.rs +++ b/io/zenoh-transport/tests/unicast_compression.rs @@ -301,7 +301,7 @@ mod tests { payload: Put { payload: vec![0u8; msg_size].into(), timestamp: None, - encoding: Encoding::DEFAULT, + encoding: Encoding::empty(), ext_sinfo: None, #[cfg(feature = "shared-memory")] ext_shm: None, diff --git a/io/zenoh-transport/tests/unicast_concurrent.rs b/io/zenoh-transport/tests/unicast_concurrent.rs index 4e90432193..b14cebaaf9 100644 --- a/io/zenoh-transport/tests/unicast_concurrent.rs +++ b/io/zenoh-transport/tests/unicast_concurrent.rs @@ -200,7 +200,7 @@ async fn transport_concurrent(endpoint01: Vec, endpoint02: Vec, endpoint02: Vec, client_transport: TransportUn payload: Put { payload: vec![0u8; *ms].into(), timestamp: None, - encoding: Encoding::DEFAULT, + encoding: Encoding::empty(), ext_sinfo: None, #[cfg(feature = "shared-memory")] ext_shm: None, diff --git a/io/zenoh-transport/tests/unicast_shm.rs b/io/zenoh-transport/tests/unicast_shm.rs index d12a9db7dc..5ec7e31aba 100644 --- a/io/zenoh-transport/tests/unicast_shm.rs +++ b/io/zenoh-transport/tests/unicast_shm.rs @@ -277,7 +277,7 @@ mod tests { payload: Put { payload: sbuf.into(), timestamp: None, - encoding: Encoding::DEFAULT, + encoding: Encoding::empty(), ext_sinfo: None, ext_shm: None, ext_attachment: None, @@ -325,7 +325,7 @@ mod tests { payload: Put { payload: sbuf.into(), timestamp: None, - encoding: Encoding::DEFAULT, + encoding: Encoding::empty(), ext_sinfo: None, ext_shm: None, ext_attachment: None, diff --git a/io/zenoh-transport/tests/unicast_simultaneous.rs b/io/zenoh-transport/tests/unicast_simultaneous.rs index db73e99480..d465497556 100644 --- a/io/zenoh-transport/tests/unicast_simultaneous.rs +++ b/io/zenoh-transport/tests/unicast_simultaneous.rs @@ -82,7 +82,7 @@ mod 
tests { payload: Put { payload: vec![0u8; MSG_SIZE].into(), timestamp: None, - encoding: Encoding::DEFAULT, + encoding: Encoding::empty(), ext_sinfo: None, #[cfg(feature = "shared-memory")] ext_shm: None, diff --git a/io/zenoh-transport/tests/unicast_transport.rs b/io/zenoh-transport/tests/unicast_transport.rs index 795ea90b41..2a830a9e2b 100644 --- a/io/zenoh-transport/tests/unicast_transport.rs +++ b/io/zenoh-transport/tests/unicast_transport.rs @@ -472,7 +472,7 @@ async fn test_transport( payload: Put { payload: vec![0u8; msg_size].into(), timestamp: None, - encoding: Encoding::DEFAULT, + encoding: Encoding::empty(), ext_sinfo: None, #[cfg(feature = "shared-memory")] ext_shm: None, diff --git a/plugins/zenoh-plugin-example/src/lib.rs b/plugins/zenoh-plugin-example/src/lib.rs index c2f083827d..592a08ca9b 100644 --- a/plugins/zenoh-plugin-example/src/lib.rs +++ b/plugins/zenoh-plugin-example/src/lib.rs @@ -164,7 +164,8 @@ async fn run(runtime: Runtime, selector: KeyExpr<'_>, flag: Arc) { // on sample received by the Subscriber sample = sub.recv_async() => { let sample = sample.unwrap(); - info!("Received data ('{}': '{}')", sample.key_expr, sample.value); + let payload = sample.payload.deserialize::().unwrap_or_else(|e| format!("{}", e)); + info!("Received data ('{}': '{}')", sample.key_expr, payload); stored.insert(sample.key_expr.to_string(), sample); }, // on query received by the Queryable diff --git a/plugins/zenoh-plugin-rest/examples/z_serve_sse.rs b/plugins/zenoh-plugin-rest/examples/z_serve_sse.rs index 0c6eb4357b..c5bdcc4c73 100644 --- a/plugins/zenoh-plugin-rest/examples/z_serve_sse.rs +++ b/plugins/zenoh-plugin-rest/examples/z_serve_sse.rs @@ -75,11 +75,7 @@ async fn main() { println!("Data updates are accessible through HTML5 SSE at http://:8000/{key}"); loop { - publisher - .put(Value::from(value).encoding(KnownEncoding::TextPlain.into())) - .res() - .await - .unwrap(); + publisher.put(value).res().await.unwrap(); 
async_std::task::sleep(Duration::from_secs(1)).await; } } diff --git a/plugins/zenoh-plugin-rest/src/lib.rs b/plugins/zenoh-plugin-rest/src/lib.rs index 6f4e80f4eb..1a99d7b5a4 100644 --- a/plugins/zenoh-plugin-rest/src/lib.rs +++ b/plugins/zenoh-plugin-rest/src/lib.rs @@ -21,6 +21,7 @@ use async_std::prelude::FutureExt; use base64::{engine::general_purpose::STANDARD as b64_std_engine, Engine}; use futures::StreamExt; use http_types::Method; +use std::borrow::Cow; use std::convert::TryFrom; use std::str::FromStr; use std::sync::Arc; @@ -29,7 +30,6 @@ use tide::sse::Sender; use tide::{Request, Response, Server, StatusCode}; use zenoh::plugins::{RunningPluginTrait, ZenohPlugin}; use zenoh::prelude::r#async::*; -use zenoh::properties::Properties; use zenoh::query::{QueryConsolidation, Reply}; use zenoh::runtime::Runtime; use zenoh::selector::TIME_RANGE_KEY; @@ -46,38 +46,18 @@ lazy_static::lazy_static! { } const RAW_KEY: &str = "_raw"; -fn value_to_json(value: Value) -> String { - // @TODO: transcode to JSON when implemented in Value - match &value.encoding { - p if p.starts_with(KnownEncoding::TextPlain) - || p.starts_with(KnownEncoding::AppXWwwFormUrlencoded) => - { - // convert to Json string for special characters escaping - serde_json::json!(value.to_string()).to_string() - } - p if p.starts_with(KnownEncoding::AppProperties) => { - // convert to Json string for special characters escaping - serde_json::json!(*Properties::from(value.to_string())).to_string() - } - p if p.starts_with(KnownEncoding::AppJson) - || p.starts_with(KnownEncoding::AppInteger) - || p.starts_with(KnownEncoding::AppFloat) => - { - value.to_string() - } - _ => { - format!(r#""{}""#, b64_std_engine.encode(value.payload.contiguous())) - } - } +fn payload_to_json(payload: Payload) -> String { + payload + .deserialize::() + .unwrap_or_else(|_| format!(r#""{}""#, b64_std_engine.encode(payload.contiguous()))) } fn sample_to_json(sample: Sample) -> String { - let encoding = 
sample.value.encoding.to_string(); format!( r#"{{ "key": "{}", "value": {}, "encoding": "{}", "time": "{}" }}"#, sample.key_expr.as_str(), - value_to_json(sample.value), - encoding, + payload_to_json(sample.payload), + sample.encoding, if let Some(ts) = sample.timestamp { ts.to_string() } else { @@ -90,11 +70,10 @@ fn result_to_json(sample: Result) -> String { match sample { Ok(sample) => sample_to_json(sample), Err(err) => { - let encoding = err.encoding.to_string(); format!( r#"{{ "key": "ERROR", "value": {}, "encoding": "{}"}}"#, - value_to_json(err), - encoding, + payload_to_json(err.payload), + err.encoding, ) } } @@ -157,12 +136,12 @@ async fn to_raw_response(results: flume::Receiver) -> Response { Ok(reply) => match reply.sample { Ok(sample) => response( StatusCode::Ok, - sample.value.encoding.to_string().as_ref(), + Cow::from(&sample.encoding).as_ref(), String::from_utf8_lossy(&sample.payload.contiguous()).as_ref(), ), Err(value) => response( StatusCode::Ok, - value.encoding.to_string().as_ref(), + Cow::from(&value.encoding).as_ref(), String::from_utf8_lossy(&value.payload.contiguous()).as_ref(), ), }, @@ -404,9 +383,9 @@ async fn query(mut req: Request<(Arc, String)>) -> tide::Result { @@ -441,21 +420,25 @@ async fn write(mut req: Request<(Arc, String)>) -> tide::Result { + session + .put(&key_expr, bytes) + .with_encoding(encoding) + .res() + .await + } + SampleKind::Delete => session.delete(&key_expr).res().await, + }; + match res { Ok(_) => Ok(Response::new(StatusCode::Ok)), Err(e) => Ok(response( StatusCode::InternalServerError, diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/align_queryable.rs b/plugins/zenoh-plugin-storage-manager/src/replica/align_queryable.rs index 7295367a06..359b8dd7e8 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/align_queryable.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/align_queryable.rs @@ -18,6 +18,7 @@ use std::cmp::Ordering; use std::collections::{BTreeSet, HashMap, HashSet}; 
use std::str; use std::str::FromStr; +use zenoh::payload::StringOrBase64; use zenoh::prelude::r#async::*; use zenoh::time::Timestamp; use zenoh::Session; @@ -115,7 +116,12 @@ impl AlignQueryable { query.reply(Ok(sample)).res().await.unwrap(); } AlignData::Data(k, (v, ts)) => { - let sample = Sample::new(k, v).with_timestamp(ts); + let Value { + payload, encoding, .. + } = v; + let sample = Sample::new(k, payload) + .with_encoding(encoding) + .with_timestamp(ts); query.reply(Ok(sample)).res().await.unwrap(); } } @@ -165,7 +171,10 @@ impl AlignQueryable { let entry = entry.unwrap(); result.push(AlignData::Data( OwnedKeyExpr::from(entry.key_expr), - (entry.value, each.timestamp), + ( + Value::new(entry.payload).with_encoding(entry.encoding), + each.timestamp, + ), )); } } @@ -221,7 +230,7 @@ impl AlignQueryable { log::trace!( "[ALIGN QUERYABLE] Received ('{}': '{}')", sample.key_expr.as_str(), - sample.value + StringOrBase64::from(sample.payload.clone()) ); if let Some(timestamp) = sample.timestamp { match timestamp.cmp(&logentry.timestamp) { diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs b/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs index 041567ae27..03c6fa949a 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs @@ -19,6 +19,7 @@ use flume::{Receiver, Sender}; use std::collections::{HashMap, HashSet}; use std::str; use zenoh::key_expr::{KeyExpr, OwnedKeyExpr}; +use zenoh::payload::StringOrBase64; use zenoh::prelude::r#async::*; use zenoh::time::Timestamp; use zenoh::Session; @@ -104,7 +105,12 @@ impl Aligner { log::trace!("[ALIGNER] Received queried samples: {missing_data:?}"); for (key, (ts, value)) in missing_data { - let sample = Sample::new(key, value).with_timestamp(ts); + let Value { + payload, encoding, .. 
+ } = value; + let sample = Sample::new(key, payload) + .with_encoding(encoding) + .with_timestamp(ts); log::debug!("[ALIGNER] Adding {:?} to storage", sample); self.tx_sample.send_async(sample).await.unwrap_or_else(|e| { log::error!("[ALIGNER] Error adding sample to storage: {}", e) @@ -136,7 +142,10 @@ impl Aligner { for sample in replies { result.insert( sample.key_expr.into(), - (sample.timestamp.unwrap(), sample.value), + ( + sample.timestamp.unwrap(), + Value::new(sample.payload).with_encoding(sample.encoding), + ), ); } (result, no_err) @@ -202,9 +211,9 @@ impl Aligner { let properties = format!("timestamp={}&{}=cold", other.timestamp, ERA); let (reply_content, mut no_err) = self.perform_query(other_rep, properties).await; let mut other_intervals: HashMap = HashMap::new(); - // expecting sample.value to be a vec of intervals with their checksum + // expecting sample.payload to be a vec of intervals with their checksum for each in reply_content { - match serde_json::from_str(&each.value.to_string()) { + match serde_json::from_str(&StringOrBase64::from(each.payload)) { Ok((i, c)) => { other_intervals.insert(i, c); } @@ -246,11 +255,11 @@ impl Aligner { INTERVALS, diff_string.join(",") ); - // expecting sample.value to be a vec of subintervals with their checksum + // expecting sample.payload to be a vec of subintervals with their checksum let (reply_content, mut no_err) = self.perform_query(other_rep, properties).await; let mut other_subintervals: HashMap = HashMap::new(); for each in reply_content { - match serde_json::from_str(&each.value.to_string()) { + match serde_json::from_str(&StringOrBase64::from(each.payload)) { Ok((i, c)) => { other_subintervals.insert(i, c); } @@ -287,11 +296,11 @@ impl Aligner { SUBINTERVALS, diff_string.join(",") ); - // expecting sample.value to be a vec of log entries with their checksum + // expecting sample.payload to be a vec of log entries with their checksum let (reply_content, mut no_err) = self.perform_query(other_rep, 
properties).await; let mut other_content: HashMap> = HashMap::new(); for each in reply_content { - match serde_json::from_str(&each.value.to_string()) { + match serde_json::from_str(&StringOrBase64::from(each.payload)) { Ok((i, c)) => { other_content.insert(i, c); } @@ -332,13 +341,13 @@ impl Aligner { log::trace!( "[ALIGNER] Received ('{}': '{}')", sample.key_expr.as_str(), - sample.value + StringOrBase64::from(sample.payload.clone()) ); return_val.push(sample); } Err(err) => { log::error!( - "[ALIGNER] Received error for query on selector {} :{}", + "[ALIGNER] Received error for query on selector {} :{:?}", selector, err ); diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/mod.rs b/plugins/zenoh-plugin-storage-manager/src/replica/mod.rs index b743a70451..78254213f7 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/mod.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/mod.rs @@ -26,6 +26,7 @@ use std::str; use std::str::FromStr; use std::time::{Duration, SystemTime}; use urlencoding::encode; +use zenoh::payload::StringOrBase64; use zenoh::prelude::r#async::*; use zenoh::time::Timestamp; use zenoh::Session; @@ -226,9 +227,9 @@ impl Replica { from, sample.kind, sample.key_expr.as_str(), - sample.value + StringOrBase64::from(sample.payload.clone()) ); - let digest: Digest = match serde_json::from_str(&format!("{}", sample.value)) { + let digest: Digest = match serde_json::from_str(&StringOrBase64::from(sample.payload)) { Ok(digest) => digest, Err(e) => { log::error!("[DIGEST_SUB] Error in decoding the digest: {}", e); diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs index 115ed1e8d9..1ef7e65390 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs @@ -180,7 +180,7 @@ impl StorageService { // log error if the sample is not timestamped // This is to reduce down the line 
inconsistencies of having duplicate samples stored if sample.get_timestamp().is_none() { - log::error!("Sample {} is not timestamped. Please timestamp samples meant for replicated storage.", sample); + log::error!("Sample {:?} is not timestamped. Please timestamp samples meant for replicated storage.", sample); } else { self.process_sample(sample).await; @@ -262,7 +262,7 @@ impl StorageService { // The storage should only simply save the key, sample pair while put and retrieve the same during get // the trimming during PUT and GET should be handled by the plugin async fn process_sample(&self, sample: Sample) { - log::trace!("[STORAGE] Processing sample: {}", sample); + log::trace!("[STORAGE] Processing sample: {:?}", sample); // Call incoming data interceptor (if any) let sample = if let Some(ref interceptor) = self.in_interceptor { interceptor(sample) @@ -295,7 +295,7 @@ impl StorageService { && self.is_latest(&k, sample.get_timestamp().unwrap()).await)) { log::trace!( - "Sample `{}` identified as neded processing for key {}", + "Sample `{:?}` identified as neded processing for key {}", sample, k ); @@ -306,15 +306,19 @@ impl StorageService { .await { Some(overriding_update) => { - let mut sample_to_store = - Sample::new(KeyExpr::from(k.clone()), overriding_update.data.value) - .with_timestamp(overriding_update.data.timestamp); + let Value { + payload, encoding, .. 
+ } = overriding_update.data.value; + let mut sample_to_store = Sample::new(KeyExpr::from(k.clone()), payload) + .with_encoding(encoding) + .with_timestamp(overriding_update.data.timestamp); sample_to_store.kind = overriding_update.kind; sample_to_store } None => { let mut sample_to_store = - Sample::new(KeyExpr::from(k.clone()), sample.value.clone()) + Sample::new(KeyExpr::from(k.clone()), sample.payload.clone()) + .with_encoding(sample.encoding.clone()) .with_timestamp(sample.timestamp.unwrap()); sample_to_store.kind = sample.kind; sample_to_store @@ -333,7 +337,8 @@ impl StorageService { storage .put( stripped_key, - sample_to_store.value.clone(), + Value::new(sample_to_store.payload.clone()) + .with_encoding(sample_to_store.encoding.clone()), sample_to_store.timestamp.unwrap(), ) .await @@ -397,7 +402,7 @@ impl StorageService { Update { kind: sample.kind, data: StoredData { - value: sample.value, + value: Value::new(sample.payload).with_encoding(sample.encoding), timestamp: sample.timestamp.unwrap(), }, }, @@ -515,7 +520,11 @@ impl StorageService { match storage.get(stripped_key, q.parameters()).await { Ok(stored_data) => { for entry in stored_data { - let sample = Sample::new(key.clone(), entry.value) + let Value { + payload, encoding, .. + } = entry.value; + let sample = Sample::new(key.clone(), payload) + .with_encoding(encoding) .with_timestamp(entry.timestamp); // apply outgoing interceptor on results let sample = if let Some(ref interceptor) = self.out_interceptor { @@ -549,7 +558,11 @@ impl StorageService { match storage.get(stripped_key, q.parameters()).await { Ok(stored_data) => { for entry in stored_data { - let sample = Sample::new(q.key_expr().clone(), entry.value) + let Value { + payload, encoding, .. 
+ } = entry.value; + let sample = Sample::new(q.key_expr().clone(), payload) + .with_encoding(encoding) .with_timestamp(entry.timestamp); // apply outgoing interceptor on results let sample = if let Some(ref interceptor) = self.out_interceptor { @@ -667,7 +680,7 @@ impl StorageService { self.process_sample(sample).await; } Err(e) => log::warn!( - "Storage '{}' received an error to align query: {}", + "Storage '{}' received an error to align query: {:?}", self.name, e ), @@ -688,15 +701,15 @@ fn serialize_update(update: &Update) -> String { } fn construct_update(data: String) -> Update { - let result: (String, String, String, Vec<&[u8]>) = serde_json::from_str(&data).unwrap(); + let result: (String, String, String, Vec<&[u8]>) = serde_json::from_str(&data).unwrap(); // @TODO: remove the unwrap() let mut payload = ZBuf::default(); for slice in result.3 { payload.push_zslice(slice.to_vec().into()); } - let value = Value::new(payload).encoding(Encoding::from(result.2)); + let value = Value::new(payload).with_encoding(result.2); let data = StoredData { value, - timestamp: Timestamp::from_str(&result.1).unwrap(), + timestamp: Timestamp::from_str(&result.1).unwrap(), // @TODO: remove the unwrap() }; let kind = if result.0.eq(&(SampleKind::Put).to_string()) { SampleKind::Put diff --git a/plugins/zenoh-plugin-storage-manager/tests/operations.rs b/plugins/zenoh-plugin-storage-manager/tests/operations.rs index a4293f31f1..81029e2fa7 100644 --- a/plugins/zenoh-plugin-storage-manager/tests/operations.rs +++ b/plugins/zenoh-plugin-storage-manager/tests/operations.rs @@ -20,6 +20,7 @@ use std::str::FromStr; use std::thread::sleep; use async_std::task; +use zenoh::payload::StringOrBase64; use zenoh::prelude::r#async::*; use zenoh::query::Reply; use zenoh::{prelude::Config, time::Timestamp}; @@ -100,7 +101,7 @@ async fn test_updates_in_order() { // expects exactly one sample let data = get_data(&session, "operation/test/a").await; assert_eq!(data.len(), 1); - 
assert_eq!(format!("{}", data[0].value), "1"); + assert_eq!(StringOrBase64::from(data[0].payload.clone()).as_str(), "1"); put_data( &session, @@ -116,7 +117,7 @@ async fn test_updates_in_order() { // expects exactly one sample let data = get_data(&session, "operation/test/b").await; assert_eq!(data.len(), 1); - assert_eq!(format!("{}", data[0].value), "2"); + assert_eq!(StringOrBase64::from(data[0].payload.clone()).as_str(), "2"); delete_data( &session, @@ -135,7 +136,7 @@ async fn test_updates_in_order() { // expects exactly one sample let data = get_data(&session, "operation/test/b").await; assert_eq!(data.len(), 1); - assert_eq!(format!("{}", data[0].value), "2"); + assert_eq!(StringOrBase64::from(data[0].payload.clone()).as_str(), "2"); assert_eq!(data[0].key_expr.as_str(), "operation/test/b"); drop(storage); diff --git a/plugins/zenoh-plugin-storage-manager/tests/wildcard.rs b/plugins/zenoh-plugin-storage-manager/tests/wildcard.rs index 60970b2247..4808ec246f 100644 --- a/plugins/zenoh-plugin-storage-manager/tests/wildcard.rs +++ b/plugins/zenoh-plugin-storage-manager/tests/wildcard.rs @@ -21,6 +21,7 @@ use std::thread::sleep; // use std::collections::HashMap; use async_std::task; +use zenoh::payload::StringOrBase64; use zenoh::prelude::r#async::*; use zenoh::query::Reply; use zenoh::{prelude::Config, time::Timestamp}; @@ -117,7 +118,7 @@ async fn test_wild_card_in_order() { let data = get_data(&session, "wild/test/*").await; assert_eq!(data.len(), 1); assert_eq!(data[0].key_expr.as_str(), "wild/test/a"); - assert_eq!(format!("{}", data[0].value), "2"); + assert_eq!(StringOrBase64::from(data[0].payload.clone()).as_str(), "2"); put_data( &session, @@ -135,8 +136,8 @@ async fn test_wild_card_in_order() { assert_eq!(data.len(), 2); assert!(["wild/test/a", "wild/test/b"].contains(&data[0].key_expr.as_str())); assert!(["wild/test/a", "wild/test/b"].contains(&data[1].key_expr.as_str())); - assert!(["2", "3"].contains(&format!("{}", data[0].value).as_str())); - 
assert!(["2", "3"].contains(&format!("{}", data[1].value).as_str())); + assert!(["2", "3"].contains(&StringOrBase64::from(data[0].payload.clone()).as_str())); + assert!(["2", "3"].contains(&StringOrBase64::from(data[1].payload.clone()).as_str())); put_data( &session, @@ -154,8 +155,8 @@ async fn test_wild_card_in_order() { assert_eq!(data.len(), 2); assert!(["wild/test/a", "wild/test/b"].contains(&data[0].key_expr.as_str())); assert!(["wild/test/a", "wild/test/b"].contains(&data[1].key_expr.as_str())); - assert_eq!(format!("{}", data[0].value).as_str(), "4"); - assert_eq!(format!("{}", data[1].value).as_str(), "4"); + assert_eq!(StringOrBase64::from(data[0].payload.clone()).as_str(), "4"); + assert_eq!(StringOrBase64::from(data[1].payload.clone()).as_str(), "4"); delete_data( &session, diff --git a/zenoh-ext/Cargo.toml b/zenoh-ext/Cargo.toml index 91b0283ddb..7ee6e7213c 100644 --- a/zenoh-ext/Cargo.toml +++ b/zenoh-ext/Cargo.toml @@ -37,7 +37,10 @@ env_logger = { workspace = true } flume = { workspace = true } futures = { workspace = true } log = { workspace = true } +phf = { workspace = true } serde = { workspace = true, features = ["default"] } +serde_cbor = { workspace = true } +serde_json = { workspace = true } zenoh = { workspace = true, features = ["unstable"], default-features = false } zenoh-core = { workspace = true } zenoh-macros = { workspace = true } diff --git a/zenoh-ext/examples/z_query_sub.rs b/zenoh-ext/examples/z_query_sub.rs index 73433ebf14..80efc0854f 100644 --- a/zenoh-ext/examples/z_query_sub.rs +++ b/zenoh-ext/examples/z_query_sub.rs @@ -17,9 +17,7 @@ use clap::Command; use futures::prelude::*; use futures::select; use std::time::Duration; -use zenoh::config::Config; -use zenoh::prelude::r#async::*; -use zenoh::query::ReplyKeyExpr; +use zenoh::{config::Config, prelude::r#async::*, query::ReplyKeyExpr}; use zenoh_ext::*; #[async_std::main] @@ -62,8 +60,8 @@ async fn main() { select!( sample = subscriber.recv_async() => { let sample = 
sample.unwrap(); - println!(">> [Subscriber] Received {} ('{}': '{}')", - sample.kind, sample.key_expr.as_str(), sample.value); + let payload = sample.payload.deserialize::().unwrap_or_else(|e| format!("{}", e)); + println!(">> [Subscriber] Received {} ('{}': '{}')", sample.kind, sample.key_expr.as_str(), payload); }, _ = stdin.read_exact(&mut input).fuse() => { diff --git a/zenoh-ext/src/group.rs b/zenoh-ext/src/group.rs index aece581fde..9078e61741 100644 --- a/zenoh-ext/src/group.rs +++ b/zenoh-ext/src/group.rs @@ -252,7 +252,7 @@ async fn net_event_handler(z: Arc, state: Arc) { .await .unwrap(); while let Ok(s) = sub.recv_async().await { - match bincode::deserialize::(&(s.value.payload.contiguous())) { + match bincode::deserialize::(&(s.payload.contiguous())) { Ok(evt) => match evt { GroupNetEvent::Join(je) => { log::debug!("Member join: {:?}", &je.member); @@ -342,7 +342,7 @@ async fn net_event_handler(z: Arc, state: Arc) { } } Err(e) => { - log::warn!("Error received: {}", e); + log::warn!("Error received: {:?}", e); } } } diff --git a/zenoh-ext/src/lib.rs b/zenoh-ext/src/lib.rs index 7440d80a53..7ac880fd8c 100644 --- a/zenoh-ext/src/lib.rs +++ b/zenoh-ext/src/lib.rs @@ -23,6 +23,9 @@ pub use querying_subscriber::{ pub use session_ext::SessionExt; pub use subscriber_ext::SubscriberBuilderExt; pub use subscriber_ext::SubscriberForward; +use zenoh::query::Reply; +use zenoh::{sample::Sample, Result as ZResult}; +use zenoh_core::zerror; /// The space of keys to use in a [`FetchingSubscriber`]. 
pub enum KeySpace { @@ -51,3 +54,13 @@ impl From for KeySpace { KeySpace::Liveliness } } + +pub trait ExtractSample { + fn extract(self) -> ZResult; +} + +impl ExtractSample for Reply { + fn extract(self) -> ZResult { + self.sample.map_err(|e| zerror!("{:?}", e).into()) + } +} diff --git a/zenoh-ext/src/querying_subscriber.rs b/zenoh-ext/src/querying_subscriber.rs index 4a7c4f2ded..2c89ec82ae 100644 --- a/zenoh-ext/src/querying_subscriber.rs +++ b/zenoh-ext/src/querying_subscriber.rs @@ -26,6 +26,8 @@ use zenoh::Result as ZResult; use zenoh::SessionRef; use zenoh_core::{zlock, AsyncResolve, Resolvable, SyncResolve}; +use crate::ExtractSample; + /// The builder of [`FetchingSubscriber`], allowing to configure it. #[must_use = "Resolvables do nothing unless you resolve them using the `res` method from either `SyncResolve` or `AsyncResolve`"] pub struct QueryingSubscriberBuilder<'a, 'b, KeySpace, Handler> { @@ -350,8 +352,7 @@ pub struct FetchingSubscriberBuilder< Fetch: FnOnce(Box) -> ZResult<()>, TryIntoSample, > where - TryIntoSample: TryInto, - >::Error: Into, + TryIntoSample: ExtractSample, { pub(crate) session: SessionRef<'a>, pub(crate) key_expr: ZResult>, @@ -372,8 +373,7 @@ impl< TryIntoSample, > FetchingSubscriberBuilder<'a, 'b, KeySpace, Handler, Fetch, TryIntoSample> where - TryIntoSample: TryInto, - >::Error: Into, + TryIntoSample: ExtractSample, { fn with_static_keys( self, @@ -399,8 +399,7 @@ impl< TryIntoSample, > FetchingSubscriberBuilder<'a, 'b, KeySpace, DefaultHandler, Fetch, TryIntoSample> where - TryIntoSample: TryInto, - >::Error: Into, + TryIntoSample: ExtractSample, { /// Add callback to [`FetchingSubscriber`]. #[inline] @@ -496,8 +495,7 @@ impl< TryIntoSample, > FetchingSubscriberBuilder<'a, 'b, crate::UserSpace, Handler, Fetch, TryIntoSample> where - TryIntoSample: TryInto, - >::Error: Into, + TryIntoSample: ExtractSample, { /// Change the subscription reliability. 
#[inline] @@ -540,8 +538,7 @@ impl< where Handler: IntoCallbackReceiverPair<'static, Sample>, Handler::Receiver: Send, - TryIntoSample: TryInto, - >::Error: Into, + TryIntoSample: ExtractSample, { type To = ZResult>; } @@ -556,8 +553,7 @@ where KeySpace: Into, Handler: IntoCallbackReceiverPair<'static, Sample> + Send, Handler::Receiver: Send, - TryIntoSample: TryInto + Send + Sync, - >::Error: Into, + TryIntoSample: ExtractSample + Send + Sync, { fn res_sync(self) -> ::To { FetchingSubscriber::new(self.with_static_keys()) @@ -575,8 +571,7 @@ where KeySpace: Into, Handler: IntoCallbackReceiverPair<'static, Sample> + Send, Handler::Receiver: Send, - TryIntoSample: TryInto + Send + Sync, - >::Error: Into, + TryIntoSample: ExtractSample + Send + Sync, { type Future = Ready; @@ -649,8 +644,7 @@ impl<'a, Receiver> FetchingSubscriber<'a, Receiver> { where KeySpace: Into, Handler: IntoCallbackReceiverPair<'static, Sample, Receiver = Receiver> + Send, - TryIntoSample: TryInto + Send + Sync, - >::Error: Into, + TryIntoSample: ExtractSample + Send + Sync, { let state = Arc::new(Mutex::new(InnerState { pending_fetches: 0, @@ -769,8 +763,7 @@ impl<'a, Receiver> FetchingSubscriber<'a, Receiver> { fetch: Fetch, ) -> impl Resolve> where - TryIntoSample: TryInto + Send + Sync, - >::Error: Into, + TryIntoSample: ExtractSample + Send + Sync, { FetchBuilder { fetch, @@ -846,8 +839,7 @@ pub struct FetchBuilder< Fetch: FnOnce(Box) -> ZResult<()>, TryIntoSample, > where - TryIntoSample: TryInto, - >::Error: Into, + TryIntoSample: ExtractSample, { fetch: Fetch, phantom: std::marker::PhantomData, @@ -858,8 +850,7 @@ pub struct FetchBuilder< impl) -> ZResult<()>, TryIntoSample> Resolvable for FetchBuilder where - TryIntoSample: TryInto, - >::Error: Into, + TryIntoSample: ExtractSample, { type To = ZResult<()>; } @@ -867,8 +858,7 @@ where impl) -> ZResult<()>, TryIntoSample> SyncResolve for FetchBuilder where - TryIntoSample: TryInto, - >::Error: Into, + TryIntoSample: ExtractSample, { fn 
res_sync(self) -> ::To { let handler = register_handler(self.state, self.callback); @@ -879,8 +869,7 @@ where impl) -> ZResult<()>, TryIntoSample> AsyncResolve for FetchBuilder where - TryIntoSample: TryInto, - >::Error: Into, + TryIntoSample: ExtractSample, { type Future = Ready; @@ -906,16 +895,15 @@ fn run_fetch< handler: RepliesHandler, ) -> ZResult<()> where - TryIntoSample: TryInto, - >::Error: Into, + TryIntoSample: ExtractSample, { log::debug!("Fetch data for FetchingSubscriber"); - (fetch)(Box::new(move |s: TryIntoSample| match s.try_into() { + (fetch)(Box::new(move |s: TryIntoSample| match s.extract() { Ok(s) => { let mut state = zlock!(handler.state); log::trace!("Fetched sample received: push it to merge_queue"); state.merge_queue.push(s); } - Err(e) => log::debug!("Received error fetching data: {}", e.into()), + Err(e) => log::debug!("Received error fetching data: {}", e), })) } diff --git a/zenoh-ext/src/subscriber_ext.rs b/zenoh-ext/src/subscriber_ext.rs index 83de47779c..89d3b5f691 100644 --- a/zenoh-ext/src/subscriber_ext.rs +++ b/zenoh-ext/src/subscriber_ext.rs @@ -13,7 +13,7 @@ // use flume::r#async::RecvStream; use futures::stream::{Forward, Map}; -use std::{convert::TryInto, time::Duration}; +use std::time::Duration; use zenoh::query::ReplyKeyExpr; use zenoh::sample::Locality; use zenoh::Result as ZResult; @@ -24,6 +24,7 @@ use zenoh::{ subscriber::{PushMode, Reliability, Subscriber, SubscriberBuilder}, }; +use crate::ExtractSample; use crate::{querying_subscriber::QueryingSubscriberBuilder, FetchingSubscriberBuilder}; /// Allows writing `subscriber.forward(receiver)` instead of `subscriber.stream().map(Ok).forward(publisher)` @@ -87,8 +88,7 @@ pub trait SubscriberBuilderExt<'a, 'b, Handler> { fetch: Fetch, ) -> FetchingSubscriberBuilder<'a, 'b, Self::KeySpace, Handler, Fetch, TryIntoSample> where - TryIntoSample: TryInto, - >::Error: Into; + TryIntoSample: ExtractSample; /// Create a [`FetchingSubscriber`](super::FetchingSubscriber) that will 
perform a query (`session.get()`) as it's /// initial fetch. @@ -169,8 +169,7 @@ impl<'a, 'b, Handler> SubscriberBuilderExt<'a, 'b, Handler> fetch: Fetch, ) -> FetchingSubscriberBuilder<'a, 'b, Self::KeySpace, Handler, Fetch, TryIntoSample> where - TryIntoSample: TryInto, - >::Error: Into, + TryIntoSample: ExtractSample, { FetchingSubscriberBuilder { session: self.session, @@ -283,8 +282,7 @@ impl<'a, 'b, Handler> SubscriberBuilderExt<'a, 'b, Handler> fetch: Fetch, ) -> FetchingSubscriberBuilder<'a, 'b, Self::KeySpace, Handler, Fetch, TryIntoSample> where - TryIntoSample: TryInto, - >::Error: Into, + TryIntoSample: ExtractSample, { FetchingSubscriberBuilder { session: self.session, diff --git a/zenoh/Cargo.toml b/zenoh/Cargo.toml index 11ecfad1bf..e6f7a4d9aa 100644 --- a/zenoh/Cargo.toml +++ b/zenoh/Cargo.toml @@ -79,10 +79,14 @@ log = { workspace = true } ordered-float = { workspace = true } paste = { workspace = true } petgraph = { workspace = true } +phf = { workspace = true } rand = { workspace = true, features = ["default"] } regex = { workspace = true } serde = { workspace = true, features = ["default"] } +serde_cbor = { workspace = true } serde_json = { workspace = true } +serde-pickle = { workspace = true } +serde_yaml = { workspace = true } socket2 = { workspace = true } stop-token = { workspace = true } uhlc = { workspace = true, features = ["default"] } diff --git a/zenoh/src/admin.rs b/zenoh/src/admin.rs index 8cdf638af5..5a242d51b7 100644 --- a/zenoh/src/admin.rs +++ b/zenoh/src/admin.rs @@ -12,11 +12,12 @@ // ZettaScale Zenoh Team, // use crate::{ + encoding::Encoding, keyexpr, prelude::sync::{KeyExpr, Locality, SampleKind}, queryable::Query, sample::DataInfo, - Sample, Session, ZResult, + Payload, Sample, Session, ZResult, }; use async_std::task; use std::{ @@ -25,10 +26,7 @@ use std::{ sync::Arc, }; use zenoh_core::SyncResolve; -use zenoh_protocol::{ - core::{Encoding, KnownEncoding, WireExpr}, - network::NetworkMessage, -}; +use 
zenoh_protocol::{core::WireExpr, network::NetworkMessage}; use zenoh_transport::{ TransportEventHandler, TransportMulticastEventHandler, TransportPeer, TransportPeerEventHandler, }; @@ -71,7 +69,12 @@ pub(crate) fn on_admin_query(session: &Session, query: Query) { let key_expr = *KE_PREFIX / own_zid / *KE_TRANSPORT_UNICAST / zid; if query.key_expr().intersects(&key_expr) { if let Ok(value) = serde_json::value::to_value(peer.clone()) { - let _ = query.reply(Ok(Sample::new(key_expr, value))).res_sync(); + match Payload::try_from(value) { + Ok(zbuf) => { + let _ = query.reply(Ok(Sample::new(key_expr, zbuf))).res_sync(); + } + Err(e) => log::debug!("Admin query error: {}", e), + } } } @@ -83,7 +86,12 @@ pub(crate) fn on_admin_query(session: &Session, query: Query) { *KE_PREFIX / own_zid / *KE_TRANSPORT_UNICAST / zid / *KE_LINK / lid; if query.key_expr().intersects(&key_expr) { if let Ok(value) = serde_json::value::to_value(link) { - let _ = query.reply(Ok(Sample::new(key_expr, value))).res_sync(); + match Payload::try_from(value) { + Ok(zbuf) => { + let _ = query.reply(Ok(Sample::new(key_expr, zbuf))).res_sync(); + } + Err(e) => log::debug!("Admin query error: {}", e), + } } } } @@ -145,7 +153,7 @@ impl TransportMulticastEventHandler for Handler { let expr = WireExpr::from(&(*KE_PREFIX / own_zid / *KE_TRANSPORT_UNICAST / zid)) .to_owned(); let info = DataInfo { - encoding: Some(Encoding::Exact(KnownEncoding::AppJson)), + encoding: Some(Encoding::APPLICATION_JSON), ..Default::default() }; self.session.handle_data( @@ -191,7 +199,7 @@ impl TransportPeerEventHandler for PeerHandler { let mut s = DefaultHasher::new(); link.hash(&mut s); let info = DataInfo { - encoding: Some(Encoding::Exact(KnownEncoding::AppJson)), + encoding: Some(Encoding::APPLICATION_JSON), ..Default::default() }; self.session.handle_data( diff --git a/zenoh/src/encoding.rs b/zenoh/src/encoding.rs new file mode 100644 index 0000000000..d9fa725ed5 --- /dev/null +++ b/zenoh/src/encoding.rs @@ -0,0 +1,850 
@@ +// +// Copyright (c) 2023 ZettaScale Technology +// +// This program and the accompanying materials are made available under the +// terms of the Eclipse Public License 2.0 which is available at +// http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 +// which is available at https://www.apache.org/licenses/LICENSE-2.0. +// +// SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 +// +// Contributors: +// ZettaScale Zenoh Team, +// +use crate::payload::Payload; +use phf::phf_map; +use std::{borrow::Cow, convert::Infallible, fmt, str::FromStr}; +use zenoh_buffers::{ZBuf, ZSlice}; +use zenoh_protocol::core::EncodingId; +#[cfg(feature = "shared-memory")] +use ::{std::sync::Arc, zenoh_shm::SharedMemoryBuf}; + +/// Default encoding values used by Zenoh. +/// +/// An encoding has a similar role to Content-type in HTTP: it indicates, when present, how data should be interpreted by the application. +/// +/// Please note the Zenoh protocol does not impose any encoding value nor does it operate on it. +/// It can be seen as some optional metadata that is carried over by Zenoh in such a way that the application may perform different operations depending on the encoding value. +/// +/// A set of associated constants are provided to cover the most common encodings for user convenience. +/// This is particularly useful in helping Zenoh to perform additional network optimizations. +/// +/// # Examples +/// +/// ### String operations +/// +/// Create an [`Encoding`] from a string and vice versa. +/// ``` +/// use zenoh::prelude::Encoding; +/// +/// let encoding: Encoding = "text/plain".into(); +/// let text: String = encoding.clone().into(); +/// assert_eq!("text/plain", &text); +/// ``` +/// +/// ### Constants and cow operations +/// +/// Since some encoding values are internally optimized by Zenoh, it's generally more efficient to use +/// the defined constants and [`Cow`][std::borrow::Cow] conversion to obtain its string representation.
+/// ``` +/// use zenoh::prelude::Encoding; +/// use std::borrow::Cow; +/// +/// // This allocates +/// assert_eq!("text/plain", &String::from(Encoding::TEXT_PLAIN)); +/// // This does NOT allocate +/// assert_eq!("text/plain", &Cow::from(Encoding::TEXT_PLAIN)); +/// ``` +/// +/// ### Schema +/// +/// Additionally, a schema can be associated to the encoding. +/// The convention is to use the `;` separator if an encoding is created from a string. +/// Alternatively, [`with_schema()`](Encoding::with_schema) can be used to add a schema to one of the associated constants. +/// ``` +/// use zenoh::prelude::Encoding; +/// +/// let encoding1 = Encoding::from("text/plain;utf-8"); +/// let encoding2 = Encoding::TEXT_PLAIN.with_schema("utf-8"); +/// assert_eq!(encoding1, encoding2); +/// assert_eq!("text/plain;utf-8", &encoding1.to_string()); +/// assert_eq!("text/plain;utf-8", &encoding2.to_string()); +/// ``` +#[repr(transparent)] +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct Encoding(zenoh_protocol::core::Encoding); + +impl Encoding { + const SCHEMA_SEP: char = ';'; + + // For compatibility purposes Zenoh reserves any prefix value from `0` to `1023` included. + + // - Primitive types supported in all Zenoh bindings + /// Just some bytes. + /// + /// Constant alias for string: `"zenoh/bytes"`. + pub const ZENOH_BYTES: Encoding = Self(zenoh_protocol::core::Encoding { + id: 0, + schema: None, + }); + /// A VLE-encoded signed little-endian integer. Either 8bit, 16bit, 32bit, or 64bit. Binary representation uses two's complement. + /// + /// Constant alias for string: `"zenoh/int"`. + pub const ZENOH_INT: Encoding = Self(zenoh_protocol::core::Encoding { + id: 1, + schema: None, + }); + /// A VLE-encoded little-endian unsigned integer. Either 8bit, 16bit, 32bit, or 64bit. + /// + /// Constant alias for string: `"zenoh/uint"`. + pub const ZENOH_UINT: Encoding = Self(zenoh_protocol::core::Encoding { + id: 2, + schema: None, + }); + /// A VLE-encoded float.
Either little-endian 32bit or 64bit. Binary representation uses *IEEE 754-2008* *binary32* or *binary64*, respectively. + /// + /// Constant alias for string: `"zenoh/float"`. + pub const ZENOH_FLOAT: Encoding = Self(zenoh_protocol::core::Encoding { + id: 3, + schema: None, + }); + /// A boolean. `0` is `false`, `1` is `true`. Other values are invalid. + /// + /// Constant alias for string: `"zenoh/bool"`. + pub const ZENOH_BOOL: Encoding = Self(zenoh_protocol::core::Encoding { + id: 4, + schema: None, + }); + /// A UTF-8 string. + /// + /// Constant alias for string: `"zenoh/string"`. + pub const ZENOH_STRING: Encoding = Self(zenoh_protocol::core::Encoding { + id: 5, + schema: None, + }); + /// A zenoh error. + /// + /// Constant alias for string: `"zenoh/error"`. + pub const ZENOH_ERROR: Encoding = Self(zenoh_protocol::core::Encoding { + id: 6, + schema: None, + }); + + // - Advanced types may be supported in some of the Zenoh bindings. + /// An application-specific stream of bytes. + /// + /// Constant alias for string: `"application/octet-stream"`. + pub const APPLICATION_OCTET_STREAM: Encoding = Self(zenoh_protocol::core::Encoding { + id: 7, + schema: None, + }); + /// A textual file. + /// + /// Constant alias for string: `"text/plain"`. + pub const TEXT_PLAIN: Encoding = Self(zenoh_protocol::core::Encoding { + id: 8, + schema: None, + }); + /// JSON data intended to be consumed by an application. + /// + /// Constant alias for string: `"application/json"`. + pub const APPLICATION_JSON: Encoding = Self(zenoh_protocol::core::Encoding { + id: 9, + schema: None, + }); + /// JSON data intended to be human readable. + /// + /// Constant alias for string: `"text/json"`. + pub const TEXT_JSON: Encoding = Self(zenoh_protocol::core::Encoding { + id: 10, + schema: None, + }); + /// A Common Data Representation (CDR)-encoded data. + /// + /// Constant alias for string: `"application/cdr"`. 
+ pub const APPLICATION_CDR: Encoding = Self(zenoh_protocol::core::Encoding { + id: 11, + schema: None, + }); + /// A Concise Binary Object Representation (CBOR)-encoded data. + /// + /// Constant alias for string: `"application/cbor"`. + pub const APPLICATION_CBOR: Encoding = Self(zenoh_protocol::core::Encoding { + id: 12, + schema: None, + }); + /// YAML data intended to be consumed by an application. + /// + /// Constant alias for string: `"application/yaml"`. + pub const APPLICATION_YAML: Encoding = Self(zenoh_protocol::core::Encoding { + id: 13, + schema: None, + }); + /// YAML data intended to be human readable. + /// + /// Constant alias for string: `"text/yaml"`. + pub const TEXT_YAML: Encoding = Self(zenoh_protocol::core::Encoding { + id: 14, + schema: None, + }); + /// JSON5 encoded data that are human readable. + /// + /// Constant alias for string: `"text/json5"`. + pub const TEXT_JSON5: Encoding = Self(zenoh_protocol::core::Encoding { + id: 15, + schema: None, + }); + /// A Python object serialized using [pickle](https://docs.python.org/3/library/pickle.html). + /// + /// Constant alias for string: `"application/python-serialized-object"`. + pub const APPLICATION_PYTHON_SERIALIZED_OBJECT: Encoding = + Self(zenoh_protocol::core::Encoding { + id: 16, + schema: None, + }); + /// An application-specific protobuf-encoded data. + /// + /// Constant alias for string: `"application/protobuf"`. + pub const APPLICATION_PROTOBUF: Encoding = Self(zenoh_protocol::core::Encoding { + id: 17, + schema: None, + }); + /// A Java serialized object. + /// + /// Constant alias for string: `"application/java-serialized-object"`. + pub const APPLICATION_JAVA_SERIALIZED_OBJECT: Encoding = Self(zenoh_protocol::core::Encoding { + id: 18, + schema: None, + }); + /// An [openmetrics](https://github.com/OpenObservability/OpenMetrics) data, common used by [Prometheus](https://prometheus.io/). + /// + /// Constant alias for string: `"application/openmetrics-text"`. 
+ pub const APPLICATION_OPENMETRICS_TEXT: Encoding = Self(zenoh_protocol::core::Encoding { + id: 19, + schema: None, + }); + /// A Portable Network Graphics (PNG) image. + /// + /// Constant alias for string: `"image/png"`. + pub const IMAGE_PNG: Encoding = Self(zenoh_protocol::core::Encoding { + id: 20, + schema: None, + }); + /// A Joint Photographic Experts Group (JPEG) image. + /// + /// Constant alias for string: `"image/jpeg"`. + pub const IMAGE_JPEG: Encoding = Self(zenoh_protocol::core::Encoding { + id: 21, + schema: None, + }); + /// A Graphics Interchange Format (GIF) image. + /// + /// Constant alias for string: `"image/gif"`. + pub const IMAGE_GIF: Encoding = Self(zenoh_protocol::core::Encoding { + id: 22, + schema: None, + }); + /// A BitMap (BMP) image. + /// + /// Constant alias for string: `"image/bmp"`. + pub const IMAGE_BMP: Encoding = Self(zenoh_protocol::core::Encoding { + id: 23, + schema: None, + }); + /// A Web Portable (WebP) image. + /// + /// Constant alias for string: `"image/webp"`. + pub const IMAGE_WEBP: Encoding = Self(zenoh_protocol::core::Encoding { + id: 24, + schema: None, + }); + /// An XML file intended to be consumed by an application. + /// + /// Constant alias for string: `"application/xml"`. + pub const APPLICATION_XML: Encoding = Self(zenoh_protocol::core::Encoding { + id: 25, + schema: None, + }); + /// An encoded list of tuples, each consisting of a name and a value. + /// + /// Constant alias for string: `"application/x-www-form-urlencoded"`. + pub const APPLICATION_X_WWW_FORM_URLENCODED: Encoding = Self(zenoh_protocol::core::Encoding { + id: 26, + schema: None, + }); + /// An HTML file. + /// + /// Constant alias for string: `"text/html"`. + pub const TEXT_HTML: Encoding = Self(zenoh_protocol::core::Encoding { + id: 27, + schema: None, + }); + /// An XML file that is human readable. + /// + /// Constant alias for string: `"text/xml"`.
+ pub const TEXT_XML: Encoding = Self(zenoh_protocol::core::Encoding { + id: 28, + schema: None, + }); + /// A CSS file. + /// + /// Constant alias for string: `"text/css"`. + pub const TEXT_CSS: Encoding = Self(zenoh_protocol::core::Encoding { + id: 29, + schema: None, + }); + /// A JavaScript file. + /// + /// Constant alias for string: `"text/javascript"`. + pub const TEXT_JAVASCRIPT: Encoding = Self(zenoh_protocol::core::Encoding { + id: 30, + schema: None, + }); + /// A MarkDown file. + /// + /// Constant alias for string: `"text/markdown"`. + pub const TEXT_MARKDOWN: Encoding = Self(zenoh_protocol::core::Encoding { + id: 31, + schema: None, + }); + /// A CSV file. + /// + /// Constant alias for string: `"text/csv"`. + pub const TEXT_CSV: Encoding = Self(zenoh_protocol::core::Encoding { + id: 32, + schema: None, + }); + /// An application-specific SQL query. + /// + /// Constant alias for string: `"application/sql"`. + pub const APPLICATION_SQL: Encoding = Self(zenoh_protocol::core::Encoding { + id: 33, + schema: None, + }); + /// Constrained Application Protocol (CoAP) data intended for CoAP-to-HTTP and HTTP-to-CoAP proxies. + /// + /// Constant alias for string: `"application/coap-payload"`. + pub const APPLICATION_COAP_PAYLOAD: Encoding = Self(zenoh_protocol::core::Encoding { + id: 34, + schema: None, + }); + /// Defines a JSON document structure for expressing a sequence of operations to apply to a JSON document. + /// + /// Constant alias for string: `"application/json-patch+json"`. + pub const APPLICATION_JSON_PATCH_JSON: Encoding = Self(zenoh_protocol::core::Encoding { + id: 35, + schema: None, + }); + /// A JSON text sequence consists of any number of JSON texts, all encoded in UTF-8. + /// + /// Constant alias for string: `"application/json-seq"`. 
+ pub const APPLICATION_JSON_SEQ: Encoding = Self(zenoh_protocol::core::Encoding { + id: 36, + schema: None, + }); + /// A JSONPath defines a string syntax for selecting and extracting JSON values from within a given JSON value. + /// + /// Constant alias for string: `"application/jsonpath"`. + pub const APPLICATION_JSONPATH: Encoding = Self(zenoh_protocol::core::Encoding { + id: 37, + schema: None, + }); + /// A JSON Web Token (JWT). + /// + /// Constant alias for string: `"application/jwt"`. + pub const APPLICATION_JWT: Encoding = Self(zenoh_protocol::core::Encoding { + id: 38, + schema: None, + }); + /// An application-specific MPEG-4 encoded data, either audio or video. + /// + /// Constant alias for string: `"application/mp4"`. + pub const APPLICATION_MP4: Encoding = Self(zenoh_protocol::core::Encoding { + id: 39, + schema: None, + }); + /// A SOAP 1.2 message serialized as XML 1.0. + /// + /// Constant alias for string: `"application/soap+xml"`. + pub const APPLICATION_SOAP_XML: Encoding = Self(zenoh_protocol::core::Encoding { + id: 40, + schema: None, + }); + /// A YANG-encoded data commonly used by the Network Configuration Protocol (NETCONF). + /// + /// Constant alias for string: `"application/yang"`. + pub const APPLICATION_YANG: Encoding = Self(zenoh_protocol::core::Encoding { + id: 41, + schema: None, + }); + /// A MPEG-4 Advanced Audio Coding (AAC) media. + /// + /// Constant alias for string: `"audio/aac"`. + pub const AUDIO_AAC: Encoding = Self(zenoh_protocol::core::Encoding { + id: 42, + schema: None, + }); + /// A Free Lossless Audio Codec (FLAC) media. + /// + /// Constant alias for string: `"audio/flac"`. + pub const AUDIO_FLAC: Encoding = Self(zenoh_protocol::core::Encoding { + id: 43, + schema: None, + }); + /// An audio codec defined in MPEG-1, MPEG-2, MPEG-4, or registered at the MP4 registration authority. + /// + /// Constant alias for string: `"audio/mp4"`. 
+ pub const AUDIO_MP4: Encoding = Self(zenoh_protocol::core::Encoding { + id: 44, + schema: None, + }); + /// An Ogg-encapsulated audio stream. + /// + /// Constant alias for string: `"audio/ogg"`. + pub const AUDIO_OGG: Encoding = Self(zenoh_protocol::core::Encoding { + id: 45, + schema: None, + }); + /// A Vorbis-encoded audio stream. + /// + /// Constant alias for string: `"audio/vorbis"`. + pub const AUDIO_VORBIS: Encoding = Self(zenoh_protocol::core::Encoding { + id: 46, + schema: None, + }); + /// A h261-encoded video stream. + /// + /// Constant alias for string: `"video/h261"`. + pub const VIDEO_H261: Encoding = Self(zenoh_protocol::core::Encoding { + id: 47, + schema: None, + }); + /// A h263-encoded video stream. + /// + /// Constant alias for string: `"video/h263"`. + pub const VIDEO_H263: Encoding = Self(zenoh_protocol::core::Encoding { + id: 48, + schema: None, + }); + /// A h264-encoded video stream. + /// + /// Constant alias for string: `"video/h264"`. + pub const VIDEO_H264: Encoding = Self(zenoh_protocol::core::Encoding { + id: 49, + schema: None, + }); + /// A h265-encoded video stream. + /// + /// Constant alias for string: `"video/h265"`. + pub const VIDEO_H265: Encoding = Self(zenoh_protocol::core::Encoding { + id: 50, + schema: None, + }); + /// A h266-encoded video stream. + /// + /// Constant alias for string: `"video/h266"`. + pub const VIDEO_H266: Encoding = Self(zenoh_protocol::core::Encoding { + id: 51, + schema: None, + }); + /// A video codec defined in MPEG-1, MPEG-2, MPEG-4, or registered at the MP4 registration authority. + /// + /// Constant alias for string: `"video/mp4"`. + pub const VIDEO_MP4: Encoding = Self(zenoh_protocol::core::Encoding { + id: 52, + schema: None, + }); + /// An Ogg-encapsulated video stream. + /// + /// Constant alias for string: `"video/ogg"`. + pub const VIDEO_OGG: Encoding = Self(zenoh_protocol::core::Encoding { + id: 53, + schema: None, + }); + /// An uncompressed, studio-quality video stream. 
+ /// + /// Constant alias for string: `"video/raw"`. + pub const VIDEO_RAW: Encoding = Self(zenoh_protocol::core::Encoding { + id: 54, + schema: None, + }); + /// A VP8-encoded video stream. + /// + /// Constant alias for string: `"video/vp8"`. + pub const VIDEO_VP8: Encoding = Self(zenoh_protocol::core::Encoding { + id: 55, + schema: None, + }); + /// A VP9-encoded video stream. + /// + /// Constant alias for string: `"video/vp9"`. + pub const VIDEO_VP9: Encoding = Self(zenoh_protocol::core::Encoding { + id: 56, + schema: None, + }); + + const ID_TO_STR: phf::Map = phf_map! { + 0u16 => "zenoh/bytes", + 1u16 => "zenoh/int", + 2u16 => "zenoh/uint", + 3u16 => "zenoh/float", + 4u16 => "zenoh/bool", + 5u16 => "zenoh/string", + 6u16 => "zenoh/error", + 7u16 => "application/octet-stream", + 8u16 => "text/plain", + 9u16 => "application/json", + 10u16 => "text/json", + 11u16 => "application/cdr", + 12u16 => "application/cbor", + 13u16 => "application/yaml", + 14u16 => "text/yaml", + 15u16 => "text/json5", + 16u16 => "application/python-serialized-object", + 17u16 => "application/protobuf", + 18u16 => "application/java-serialized-object", + 19u16 => "application/openmetrics-text", + 20u16 => "image/png", + 21u16 => "image/jpeg", + 22u16 => "image/gif", + 23u16 => "image/bmp", + 24u16 => "image/webp", + 25u16 => "application/xml", + 26u16 => "application/x-www-form-urlencoded", + 27u16 => "text/html", + 28u16 => "text/xml", + 29u16 => "text/css", + 30u16 => "text/javascript", + 31u16 => "text/markdown", + 32u16 => "text/csv", + 33u16 => "application/sql", + 34u16 => "application/coap-payload", + 35u16 => "application/json-patch+json", + 36u16 => "application/json-seq", + 37u16 => "application/jsonpath", + 38u16 => "application/jwt", + 39u16 => "application/mp4", + 40u16 => "application/soap+xml", + 41u16 => "application/yang", + 42u16 => "audio/aac", + 43u16 => "audio/flac", + 44u16 => "audio/mp4", + 45u16 => "audio/ogg", + 46u16 => "audio/vorbis", + 47u16 => "video/h261", 
+ 48u16 => "video/h263", + 49u16 => "video/h264", + 50u16 => "video/h265", + 51u16 => "video/h266", + 52u16 => "video/mp4", + 53u16 => "video/ogg", + 54u16 => "video/raw", + 55u16 => "video/vp8", + 56u16 => "video/vp9", + }; + + const STR_TO_ID: phf::Map<&'static str, EncodingId> = phf_map! { + "zenoh/bytes" => 0u16, + "zenoh/int" => 1u16, + "zenoh/uint" => 2u16, + "zenoh/float" => 3u16, + "zenoh/bool" => 4u16, + "zenoh/string" => 5u16, + "zenoh/error" => 6u16, + "application/octet-stream" => 7u16, + "text/plain" => 8u16, + "application/json" => 9u16, + "text/json" => 10u16, + "application/cdr" => 11u16, + "application/cbor" => 12u16, + "application/yaml" => 13u16, + "text/yaml" => 14u16, + "text/json5" => 15u16, + "application/python-serialized-object" => 16u16, + "application/protobuf" => 17u16, + "application/java-serialized-object" => 18u16, + "application/openmetrics-text" => 19u16, + "image/png" => 20u16, + "image/jpeg" => 21u16, + "image/gif" => 22u16, + "image/bmp" => 23u16, + "image/webp" => 24u16, + "application/xml" => 25u16, + "application/x-www-form-urlencoded" => 26u16, + "text/html" => 27u16, + "text/xml" => 28u16, + "text/css" => 29u16, + "text/javascript" => 30u16, + "text/markdown" => 31u16, + "text/csv" => 32u16, + "application/sql" => 33u16, + "application/coap-payload" => 34u16, + "application/json-patch+json" => 35u16, + "application/json-seq" => 36u16, + "application/jsonpath" => 37u16, + "application/jwt" => 38u16, + "application/mp4" => 39u16, + "application/soap+xml" => 40u16, + "application/yang" => 41u16, + "audio/aac" => 42u16, + "audio/flac" => 43u16, + "audio/mp4" => 44u16, + "audio/ogg" => 45u16, + "audio/vorbis" => 46u16, + "video/h261" => 47u16, + "video/h263" => 48u16, + "video/h264" => 49u16, + "video/h265" => 50u16, + "video/h266" => 51u16, + "video/mp4" => 52u16, + "video/ogg" => 53u16, + "video/raw" => 54u16, + "video/vp8" => 55u16, + "video/vp9" => 56u16, + }; + + /// The default [`Encoding`] is 
[`ZENOH_BYTES`](Encoding::ZENOH_BYTES). + pub const fn default() -> Self { + Self::ZENOH_BYTES + } + + /// Set a schema to this encoding. Zenoh does not define what a schema is and its semantics are left to the implementer. + /// E.g. a common schema for `text/plain` encoding is `utf-8`. + pub fn with_schema<S>(mut self, s: S) -> Self + where + S: Into<String>, + { + let s: String = s.into(); + self.0.schema = Some(s.into_boxed_str().into_boxed_bytes().into()); + self + } +} + +impl Default for Encoding { + fn default() -> Self { + Self::default() + } +} + +impl From<&str> for Encoding { + fn from(t: &str) -> Self { + let mut inner = zenoh_protocol::core::Encoding::empty(); + + // Check if empty + if t.is_empty() { + return Encoding(inner); + } + + // Everything before `;` may be mapped to a known id + let (id, schema) = t.split_once(Encoding::SCHEMA_SEP).unwrap_or((t, "")); + if let Some(id) = Encoding::STR_TO_ID.get(id).copied() { + inner.id = id; + }; + if !schema.is_empty() { + inner.schema = Some(ZSlice::from(schema.to_string().into_bytes())); + } + + Encoding(inner) + } +} + +impl From<String> for Encoding { + fn from(value: String) -> Self { + Self::from(value.as_str()) + } +} + +impl FromStr for Encoding { + type Err = Infallible; + + fn from_str(s: &str) -> Result<Self, Self::Err> { + Ok(Self::from(s)) + } +} + +impl From<&Encoding> for Cow<'static, str> { + fn from(encoding: &Encoding) -> Self { + fn su8_to_str(schema: &[u8]) -> &str { + std::str::from_utf8(schema).unwrap_or("unknown(non-utf8)") + } + + match ( + Encoding::ID_TO_STR.get(&encoding.0.id).copied(), + encoding.0.schema.as_ref(), + ) { + // Perfect match + (Some(i), None) => Cow::Borrowed(i), + // ID and schema + (Some(i), Some(s)) => { + Cow::Owned(format!("{}{}{}", i, Encoding::SCHEMA_SEP, su8_to_str(s))) + } + // + (None, Some(s)) => Cow::Owned(format!( + "unknown({}){}{}", + encoding.0.id, + Encoding::SCHEMA_SEP, + su8_to_str(s) + )), + (None, None) => Cow::Owned(format!("unknown({})", encoding.0.id)), + } + } +} + +impl
From for Cow<'static, str> { + fn from(encoding: Encoding) -> Self { + Self::from(&encoding) + } +} + +impl From for String { + fn from(encoding: Encoding) -> Self { + encoding.to_string() + } +} + +impl From for zenoh_protocol::core::Encoding { + fn from(value: Encoding) -> Self { + value.0 + } +} + +impl From for Encoding { + fn from(value: zenoh_protocol::core::Encoding) -> Self { + Self(value) + } +} + +impl fmt::Display for Encoding { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> std::fmt::Result { + let s = Cow::from(self); + f.write_str(s.as_ref()) + } +} + +// - Encoding trait +pub trait EncodingMapping { + const ENCODING: Encoding; +} + +// Bytes +impl EncodingMapping for Payload { + const ENCODING: Encoding = Encoding::ZENOH_BYTES; +} + +impl EncodingMapping for ZBuf { + const ENCODING: Encoding = Encoding::ZENOH_BYTES; +} + +impl EncodingMapping for Vec { + const ENCODING: Encoding = Encoding::ZENOH_BYTES; +} + +impl EncodingMapping for &[u8] { + const ENCODING: Encoding = Encoding::ZENOH_BYTES; +} + +impl EncodingMapping for Cow<'_, [u8]> { + const ENCODING: Encoding = Encoding::ZENOH_BYTES; +} + +// String +impl EncodingMapping for String { + const ENCODING: Encoding = Encoding::ZENOH_STRING; +} + +impl EncodingMapping for &str { + const ENCODING: Encoding = Encoding::ZENOH_STRING; +} + +impl EncodingMapping for Cow<'_, str> { + const ENCODING: Encoding = Encoding::ZENOH_STRING; +} + +// Zenoh unsigned integers +impl EncodingMapping for u8 { + const ENCODING: Encoding = Encoding::ZENOH_UINT; +} + +impl EncodingMapping for u16 { + const ENCODING: Encoding = Encoding::ZENOH_UINT; +} + +impl EncodingMapping for u32 { + const ENCODING: Encoding = Encoding::ZENOH_UINT; +} + +impl EncodingMapping for u64 { + const ENCODING: Encoding = Encoding::ZENOH_UINT; +} + +impl EncodingMapping for usize { + const ENCODING: Encoding = Encoding::ZENOH_UINT; +} + +// Zenoh signed integers +impl EncodingMapping for i8 { + const ENCODING: Encoding = Encoding::ZENOH_INT; +} 
+ +impl EncodingMapping for i16 { + const ENCODING: Encoding = Encoding::ZENOH_INT; +} + +impl EncodingMapping for i32 { + const ENCODING: Encoding = Encoding::ZENOH_INT; +} + +impl EncodingMapping for i64 { + const ENCODING: Encoding = Encoding::ZENOH_INT; +} + +impl EncodingMapping for isize { + const ENCODING: Encoding = Encoding::ZENOH_INT; +} + +// Zenoh floats +impl EncodingMapping for f32 { + const ENCODING: Encoding = Encoding::ZENOH_FLOAT; +} + +impl EncodingMapping for f64 { + const ENCODING: Encoding = Encoding::ZENOH_FLOAT; +} + +// Zenoh bool +impl EncodingMapping for bool { + const ENCODING: Encoding = Encoding::ZENOH_BOOL; +} + +// - Zenoh advanced types encoders/decoders +impl EncodingMapping for serde_json::Value { + const ENCODING: Encoding = Encoding::APPLICATION_JSON; +} + +impl EncodingMapping for serde_yaml::Value { + const ENCODING: Encoding = Encoding::APPLICATION_YAML; +} + +impl EncodingMapping for serde_cbor::Value { + const ENCODING: Encoding = Encoding::APPLICATION_CBOR; +} + +impl EncodingMapping for serde_pickle::Value { + const ENCODING: Encoding = Encoding::APPLICATION_PYTHON_SERIALIZED_OBJECT; +} + +// - Zenoh SHM +#[cfg(feature = "shared-memory")] +impl EncodingMapping for Arc { + const ENCODING: Encoding = Encoding::ZENOH_BYTES; +} + +#[cfg(feature = "shared-memory")] +impl EncodingMapping for Box { + const ENCODING: Encoding = Encoding::ZENOH_BYTES; +} + +#[cfg(feature = "shared-memory")] +impl EncodingMapping for SharedMemoryBuf { + const ENCODING: Encoding = Encoding::ZENOH_BYTES; +} diff --git a/zenoh/src/lib.rs b/zenoh/src/lib.rs index 0a8f1feb64..bae81d3a54 100644 --- a/zenoh/src/lib.rs +++ b/zenoh/src/lib.rs @@ -53,7 +53,7 @@ //! let session = zenoh::open(config::default()).res().await.unwrap(); //! let subscriber = session.declare_subscriber("key/expression").res().await.unwrap(); //! while let Ok(sample) = subscriber.recv_async().await { -//! println!("Received: {}", sample); +//! println!("Received: {:?}", sample); //! 
}; //! } //! ``` @@ -79,9 +79,11 @@ extern crate zenoh_core; #[macro_use] extern crate zenoh_result; +pub(crate) type Id = usize; + use git_version::git_version; use handlers::DefaultHandler; -#[zenoh_macros::unstable] +#[cfg(feature = "unstable")] use net::runtime::Runtime; use prelude::*; use scouting::ScoutBuilder; @@ -132,10 +134,12 @@ pub use net::runtime; pub mod selector; #[deprecated = "This module is now a separate crate. Use the crate directly for shorter compile-times"] pub use zenoh_config as config; +pub(crate) mod encoding; pub mod handlers; pub mod info; #[cfg(feature = "unstable")] pub mod liveliness; +pub mod payload; pub mod plugins; pub mod prelude; pub mod publication; @@ -168,23 +172,6 @@ pub mod time { } } -/// A map of key/value (String,String) properties. -pub mod properties { - use super::prelude::Value; - pub use zenoh_collections::Properties; - - /// Convert a set of [`Properties`] into a [`Value`]. - /// For instance, Properties: `[("k1", "v1"), ("k2, v2")]` - /// is converted into Json: `{ "k1": "v1", "k2": "v2" }` - pub fn properties_to_json_value(props: &Properties) -> Value { - let json_map = props - .iter() - .map(|(k, v)| (k.clone(), serde_json::Value::String(v.clone()))) - .collect::>(); - serde_json::Value::Object(json_map).into() - } -} - /// Scouting primitives. pub mod scouting; diff --git a/zenoh/src/liveliness.rs b/zenoh/src/liveliness.rs index 26a803fa43..9f14866363 100644 --- a/zenoh/src/liveliness.rs +++ b/zenoh/src/liveliness.rs @@ -15,8 +15,7 @@ //! Liveliness primitives. //! //! 
see [`Liveliness`] - -use crate::query::Reply; +use crate::{query::Reply, Id}; #[zenoh_macros::unstable] use { @@ -426,7 +425,7 @@ impl<'a, 'b> LivelinessSubscriberBuilder<'a, 'b, DefaultHandler> { /// let session = zenoh::open(config::peer()).res().await.unwrap(); /// let subscriber = session /// .declare_subscriber("key/expression") - /// .callback(|sample| { println!("Received: {} {}", sample.key_expr, sample.value); }) + /// .callback(|sample| { println!("Received: {} {:?}", sample.key_expr, sample.payload); }) /// .res() /// .await /// .unwrap(); @@ -500,7 +499,7 @@ impl<'a, 'b> LivelinessSubscriberBuilder<'a, 'b, DefaultHandler> { /// .await /// .unwrap(); /// while let Ok(sample) = subscriber.recv_async().await { - /// println!("Received: {} {}", sample.key_expr, sample.value); + /// println!("Received: {} {:?}", sample.key_expr, sample.payload); /// } /// # }) /// ``` @@ -508,7 +507,7 @@ impl<'a, 'b> LivelinessSubscriberBuilder<'a, 'b, DefaultHandler> { #[zenoh_macros::unstable] pub fn with(self, handler: Handler) -> LivelinessSubscriberBuilder<'a, 'b, Handler> where - Handler: crate::prelude::IntoCallbackReceiverPair<'static, Sample>, + Handler: crate::handlers::IntoCallbackReceiverPair<'static, Sample>, { let LivelinessSubscriberBuilder { session, @@ -594,8 +593,8 @@ where /// .unwrap(); /// while let Ok(token) = tokens.recv_async().await { /// match token.sample { -/// Ok(sample) => println!("Alive token ('{}')", sample.key_expr.as_str(),), -/// Err(err) => println!("Received (ERROR: '{}')", String::try_from(&err).unwrap()), +/// Ok(sample) => println!("Alive token ('{}')", sample.key_expr.as_str()), +/// Err(err) => println!("Received (ERROR: '{:?}')", err.payload), /// } /// } /// # }) diff --git a/zenoh/src/net/routing/dispatcher/queries.rs b/zenoh/src/net/routing/dispatcher/queries.rs index e8e84395f8..b0f7f7f7ef 100644 --- a/zenoh/src/net/routing/dispatcher/queries.rs +++ b/zenoh/src/net/routing/dispatcher/queries.rs @@ -559,7 +559,7 @@ pub fn 
route_query( payload: ReplyBody::Put(Put { // @TODO: handle Del case timestamp: None, // @TODO: handle timestamp - encoding: Encoding::DEFAULT, // @TODO: handle encoding + encoding: Encoding::empty(), // @TODO: handle encoding ext_sinfo: None, // @TODO: handle source info ext_attachment: None, // @TODO: expose it in the API #[cfg(feature = "shared-memory")] diff --git a/zenoh/src/net/runtime/adminspace.rs b/zenoh/src/net/runtime/adminspace.rs index cd7cf448cd..03b447aae0 100644 --- a/zenoh/src/net/runtime/adminspace.rs +++ b/zenoh/src/net/runtime/adminspace.rs @@ -12,8 +12,10 @@ // ZettaScale Zenoh Team, use super::routing::dispatcher::face::Face; use super::Runtime; +use crate::encoding::Encoding; use crate::key_expr::KeyExpr; use crate::net::primitives::Primitives; +use crate::payload::Payload; use crate::plugins::sealed::{self as plugins}; use crate::prelude::sync::{Sample, SyncResolve}; use crate::queryable::Query; @@ -30,9 +32,11 @@ use std::sync::Mutex; use zenoh_buffers::buffer::SplitBuffer; use zenoh_config::{ConfigValidator, ValidatedMap, WhatAmI}; use zenoh_plugin_trait::{PluginControl, PluginStatus}; -use zenoh_protocol::core::key_expr::keyexpr; use zenoh_protocol::{ - core::{key_expr::OwnedKeyExpr, ExprId, KnownEncoding, WireExpr, ZenohId, EMPTY_EXPR_ID}, + core::{ + key_expr::{keyexpr, OwnedKeyExpr}, + ExprId, WireExpr, ZenohId, EMPTY_EXPR_ID, + }, network::{ declare::{queryable::ext::QueryableInfo, subscriber::ext::SubscriberInfo}, ext, Declare, DeclareBody, DeclareQueryable, DeclareSubscriber, Push, Request, Response, @@ -420,7 +424,7 @@ impl Primitives for AdminSpace { parameters, value: query .ext_body - .map(|b| Value::from(b.payload).encoding(b.encoding)), + .map(|b| Value::from(b.payload).with_encoding(b.encoding)), qid: msg.id, zid, primitives, @@ -561,13 +565,18 @@ fn router_data(context: &AdminContext, query: Query) { } log::trace!("AdminSpace router_data: {:?}", json); + let payload = match Payload::try_from(json) { + Ok(p) => p, + Err(e) => 
{ + log::error!("Error serializing AdminSpace reply: {:?}", e); + return; + } + }; if let Err(e) = query - .reply(Ok(Sample::new( - reply_key, - Value::from(json.to_string().as_bytes().to_vec()) - .encoding(KnownEncoding::AppJson.into()), - ))) - .res() + .reply(Ok( + Sample::new(reply_key, payload).with_encoding(Encoding::APPLICATION_JSON) + )) + .res_sync() { log::error!("Error sending AdminSpace reply: {:?}", e); } @@ -596,13 +605,7 @@ zenoh_build{{version="{}"}} 1 .openmetrics_text(), ); - if let Err(e) = query - .reply(Ok(Sample::new( - reply_key, - Value::from(metrics.as_bytes().to_vec()).encoding(KnownEncoding::TextPlain.into()), - ))) - .res() - { + if let Err(e) = query.reply(Ok(Sample::new(reply_key, metrics))).res() { log::error!("Error sending AdminSpace reply: {:?}", e); } } @@ -617,14 +620,7 @@ fn routers_linkstate_data(context: &AdminContext, query: Query) { if let Err(e) = query .reply(Ok(Sample::new( reply_key, - Value::from( - tables - .hat_code - .info(&tables, WhatAmI::Router) - .as_bytes() - .to_vec(), - ) - .encoding(KnownEncoding::TextPlain.into()), + tables.hat_code.info(&tables, WhatAmI::Router), ))) .res() { @@ -642,14 +638,7 @@ fn peers_linkstate_data(context: &AdminContext, query: Query) { if let Err(e) = query .reply(Ok(Sample::new( reply_key, - Value::from( - tables - .hat_code - .info(&tables, WhatAmI::Peer) - .as_bytes() - .to_vec(), - ) - .encoding(KnownEncoding::TextPlain.into()), + tables.hat_code.info(&tables, WhatAmI::Peer), ))) .res() { @@ -667,7 +656,7 @@ fn subscribers_data(context: &AdminContext, query: Query) { )) .unwrap(); if query.key_expr().intersects(&key) { - if let Err(e) = query.reply(Ok(Sample::new(key, Value::empty()))).res() { + if let Err(e) = query.reply(Ok(Sample::new(key, Payload::empty()))).res() { log::error!("Error sending AdminSpace reply: {:?}", e); } } @@ -684,7 +673,7 @@ fn queryables_data(context: &AdminContext, query: Query) { )) .unwrap(); if query.key_expr().intersects(&key) { - if let Err(e) = 
query.reply(Ok(Sample::new(key, Value::empty()))).res() { + if let Err(e) = query.reply(Ok(Sample::new(key, Payload::empty()))).res() { log::error!("Error sending AdminSpace reply: {:?}", e); } } @@ -702,8 +691,13 @@ fn plugins_data(context: &AdminContext, query: Query) { log::debug!("plugin status: {:?}", status); let key = root_key.join(status.name()).unwrap(); let status = serde_json::to_value(status).unwrap(); - if let Err(e) = query.reply(Ok(Sample::new(key, Value::from(status)))).res() { - log::error!("Error sending AdminSpace reply: {:?}", e); + match Payload::try_from(status) { + Ok(zbuf) => { + if let Err(e) = query.reply(Ok(Sample::new(key, zbuf))).res_sync() { + log::error!("Error sending AdminSpace reply: {:?}", e); + } + } + Err(e) => log::debug!("Admin query error: {}", e), } } } @@ -720,12 +714,7 @@ fn plugins_status(context: &AdminContext, query: Query) { with_extended_string(plugin_key, &["/__path__"], |plugin_path_key| { if let Ok(key_expr) = KeyExpr::try_from(plugin_path_key.clone()) { if query.key_expr().intersects(&key_expr) { - if let Err(e) = query - .reply(Ok(Sample::new( - key_expr, - Value::from(plugin.path()).encoding(KnownEncoding::AppJson.into()), - ))) - .res() + if let Err(e) = query.reply(Ok(Sample::new(key_expr, plugin.path()))).res() { log::error!("Error sending AdminSpace reply: {:?}", e); } @@ -748,13 +737,13 @@ fn plugins_status(context: &AdminContext, query: Query) { Ok(Ok(responses)) => { for response in responses { if let Ok(key_expr) = KeyExpr::try_from(response.key) { - if let Err(e) = query.reply(Ok(Sample::new( - key_expr, - Value::from(response.value).encoding(KnownEncoding::AppJson.into()), - ))) - .res() - { - log::error!("Error sending AdminSpace reply: {:?}", e); + match Payload::try_from(response.value) { + Ok(zbuf) => { + if let Err(e) = query.reply(Ok(Sample::new(key_expr, zbuf))).res_sync() { + log::error!("Error sending AdminSpace reply: {:?}", e); + } + }, + Err(e) => log::debug!("Admin query error: {}", e), } 
} else { log::error!("Error: plugin {} replied with an invalid key", plugin_key); diff --git a/zenoh/src/net/tests/tables.rs b/zenoh/src/net/tests/tables.rs index fdf0b6fe65..80a9dd458a 100644 --- a/zenoh/src/net/tests/tables.rs +++ b/zenoh/src/net/tests/tables.rs @@ -628,7 +628,7 @@ fn client_test() { ext::QoSType::DEFAULT, PushBody::Put(Put { timestamp: None, - encoding: Encoding::DEFAULT, + encoding: Encoding::empty(), ext_sinfo: None, #[cfg(feature = "shared-memory")] ext_shm: None, @@ -661,7 +661,7 @@ fn client_test() { ext::QoSType::DEFAULT, PushBody::Put(Put { timestamp: None, - encoding: Encoding::DEFAULT, + encoding: Encoding::empty(), ext_sinfo: None, #[cfg(feature = "shared-memory")] ext_shm: None, @@ -694,7 +694,7 @@ fn client_test() { ext::QoSType::DEFAULT, PushBody::Put(Put { timestamp: None, - encoding: Encoding::DEFAULT, + encoding: Encoding::empty(), ext_sinfo: None, #[cfg(feature = "shared-memory")] ext_shm: None, @@ -727,7 +727,7 @@ fn client_test() { ext::QoSType::DEFAULT, PushBody::Put(Put { timestamp: None, - encoding: Encoding::DEFAULT, + encoding: Encoding::empty(), ext_sinfo: None, #[cfg(feature = "shared-memory")] ext_shm: None, @@ -760,7 +760,7 @@ fn client_test() { ext::QoSType::DEFAULT, PushBody::Put(Put { timestamp: None, - encoding: Encoding::DEFAULT, + encoding: Encoding::empty(), ext_sinfo: None, #[cfg(feature = "shared-memory")] ext_shm: None, diff --git a/zenoh/src/payload.rs b/zenoh/src/payload.rs new file mode 100644 index 0000000000..f499db50da --- /dev/null +++ b/zenoh/src/payload.rs @@ -0,0 +1,673 @@ +// +// Copyright (c) 2023 ZettaScale Technology +// +// This program and the accompanying materials are made available under the +// terms of the Eclipse Public License 2.0 which is available at +// http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 +// which is available at https://www.apache.org/licenses/LICENSE-2.0. 
+// +// SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 +// +// Contributors: +// ZettaScale Zenoh Team, +// + +//! Payload primitives. +use crate::buffers::ZBuf; +use std::{ + borrow::Cow, + convert::Infallible, + fmt::Debug, + ops::{Deref, DerefMut}, + string::FromUtf8Error, + sync::Arc, +}; +use zenoh_buffers::{buffer::SplitBuffer, reader::HasReader, writer::HasWriter, ZSlice}; +use zenoh_result::ZResult; +#[cfg(feature = "shared-memory")] +use zenoh_shm::SharedMemoryBuf; + +#[repr(transparent)] +#[derive(Clone, Debug, Default, PartialEq, Eq)] +pub struct Payload(ZBuf); + +impl Payload { + /// Create an empty payload. + pub const fn empty() -> Self { + Self(ZBuf::empty()) + } + + /// Create a [`Payload`] from any type `T` that can implements [`Into`]. + pub fn new(t: T) -> Self + where + T: Into, + { + Self(t.into()) + } +} + +impl Deref for Payload { + type Target = ZBuf; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl DerefMut for Payload { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} + +/// Provide some facilities specific to the Rust API to encode/decode a [`Value`] with an `Serialize`. +impl Payload { + /// Encode an object of type `T` as a [`Value`] using the [`ZSerde`]. + /// + /// ```rust + /// use zenoh::payload::Payload; + /// + /// let start = String::from("abc"); + /// let payload = Payload::serialize(start.clone()); + /// let end: String = payload.deserialize().unwrap(); + /// assert_eq!(start, end); + /// ``` + pub fn serialize(t: T) -> Self + where + ZSerde: Serialize, + { + ZSerde.serialize(t) + } + + /// Decode an object of type `T` from a [`Value`] using the [`ZSerde`]. + /// See [encode](Value::encode) for an example. + pub fn deserialize(&self) -> ZResult + where + ZSerde: Deserialize, + >::Error: Debug, + { + let t: T = ZSerde.deserialize(self).map_err(|e| zerror!("{:?}", e))?; + Ok(t) + } +} + +/// Trait to encode a type `T` into a [`Value`]. 
+pub trait Serialize { + type Output; + + /// The implementer should take care of serializing the type `T` and set the proper [`Encoding`]. + fn serialize(self, t: T) -> Self::Output; +} + +pub trait Deserialize { + type Error; + + /// The implementer should take care of deserializing the type `T` based on the [`Encoding`] information. + fn deserialize(self, t: &Payload) -> Result; +} + +/// The default serializer for Zenoh payload. It supports primitives types, such as: vec, int, uint, float, string, bool. +/// It also supports common Rust serde values. +#[derive(Clone, Copy, Debug)] +pub struct ZSerde; + +#[derive(Debug, Clone, Copy)] +pub struct ZDeserializeError; + +// Bytes +impl Serialize for ZSerde { + type Output = Payload; + + fn serialize(self, t: ZBuf) -> Self::Output { + Payload::new(t) + } +} + +impl From for ZBuf { + fn from(value: Payload) -> Self { + value.0 + } +} + +impl Deserialize for ZSerde { + type Error = Infallible; + + fn deserialize(self, v: &Payload) -> Result { + Ok(v.into()) + } +} + +impl From<&Payload> for ZBuf { + fn from(value: &Payload) -> Self { + value.0.clone() + } +} + +impl Serialize> for ZSerde { + type Output = Payload; + + fn serialize(self, t: Vec) -> Self::Output { + Payload::new(t) + } +} + +impl Serialize<&[u8]> for ZSerde { + type Output = Payload; + + fn serialize(self, t: &[u8]) -> Self::Output { + Payload::new(t.to_vec()) + } +} + +impl Deserialize> for ZSerde { + type Error = Infallible; + + fn deserialize(self, v: &Payload) -> Result, Self::Error> { + let v: ZBuf = v.into(); + Ok(v.contiguous().to_vec()) + } +} + +impl From<&Payload> for Vec { + fn from(value: &Payload) -> Self { + value.contiguous().to_vec() + } +} + +impl<'a> Serialize> for ZSerde { + type Output = Payload; + + fn serialize(self, t: Cow<'a, [u8]>) -> Self::Output { + Payload::new(t.to_vec()) + } +} + +impl<'a> Deserialize> for ZSerde { + type Error = Infallible; + + fn deserialize(self, v: &Payload) -> Result, Self::Error> { + let v: Vec = 
Self.deserialize(v)?; + Ok(Cow::Owned(v)) + } +} + +impl<'a> From<&'a Payload> for Cow<'a, [u8]> { + fn from(value: &'a Payload) -> Self { + value.contiguous() + } +} + +// String +impl Serialize for ZSerde { + type Output = Payload; + + fn serialize(self, s: String) -> Self::Output { + Payload::new(s.into_bytes()) + } +} + +impl Serialize<&str> for ZSerde { + type Output = Payload; + + fn serialize(self, s: &str) -> Self::Output { + Self.serialize(s.to_string()) + } +} + +impl Deserialize for ZSerde { + type Error = FromUtf8Error; + + fn deserialize(self, v: &Payload) -> Result { + String::from_utf8(v.contiguous().to_vec()) + } +} + +impl TryFrom<&Payload> for String { + type Error = FromUtf8Error; + + fn try_from(value: &Payload) -> Result { + ZSerde.deserialize(value) + } +} + +impl TryFrom for String { + type Error = FromUtf8Error; + + fn try_from(value: Payload) -> Result { + ZSerde.deserialize(&value) + } +} + +impl<'a> Serialize> for ZSerde { + type Output = Payload; + + fn serialize(self, s: Cow<'a, str>) -> Self::Output { + Self.serialize(s.to_string()) + } +} + +impl<'a> Deserialize> for ZSerde { + type Error = FromUtf8Error; + + fn deserialize(self, v: &Payload) -> Result, Self::Error> { + let v: String = Self.deserialize(v)?; + Ok(Cow::Owned(v)) + } +} + +impl TryFrom<&Payload> for Cow<'_, str> { + type Error = FromUtf8Error; + + fn try_from(value: &Payload) -> Result { + ZSerde.deserialize(value) + } +} + +// - Integers impl +macro_rules! 
impl_int { + ($t:ty, $encoding:expr) => { + impl Serialize<$t> for ZSerde { + type Output = Payload; + + fn serialize(self, t: $t) -> Self::Output { + let bs = t.to_le_bytes(); + let end = 1 + bs.iter().rposition(|b| *b != 0).unwrap_or(bs.len() - 1); + // SAFETY: + // - 0 is a valid start index because bs is guaranteed to always have a length greater or equal than 1 + // - end is a valid end index because is bounded between 0 and bs.len() + Payload::new(unsafe { ZSlice::new_unchecked(Arc::new(bs), 0, end) }) + } + } + + impl Serialize<&$t> for ZSerde { + type Output = Payload; + + fn serialize(self, t: &$t) -> Self::Output { + Self.serialize(*t) + } + } + + impl Serialize<&mut $t> for ZSerde { + type Output = Payload; + + fn serialize(self, t: &mut $t) -> Self::Output { + Self.serialize(*t) + } + } + + impl Deserialize<$t> for ZSerde { + type Error = ZDeserializeError; + + fn deserialize(self, v: &Payload) -> Result<$t, Self::Error> { + let p = v.contiguous(); + let mut bs = (0 as $t).to_le_bytes(); + if p.len() > bs.len() { + return Err(ZDeserializeError); + } + bs[..p.len()].copy_from_slice(&p); + let t = <$t>::from_le_bytes(bs); + Ok(t) + } + } + + impl TryFrom<&Payload> for $t { + type Error = ZDeserializeError; + + fn try_from(value: &Payload) -> Result { + ZSerde.deserialize(value) + } + } + }; +} + +// Zenoh unsigned integers +impl_int!(u8, ZSerde::ZENOH_UINT); +impl_int!(u16, ZSerde::ZENOH_UINT); +impl_int!(u32, ZSerde::ZENOH_UINT); +impl_int!(u64, ZSerde::ZENOH_UINT); +impl_int!(usize, ZSerde::ZENOH_UINT); + +// Zenoh signed integers +impl_int!(i8, ZSerde::ZENOH_INT); +impl_int!(i16, ZSerde::ZENOH_INT); +impl_int!(i32, ZSerde::ZENOH_INT); +impl_int!(i64, ZSerde::ZENOH_INT); +impl_int!(isize, ZSerde::ZENOH_INT); + +// Zenoh floats +impl_int!(f32, ZSerde::ZENOH_FLOAT); +impl_int!(f64, ZSerde::ZENOH_FLOAT); + +// Zenoh bool +impl Serialize for ZSerde { + type Output = ZBuf; + + fn serialize(self, t: bool) -> Self::Output { + // SAFETY: casting a bool into an 
integer is well-defined behaviour. + // 0 is false, 1 is true: https://doc.rust-lang.org/std/primitive.bool.html + ZBuf::from((t as u8).to_le_bytes()) + } +} + +impl Deserialize for ZSerde { + type Error = ZDeserializeError; + + fn deserialize(self, v: &Payload) -> Result { + let p = v.contiguous(); + if p.len() != 1 { + return Err(ZDeserializeError); + } + match p[0] { + 0 => Ok(false), + 1 => Ok(true), + _ => Err(ZDeserializeError), + } + } +} + +impl TryFrom<&Payload> for bool { + type Error = ZDeserializeError; + + fn try_from(value: &Payload) -> Result { + ZSerde.deserialize(value) + } +} + +// - Zenoh advanced types encoders/decoders +// JSON +impl Serialize<&serde_json::Value> for ZSerde { + type Output = Result; + + fn serialize(self, t: &serde_json::Value) -> Self::Output { + let mut payload = Payload::empty(); + serde_json::to_writer(payload.writer(), t)?; + Ok(payload) + } +} + +impl Serialize for ZSerde { + type Output = Result; + + fn serialize(self, t: serde_json::Value) -> Self::Output { + Self.serialize(&t) + } +} + +impl Deserialize for ZSerde { + type Error = serde_json::Error; + + fn deserialize(self, v: &Payload) -> Result { + serde_json::from_reader(v.reader()) + } +} + +impl TryFrom for Payload { + type Error = serde_json::Error; + + fn try_from(value: serde_json::Value) -> Result { + ZSerde.serialize(value) + } +} + +// Yaml +impl Serialize<&serde_yaml::Value> for ZSerde { + type Output = Result; + + fn serialize(self, t: &serde_yaml::Value) -> Self::Output { + let mut payload = Payload::empty(); + serde_yaml::to_writer(payload.writer(), t)?; + Ok(payload) + } +} + +impl Serialize for ZSerde { + type Output = Result; + + fn serialize(self, t: serde_yaml::Value) -> Self::Output { + Self.serialize(&t) + } +} + +impl Deserialize for ZSerde { + type Error = serde_yaml::Error; + + fn deserialize(self, v: &Payload) -> Result { + serde_yaml::from_reader(v.reader()) + } +} + +impl TryFrom for Payload { + type Error = serde_yaml::Error; + + fn 
try_from(value: serde_yaml::Value) -> Result { + ZSerde.serialize(value) + } +} + +// CBOR +impl Serialize<&serde_cbor::Value> for ZSerde { + type Output = Result; + + fn serialize(self, t: &serde_cbor::Value) -> Self::Output { + let mut payload = Payload::empty(); + serde_cbor::to_writer(payload.writer(), t)?; + Ok(payload) + } +} + +impl Serialize for ZSerde { + type Output = Result; + + fn serialize(self, t: serde_cbor::Value) -> Self::Output { + Self.serialize(&t) + } +} + +impl Deserialize for ZSerde { + type Error = serde_cbor::Error; + + fn deserialize(self, v: &Payload) -> Result { + serde_cbor::from_reader(v.reader()) + } +} + +impl TryFrom for Payload { + type Error = serde_cbor::Error; + + fn try_from(value: serde_cbor::Value) -> Result { + ZSerde.serialize(value) + } +} + +// Pickle +impl Serialize<&serde_pickle::Value> for ZSerde { + type Output = Result; + + fn serialize(self, t: &serde_pickle::Value) -> Self::Output { + let mut payload = Payload::empty(); + serde_pickle::value_to_writer( + &mut payload.writer(), + t, + serde_pickle::SerOptions::default(), + )?; + Ok(payload) + } +} + +impl Serialize for ZSerde { + type Output = Result; + + fn serialize(self, t: serde_pickle::Value) -> Self::Output { + Self.serialize(&t) + } +} + +impl Deserialize for ZSerde { + type Error = serde_pickle::Error; + + fn deserialize(self, v: &Payload) -> Result { + serde_pickle::value_from_reader(v.reader(), serde_pickle::DeOptions::default()) + } +} + +impl TryFrom for Payload { + type Error = serde_pickle::Error; + + fn try_from(value: serde_pickle::Value) -> Result { + ZSerde.serialize(value) + } +} + +// Shared memory conversion +#[cfg(feature = "shared-memory")] +impl Serialize> for ZSerde { + type Output = Payload; + + fn serialize(self, t: Arc) -> Self::Output { + Payload::new(t) + } +} + +#[cfg(feature = "shared-memory")] +impl Serialize> for ZSerde { + type Output = Payload; + + fn serialize(self, t: Box) -> Self::Output { + let smb: Arc = t.into(); + 
Self.serialize(smb) + } +} + +#[cfg(feature = "shared-memory")] +impl Serialize for ZSerde { + type Output = Payload; + + fn serialize(self, t: SharedMemoryBuf) -> Self::Output { + Payload::new(t) + } +} + +impl From for Payload +where + ZSerde: Serialize, +{ + fn from(t: T) -> Self { + ZSerde.serialize(t) + } +} + +// For convenience to always convert a Value the examples +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum StringOrBase64 { + String(String), + Base64(String), +} + +impl Deref for StringOrBase64 { + type Target = String; + + fn deref(&self) -> &Self::Target { + match self { + Self::String(s) | Self::Base64(s) => s, + } + } +} + +impl std::fmt::Display for StringOrBase64 { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_str(self) + } +} + +impl From for StringOrBase64 { + fn from(v: Payload) -> Self { + use base64::{engine::general_purpose::STANDARD as b64_std_engine, Engine}; + match v.deserialize::() { + Ok(s) => StringOrBase64::String(s), + Err(_) => StringOrBase64::Base64(b64_std_engine.encode(v.contiguous())), + } + } +} + +mod tests { + #[test] + fn serializer() { + use super::Payload; + use rand::Rng; + use zenoh_buffers::ZBuf; + + const NUM: usize = 1_000; + + macro_rules! 
serialize_deserialize { + ($t:ty, $in:expr) => { + let i = $in; + let t = i.clone(); + let v = Payload::serialize(t); + let o: $t = v.deserialize().unwrap(); + assert_eq!(i, o) + }; + } + + let mut rng = rand::thread_rng(); + + serialize_deserialize!(u8, u8::MIN); + serialize_deserialize!(u16, u16::MIN); + serialize_deserialize!(u32, u32::MIN); + serialize_deserialize!(u64, u64::MIN); + serialize_deserialize!(usize, usize::MIN); + + serialize_deserialize!(u8, u8::MAX); + serialize_deserialize!(u16, u16::MAX); + serialize_deserialize!(u32, u32::MAX); + serialize_deserialize!(u64, u64::MAX); + serialize_deserialize!(usize, usize::MAX); + + for _ in 0..NUM { + serialize_deserialize!(u8, rng.gen::()); + serialize_deserialize!(u16, rng.gen::()); + serialize_deserialize!(u32, rng.gen::()); + serialize_deserialize!(u64, rng.gen::()); + serialize_deserialize!(usize, rng.gen::()); + } + + serialize_deserialize!(i8, i8::MIN); + serialize_deserialize!(i16, i16::MIN); + serialize_deserialize!(i32, i32::MIN); + serialize_deserialize!(i64, i64::MIN); + serialize_deserialize!(isize, isize::MIN); + + serialize_deserialize!(i8, i8::MAX); + serialize_deserialize!(i16, i16::MAX); + serialize_deserialize!(i32, i32::MAX); + serialize_deserialize!(i64, i64::MAX); + serialize_deserialize!(isize, isize::MAX); + + for _ in 0..NUM { + serialize_deserialize!(i8, rng.gen::()); + serialize_deserialize!(i16, rng.gen::()); + serialize_deserialize!(i32, rng.gen::()); + serialize_deserialize!(i64, rng.gen::()); + serialize_deserialize!(isize, rng.gen::()); + } + + serialize_deserialize!(f32, f32::MIN); + serialize_deserialize!(f64, f64::MIN); + + serialize_deserialize!(f32, f32::MAX); + serialize_deserialize!(f64, f64::MAX); + + for _ in 0..NUM { + serialize_deserialize!(f32, rng.gen::()); + serialize_deserialize!(f64, rng.gen::()); + } + + serialize_deserialize!(String, ""); + serialize_deserialize!(String, String::from("abcdefghijklmnopqrstuvwxyz")); + + serialize_deserialize!(Vec, vec![0u8; 
0]); + serialize_deserialize!(Vec, vec![0u8; 64]); + + serialize_deserialize!(ZBuf, ZBuf::from(vec![0u8; 0])); + serialize_deserialize!(ZBuf, ZBuf::from(vec![0u8; 64])); + } +} diff --git a/zenoh/src/prelude.rs b/zenoh/src/prelude.rs index ad28470f63..59a4bbd96e 100644 --- a/zenoh/src/prelude.rs +++ b/zenoh/src/prelude.rs @@ -31,21 +31,20 @@ pub(crate) mod common { writer::HasWriter, }; pub use zenoh_core::Resolve; - - pub(crate) type Id = usize; + pub use zenoh_protocol::core::{EndPoint, Locator, ZenohId}; pub use crate::config::{self, Config, ValidatedMap}; pub use crate::handlers::IntoCallbackReceiverPair; - pub use crate::selector::{Parameter, Parameters, Selector}; pub use crate::session::{Session, SessionDeclarations}; - pub use crate::query::{QueryConsolidation, QueryTarget}; + pub use crate::query::{ConsolidationMode, QueryConsolidation, QueryTarget}; + pub use crate::selector::{Parameter, Parameters, Selector}; - pub use crate::value::Value; + pub use crate::encoding::Encoding; /// The encoding of a zenoh `Value`. - pub use zenoh_protocol::core::{Encoding, KnownEncoding}; + pub use crate::payload::{Deserialize, Payload, Serialize}; + pub use crate::value::Value; - pub use crate::query::ConsolidationMode; #[zenoh_macros::unstable] pub use crate::sample::Locality; #[cfg(not(feature = "unstable"))] @@ -56,13 +55,6 @@ pub(crate) mod common { #[zenoh_macros::unstable] pub use crate::publication::PublisherDeclarations; pub use zenoh_protocol::core::{CongestionControl, Reliability, WhatAmI}; - - /// A [`Locator`] contains a choice of protocol, an address and port, as well as optional additional properties to work with. - pub use zenoh_protocol::core::EndPoint; - /// A [`Locator`] contains a choice of protocol, an address and port, as well as optional additional properties to work with. - pub use zenoh_protocol::core::Locator; - /// The global unique id of a zenoh peer. - pub use zenoh_protocol::core::ZenohId; } /// Prelude to import when using Zenoh's sync API. 
diff --git a/zenoh/src/publication.rs b/zenoh/src/publication.rs index 4d45c3919d..9fb4bdf6c3 100644 --- a/zenoh/src/publication.rs +++ b/zenoh/src/publication.rs @@ -13,21 +13,24 @@ // //! Publishing primitives. -#[zenoh_macros::unstable] -use crate::handlers::Callback; -#[zenoh_macros::unstable] -use crate::handlers::DefaultHandler; +use crate::encoding::Encoding; +use crate::key_expr::KeyExpr; use crate::net::primitives::Primitives; -use crate::prelude::*; +use crate::payload::Payload; #[zenoh_macros::unstable] use crate::sample::Attachment; -use crate::sample::DataInfo; -use crate::sample::QoS; -use crate::Encoding; +use crate::sample::{DataInfo, QoS, Sample, SampleKind}; +use crate::Locality; use crate::SessionRef; use crate::Undeclarable; +#[cfg(feature = "unstable")] +use crate::{ + handlers::{Callback, DefaultHandler, IntoCallbackReceiverPair}, + Id, +}; use std::future::Ready; use zenoh_core::{zread, AsyncResolve, Resolvable, Resolve, SyncResolve}; +use zenoh_keyexpr::keyexpr; use zenoh_protocol::network::push::ext; use zenoh_protocol::network::Mapping; use zenoh_protocol::network::Push; @@ -67,8 +70,8 @@ pub type DeleteBuilder<'a, 'b> = PutBuilder<'a, 'b>; /// /// let session = zenoh::open(config::peer()).res().await.unwrap(); /// session -/// .put("key/expression", "value") -/// .encoding(KnownEncoding::TextPlain) +/// .put("key/expression", "payload") +/// .with_encoding(Encoding::TEXT_PLAIN) /// .congestion_control(CongestionControl::Block) /// .res() /// .await @@ -79,22 +82,14 @@ pub type DeleteBuilder<'a, 'b> = PutBuilder<'a, 'b>; #[derive(Debug, Clone)] pub struct PutBuilder<'a, 'b> { pub(crate) publisher: PublisherBuilder<'a, 'b>, - pub(crate) value: Value, + pub(crate) payload: Payload, pub(crate) kind: SampleKind, + pub(crate) encoding: Encoding, #[cfg(feature = "unstable")] pub(crate) attachment: Option, } impl PutBuilder<'_, '_> { - /// Change the encoding of the written data. 
- #[inline] - pub fn encoding(mut self, encoding: IntoEncoding) -> Self - where - IntoEncoding: Into, - { - self.value.encoding = encoding.into(); - self - } /// Change the `congestion_control` to apply when routing the data. #[inline] pub fn congestion_control(mut self, congestion_control: CongestionControl) -> Self { @@ -118,12 +113,18 @@ impl PutBuilder<'_, '_> { self } - pub fn kind(mut self, kind: SampleKind) -> Self { - self.kind = kind; + /// Set the [`Encoding`] of the written data. + #[inline] + pub fn with_encoding(mut self, encoding: IntoEncoding) -> Self + where + IntoEncoding: Into, + { + self.encoding = encoding.into(); self } #[zenoh_macros::unstable] + /// Attach user-provided data to the written data. pub fn with_attachment(mut self, attachment: Attachment) -> Self { self.attachment = Some(attachment); self @@ -155,8 +156,9 @@ impl SyncResolve for PutBuilder<'_, '_> { resolve_put( &publisher, - self.value, + self.payload, self.kind, + self.encoding, #[cfg(feature = "unstable")] self.attachment, ) @@ -308,11 +310,12 @@ impl<'a> Publisher<'a> { std::sync::Arc::new(self) } - fn _write(&self, kind: SampleKind, value: Value) -> Publication { + fn _write(&self, kind: SampleKind, payload: Payload) -> Publication { Publication { publisher: self, - value, + payload, kind, + encoding: Encoding::ZENOH_BYTES, #[cfg(feature = "unstable")] attachment: None, } @@ -327,12 +330,12 @@ impl<'a> Publisher<'a> { /// /// let session = zenoh::open(config::peer()).res().await.unwrap().into_arc(); /// let publisher = session.declare_publisher("key/expression").res().await.unwrap(); - /// publisher.write(SampleKind::Put, "value").res().await.unwrap(); + /// publisher.write(SampleKind::Put, "payload").res().await.unwrap(); /// # }) /// ``` - pub fn write(&self, kind: SampleKind, value: IntoValue) -> Publication + pub fn write(&self, kind: SampleKind, value: IntoPayload) -> Publication where - IntoValue: Into, + IntoPayload: Into, { self._write(kind, value.into()) } @@ 
-350,11 +353,11 @@ impl<'a> Publisher<'a> { /// # }) /// ``` #[inline] - pub fn put(&self, value: IntoValue) -> Publication + pub fn put(&self, payload: IntoPayload) -> Publication where - IntoValue: Into, + IntoPayload: Into, { - self._write(SampleKind::Put, value.into()) + self._write(SampleKind::Put, payload.into()) } /// Delete data. @@ -370,7 +373,7 @@ impl<'a> Publisher<'a> { /// # }) /// ``` pub fn delete(&self) -> Publication { - self._write(SampleKind::Delete, Value::empty()) + self._write(SampleKind::Delete, Payload::empty()) } /// Return the [`MatchingStatus`] of the publisher. @@ -597,13 +600,19 @@ impl Drop for Publisher<'_> { #[must_use = "Resolvables do nothing unless you resolve them using the `res` method from either `SyncResolve` or `AsyncResolve`"] pub struct Publication<'a> { publisher: &'a Publisher<'a>, - value: Value, + payload: Payload, kind: SampleKind, + encoding: Encoding, #[cfg(feature = "unstable")] pub(crate) attachment: Option, } impl<'a> Publication<'a> { + pub fn with_encoding(mut self, encoding: Encoding) -> Self { + self.encoding = encoding; + self + } + #[zenoh_macros::unstable] pub fn with_attachment(mut self, attachment: Attachment) -> Self { self.attachment = Some(attachment); @@ -619,8 +628,9 @@ impl SyncResolve for Publication<'_> { fn res_sync(self) -> ::To { resolve_put( self.publisher, - self.value, + self.payload, self.kind, + self.encoding, #[cfg(feature = "unstable")] self.attachment, ) @@ -635,10 +645,7 @@ impl AsyncResolve for Publication<'_> { } } -impl<'a, IntoValue> Sink for Publisher<'a> -where - IntoValue: Into, -{ +impl<'a> Sink for Publisher<'a> { type Error = Error; #[inline] @@ -647,8 +654,16 @@ where } #[inline] - fn start_send(self: Pin<&mut Self>, item: IntoValue) -> Result<(), Self::Error> { - self.put(item.into()).res_sync() + fn start_send(self: Pin<&mut Self>, item: Sample) -> Result<(), Self::Error> { + Publication { + publisher: &self, + payload: item.payload, + kind: item.kind, + encoding: 
item.encoding, + #[cfg(feature = "unstable")] + attachment: item.attachment, + } + .res_sync() } #[inline] @@ -791,8 +806,9 @@ impl<'a, 'b> AsyncResolve for PublisherBuilder<'a, 'b> { fn resolve_put( publisher: &Publisher<'_>, - value: Value, + payload: Payload, kind: SampleKind, + encoding: Encoding, #[cfg(feature = "unstable")] attachment: Option, ) -> ZResult<()> { log::trace!("write({:?}, [...])", &publisher.key_expr); @@ -825,13 +841,13 @@ fn resolve_put( } PushBody::Put(Put { timestamp, - encoding: value.encoding.clone(), + encoding: encoding.clone().into(), ext_sinfo: None, #[cfg(feature = "shared-memory")] ext_shm: None, ext_attachment, ext_unknown: vec![], - payload: value.payload.clone(), + payload: payload.clone().into(), }) } SampleKind::Delete => { @@ -856,7 +872,7 @@ fn resolve_put( if publisher.destination != Locality::Remote { let data_info = DataInfo { kind, - encoding: Some(value.encoding), + encoding: Some(encoding), timestamp, source_id: None, source_sn: None, @@ -871,7 +887,7 @@ fn resolve_put( true, &publisher.key_expr.to_wire(&publisher.session), Some(data_info), - value.payload, + payload.into(), #[cfg(feature = "unstable")] attachment, ); @@ -1366,7 +1382,7 @@ mod tests { let sample = sub.recv().unwrap(); assert_eq!(sample.kind, kind); - assert_eq!(sample.value.to_string(), VALUE); + assert_eq!(sample.payload.deserialize::().unwrap(), VALUE); } sample_kind_integrity_in_publication_with(SampleKind::Put); @@ -1392,7 +1408,7 @@ mod tests { assert_eq!(sample.kind, kind); if let SampleKind::Put = kind { - assert_eq!(sample.value.to_string(), VALUE); + assert_eq!(sample.payload.deserialize::().unwrap(), VALUE); } } diff --git a/zenoh/src/queryable.rs b/zenoh/src/queryable.rs index b278bcfa26..6bd78d4fc7 100644 --- a/zenoh/src/queryable.rs +++ b/zenoh/src/queryable.rs @@ -17,14 +17,12 @@ use crate::handlers::{locked, DefaultHandler}; use crate::net::primitives::Primitives; use crate::prelude::*; -#[zenoh_macros::unstable] -use 
crate::query::ReplyKeyExpr; -#[zenoh_macros::unstable] -use crate::sample::Attachment; use crate::sample::DataInfo; +use crate::Id; use crate::SessionRef; use crate::Undeclarable; - +#[cfg(feature = "unstable")] +use crate::{query::ReplyKeyExpr, sample::Attachment}; use std::fmt; use std::future::Ready; use std::ops::Deref; @@ -190,8 +188,9 @@ impl SyncResolve for ReplyBuilder<'_> { } let Sample { key_expr, - value: Value { payload, encoding }, + payload, kind, + encoding, timestamp, qos, #[cfg(feature = "unstable")] @@ -251,13 +250,13 @@ impl SyncResolve for ReplyBuilder<'_> { payload: match kind { SampleKind::Put => ReplyBody::Put(Put { timestamp: data_info.timestamp, - encoding: data_info.encoding.unwrap_or_default(), + encoding: data_info.encoding.unwrap_or_default().into(), ext_sinfo, #[cfg(feature = "shared-memory")] ext_shm: None, ext_attachment: ext_attachment!(), ext_unknown: vec![], - payload, + payload: payload.into(), }), SampleKind::Delete => ReplyBody::Del(Del { timestamp, @@ -292,8 +291,8 @@ impl SyncResolve for ReplyBuilder<'_> { ext_body: Some(ValueType { #[cfg(feature = "shared-memory")] ext_shm: None, - payload: payload.payload, - encoding: payload.encoding, + payload: payload.payload.into(), + encoding: payload.encoding.into(), }), code: 0, // TODO }), diff --git a/zenoh/src/sample.rs b/zenoh/src/sample.rs index 89b787fef5..543dd62e84 100644 --- a/zenoh/src/sample.rs +++ b/zenoh/src/sample.rs @@ -13,19 +13,19 @@ // //! 
Sample primitives -use crate::buffers::ZBuf; -use crate::prelude::{KeyExpr, Value, ZenohId}; -use crate::query::Reply; +use crate::encoding::Encoding; +use crate::payload::Payload; +use crate::prelude::{KeyExpr, ZenohId}; use crate::time::{new_reception_timestamp, Timestamp}; use crate::Priority; +use crate::Value; #[zenoh_macros::unstable] use serde::Serialize; use std::{ convert::{TryFrom, TryInto}, fmt, }; -use zenoh_protocol::core::{CongestionControl, Encoding}; -use zenoh_protocol::network::push::ext::QoSType; +use zenoh_protocol::{core::CongestionControl, network::push::ext::QoSType}; pub type SourceSn = u64; @@ -357,10 +357,12 @@ pub use attachment::{Attachment, AttachmentBuilder, AttachmentIterator}; pub struct Sample { /// The key expression on which this Sample was published. pub key_expr: KeyExpr<'static>, - /// The value of this Sample. - pub value: Value, + /// The payload of this Sample. + pub payload: Payload, /// The kind of this Sample. pub kind: SampleKind, + /// The encoding of this sample + pub encoding: Encoding, /// The [`Timestamp`] of this Sample. pub timestamp: Option, /// Quality of service settings this sample was sent with. @@ -390,14 +392,15 @@ pub struct Sample { impl Sample { /// Creates a new Sample. #[inline] - pub fn new(key_expr: IntoKeyExpr, value: IntoValue) -> Self + pub fn new(key_expr: IntoKeyExpr, payload: IntoPayload) -> Self where IntoKeyExpr: Into>, - IntoValue: Into, + IntoPayload: Into, { Sample { key_expr: key_expr.into(), - value: value.into(), + payload: payload.into(), + encoding: Encoding::default(), kind: SampleKind::default(), timestamp: None, qos: QoS::default(), @@ -409,18 +412,19 @@ impl Sample { } /// Creates a new Sample. 
#[inline] - pub fn try_from( + pub fn try_from( key_expr: TryIntoKeyExpr, - value: IntoValue, + payload: IntoPayload, ) -> Result where TryIntoKeyExpr: TryInto>, >>::Error: Into, - IntoValue: Into, + IntoPayload: Into, { Ok(Sample { key_expr: key_expr.try_into().map_err(Into::into)?, - value: value.into(), + payload: payload.into(), + encoding: Encoding::default(), kind: SampleKind::default(), timestamp: None, qos: QoS::default(), @@ -433,40 +437,30 @@ impl Sample { /// Creates a new Sample with optional data info. #[inline] - pub(crate) fn with_info( - key_expr: KeyExpr<'static>, - payload: ZBuf, - data_info: Option, - ) -> Self { - let mut value: Value = payload.into(); - if let Some(data_info) = data_info { - if let Some(encoding) = &data_info.encoding { - value.encoding = encoding.clone(); + pub(crate) fn with_info(mut self, mut data_info: Option) -> Self { + if let Some(mut data_info) = data_info.take() { + self.kind = data_info.kind; + if let Some(encoding) = data_info.encoding.take() { + self.encoding = encoding; } - Sample { - key_expr, - value, - kind: data_info.kind, - timestamp: data_info.timestamp, - qos: data_info.qos, - #[cfg(feature = "unstable")] - source_info: data_info.into(), - #[cfg(feature = "unstable")] - attachment: None, - } - } else { - Sample { - key_expr, - value, - kind: SampleKind::default(), - timestamp: None, - qos: QoS::default(), - #[cfg(feature = "unstable")] - source_info: SourceInfo::empty(), - #[cfg(feature = "unstable")] - attachment: None, + self.qos = data_info.qos; + self.timestamp = data_info.timestamp; + #[cfg(feature = "unstable")] + { + self.source_info = SourceInfo { + source_id: data_info.source_id, + source_sn: data_info.source_sn, + }; } } + self + } + + /// Sets the encoding of this Sample. + #[inline] + pub fn with_encoding(mut self, encoding: Encoding) -> Self { + self.encoding = encoding; + self } /// Gets the timestamp of this Sample. 
@@ -522,34 +516,9 @@ impl Sample { } } -impl std::ops::Deref for Sample { - type Target = Value; - - fn deref(&self) -> &Self::Target { - &self.value - } -} - -impl std::ops::DerefMut for Sample { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.value - } -} - -impl std::fmt::Display for Sample { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self.kind { - SampleKind::Delete => write!(f, "{}({})", self.kind, self.key_expr), - _ => write!(f, "{}({}: {})", self.kind, self.key_expr, self.value), - } - } -} - -impl TryFrom for Sample { - type Error = Value; - - fn try_from(value: Reply) -> Result { - value.sample +impl From for Value { + fn from(sample: Sample) -> Self { + Value::new(sample.payload).with_encoding(sample.encoding) } } diff --git a/zenoh/src/session.rs b/zenoh/src/session.rs index 9ab0242f16..87c416c209 100644 --- a/zenoh/src/session.rs +++ b/zenoh/src/session.rs @@ -11,10 +11,10 @@ // Contributors: // ZettaScale Zenoh Team, // - use crate::admin; use crate::config::Config; use crate::config::Notifier; +use crate::encoding::Encoding; use crate::handlers::{Callback, DefaultHandler}; use crate::info::*; use crate::key_expr::KeyExprInner; @@ -23,6 +23,7 @@ use crate::liveliness::{Liveliness, LivelinessTokenState}; use crate::net::primitives::Primitives; use crate::net::routing::dispatcher::face::Face; use crate::net::runtime::Runtime; +use crate::payload::Payload; use crate::prelude::Locality; use crate::prelude::{KeyExpr, Parameters}; use crate::publication::*; @@ -670,7 +671,7 @@ impl Session { /// # Arguments /// /// * `key_expr` - Key expression matching the resources to put - /// * `value` - The value to put + /// * `payload` - The payload to put /// /// # Examples /// ``` @@ -679,28 +680,29 @@ impl Session { /// /// let session = zenoh::open(config::peer()).res().await.unwrap(); /// session - /// .put("key/expression", "value") - /// .encoding(KnownEncoding::TextPlain) + /// .put("key/expression", "payload") 
+ /// .with_encoding(Encoding::TEXT_PLAIN) /// .res() /// .await /// .unwrap(); /// # }) /// ``` #[inline] - pub fn put<'a, 'b: 'a, TryIntoKeyExpr, IntoValue>( + pub fn put<'a, 'b: 'a, TryIntoKeyExpr, IntoPayload>( &'a self, key_expr: TryIntoKeyExpr, - value: IntoValue, + payload: IntoPayload, ) -> PutBuilder<'a, 'b> where TryIntoKeyExpr: TryInto>, >>::Error: Into, - IntoValue: Into, + IntoPayload: Into, { PutBuilder { publisher: self.declare_publisher(key_expr), - value: value.into(), + payload: payload.into(), kind: SampleKind::Put, + encoding: Encoding::default(), #[cfg(feature = "unstable")] attachment: None, } @@ -732,8 +734,9 @@ impl Session { { PutBuilder { publisher: self.declare_publisher(key_expr), - value: Value::empty(), + payload: Payload::empty(), kind: SampleKind::Delete, + encoding: Encoding::default(), #[cfg(feature = "unstable")] attachment: None, } @@ -1669,7 +1672,7 @@ impl Session { let zenoh_collections::single_or_vec::IntoIter { drain, last } = callbacks.into_iter(); for (cb, key_expr) in drain { #[allow(unused_mut)] - let mut sample = Sample::with_info(key_expr, payload.clone(), info.clone()); + let mut sample = Sample::new(key_expr, payload.clone()).with_info(info.clone()); #[cfg(feature = "unstable")] { sample.attachment = attachment.clone(); @@ -1678,7 +1681,7 @@ impl Session { } if let Some((cb, key_expr)) = last { #[allow(unused_mut)] - let mut sample = Sample::with_info(key_expr, payload, info); + let mut sample = Sample::new(key_expr, payload).with_info(info); #[cfg(feature = "unstable")] { sample.attachment = attachment; @@ -1785,8 +1788,8 @@ impl Session { ); let primitives = state.primitives.as_ref().unwrap().clone(); - drop(state); + if destination != Locality::SessionLocal { #[allow(unused_mut)] let mut ext_attachment = None; @@ -1812,8 +1815,8 @@ impl Session { ext_body: value.as_ref().map(|v| query::ext::QueryBodyType { #[cfg(feature = "shared-memory")] ext_shm: None, - encoding: v.encoding.clone(), - payload: 
v.payload.clone(), + encoding: v.encoding.clone().into(), + payload: v.payload.clone().into(), }), ext_attachment, ext_unknown: vec![], @@ -1831,8 +1834,8 @@ impl Session { value.as_ref().map(|v| query::ext::QueryBodyType { #[cfg(feature = "shared-memory")] ext_shm: None, - encoding: v.encoding.clone(), - payload: v.payload.clone(), + encoding: v.encoding.clone().into(), + payload: v.payload.clone().into(), }), #[cfg(feature = "unstable")] attachment, @@ -1902,8 +1905,8 @@ impl Session { key_expr, parameters, value: body.map(|b| Value { - payload: b.payload, - encoding: b.encoding, + payload: b.payload.into(), + encoding: b.encoding.into(), }), qid, zid, @@ -2188,7 +2191,7 @@ impl Primitives for Session { PushBody::Put(m) => { let info = DataInfo { kind: SampleKind::Put, - encoding: Some(m.encoding), + encoding: Some(m.encoding.into()), timestamp: m.timestamp, qos: QoS::from(msg.ext_qos), source_id: m.ext_sinfo.as_ref().map(|i| i.zid), @@ -2260,12 +2263,12 @@ impl Primitives for Session { std::mem::drop(state); let value = match e.ext_body { Some(body) => Value { - payload: body.payload, - encoding: body.encoding, + payload: body.payload.into(), + encoding: body.encoding.into(), }, None => Value { - payload: ZBuf::empty(), - encoding: zenoh_protocol::core::Encoding::EMPTY, + payload: Payload::empty(), + encoding: Encoding::default(), }, }; let replier_id = match e.ext_sinfo { @@ -2360,7 +2363,7 @@ impl Primitives for Session { payload, info: DataInfo { kind: SampleKind::Put, - encoding: Some(encoding), + encoding: Some(encoding.into()), timestamp, qos: QoS::from(msg.ext_qos), source_id: ext_sinfo.as_ref().map(|i| i.zid), @@ -2391,7 +2394,7 @@ impl Primitives for Session { #[allow(unused_mut)] let mut sample = - Sample::with_info(key_expr.into_owned(), payload, Some(info)); + Sample::new(key_expr.into_owned(), payload).with_info(Some(info)); #[cfg(feature = "unstable")] { sample.attachment = attachment; diff --git a/zenoh/src/subscriber.rs b/zenoh/src/subscriber.rs 
index e0123ec6b1..c707218017 100644 --- a/zenoh/src/subscriber.rs +++ b/zenoh/src/subscriber.rs @@ -13,9 +13,11 @@ // //! Subscribing primitives. -use crate::handlers::{locked, Callback, DefaultHandler}; +use crate::handlers::{locked, Callback, DefaultHandler, IntoCallbackReceiverPair}; +use crate::key_expr::KeyExpr; use crate::prelude::Locality; -use crate::prelude::{Id, IntoCallbackReceiverPair, KeyExpr, Sample}; +use crate::sample::Sample; +use crate::Id; use crate::Undeclarable; use crate::{Result as ZResult, SessionRef}; use std::fmt; @@ -62,7 +64,7 @@ impl fmt::Debug for SubscriberState { /// let session = zenoh::open(config::peer()).res().await.unwrap(); /// let subscriber = session /// .declare_subscriber("key/expression") -/// .callback(|sample| { println!("Received: {} {}", sample.key_expr, sample.value); }) +/// .callback(|sample| { println!("Received: {} {:?}", sample.key_expr, sample.payload) }) /// .res() /// .await /// .unwrap(); @@ -95,7 +97,7 @@ pub(crate) struct SubscriberInner<'a> { /// let session = zenoh::open(config::peer()).res().await.unwrap(); /// let subscriber = session /// .declare_subscriber("key/expression") -/// .callback(|sample| { println!("Received: {} {}", sample.key_expr, sample.value); }) +/// .callback(|sample| { println!("Received: {} {:?}", sample.key_expr, sample.payload); }) /// .pull_mode() /// .res() /// .await @@ -118,7 +120,7 @@ impl<'a> PullSubscriberInner<'a> { /// let session = zenoh::open(config::peer()).res().await.unwrap(); /// let subscriber = session /// .declare_subscriber("key/expression") - /// .callback(|sample| { println!("Received: {} {}", sample.key_expr, sample.value); }) + /// .callback(|sample| { println!("Received: {} {:?}", sample.key_expr, sample.payload); }) /// .pull_mode() /// .res() /// .await @@ -327,7 +329,7 @@ impl<'a, 'b, Mode> SubscriberBuilder<'a, 'b, Mode, DefaultHandler> { /// let session = zenoh::open(config::peer()).res().await.unwrap(); /// let subscriber = session /// 
.declare_subscriber("key/expression") - /// .callback(|sample| { println!("Received: {} {}", sample.key_expr, sample.value); }) + /// .callback(|sample| { println!("Received: {} {:?}", sample.key_expr, sample.payload); }) /// .res() /// .await /// .unwrap(); @@ -402,7 +404,7 @@ impl<'a, 'b, Mode> SubscriberBuilder<'a, 'b, Mode, DefaultHandler> { /// .await /// .unwrap(); /// while let Ok(sample) = subscriber.recv_async().await { - /// println!("Received: {} {}", sample.key_expr, sample.value); + /// println!("Received: {} {:?}", sample.key_expr, sample.payload); /// } /// # }) /// ``` @@ -631,7 +633,7 @@ where /// .await /// .unwrap(); /// while let Ok(sample) = subscriber.recv_async().await { -/// println!("Received: {} {}", sample.key_expr, sample.value); +/// println!("Received: {} {:?}", sample.key_expr, sample.payload); /// } /// # }) /// ``` diff --git a/zenoh/src/value.rs b/zenoh/src/value.rs index 849cfd57d5..128f0ff605 100644 --- a/zenoh/src/value.rs +++ b/zenoh/src/value.rs @@ -13,693 +13,57 @@ // //! Value primitives. +use crate::{encoding::Encoding, payload::Payload}; -use base64::{engine::general_purpose::STANDARD as b64_std_engine, Engine}; -use std::borrow::Cow; -use std::convert::TryFrom; -#[cfg(feature = "shared-memory")] -use std::sync::Arc; - -use zenoh_collections::Properties; -use zenoh_result::ZError; - -use crate::buffers::ZBuf; -use crate::prelude::{Encoding, KnownEncoding, Sample, SplitBuffer}; -#[cfg(feature = "shared-memory")] -use zenoh_shm::SharedMemoryBuf; - -/// A zenoh Value. +/// A zenoh [`Value`] contains a `payload` and an [`Encoding`] that indicates how the [`Payload`] should be interpreted. #[non_exhaustive] -#[derive(Clone)] +#[derive(Clone, Debug, PartialEq, Eq)] pub struct Value { - /// The payload of this Value. - pub payload: ZBuf, - /// An encoding description indicating how the associated payload is encoded. + /// The binary [`Payload`] of this [`Value`]. + pub payload: Payload, + /// The [`Encoding`] of this [`Value`]. 
pub encoding: Encoding, } impl Value { - /// Creates a new zenoh Value. - pub fn new(payload: ZBuf) -> Self { + /// Creates a new [`Value`] with default [`Encoding`]. + pub fn new(payload: T) -> Self + where + T: Into, + { Value { - payload, - encoding: KnownEncoding::AppOctetStream.into(), + payload: payload.into(), + encoding: Encoding::default(), } } - /// Creates an empty Value. - pub fn empty() -> Self { + /// Creates an empty [`Value`]. + pub const fn empty() -> Self { Value { - payload: ZBuf::empty(), - encoding: KnownEncoding::AppOctetStream.into(), + payload: Payload::empty(), + encoding: Encoding::default(), } } - /// Sets the encoding of this zenoh Value. + /// Sets the encoding of this [`Value`]`. #[inline(always)] - pub fn encoding(mut self, encoding: Encoding) -> Self { - self.encoding = encoding; + pub fn with_encoding(mut self, encoding: IntoEncoding) -> Self + where + IntoEncoding: Into, + { + self.encoding = encoding.into(); self } } -impl std::fmt::Debug for Value { - fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { - write!( - f, - "Value{{ payload: {:?}, encoding: {} }}", - self.payload, self.encoding - ) - } -} - -impl std::fmt::Display for Value { - fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { - let payload = self.payload.contiguous(); - write!( - f, - "{}", - String::from_utf8(payload.clone().into_owned()) - .unwrap_or_else(|_| b64_std_engine.encode(payload)) - ) - } -} - -impl std::error::Error for Value {} - -// Shared memory conversion -#[cfg(feature = "shared-memory")] -impl From> for Value { - fn from(smb: Arc) -> Self { - Value { - payload: smb.into(), - encoding: KnownEncoding::AppOctetStream.into(), - } - } -} - -#[cfg(feature = "shared-memory")] -impl From> for Value { - fn from(smb: Box) -> Self { - let smb: Arc = smb.into(); - Self::from(smb) - } -} - -#[cfg(feature = "shared-memory")] -impl From for Value { - fn from(smb: SharedMemoryBuf) -> Self { +impl From for Value +where + T: Into, +{ + fn 
from(t: T) -> Self { Value { - payload: smb.into(), - encoding: KnownEncoding::AppOctetStream.into(), + payload: t.into(), + encoding: Encoding::default(), } } } - -// Bytes conversion -impl From for Value { - fn from(buf: ZBuf) -> Self { - Value { - payload: buf, - encoding: KnownEncoding::AppOctetStream.into(), - } - } -} - -impl TryFrom<&Value> for ZBuf { - type Error = ZError; - - fn try_from(v: &Value) -> Result { - match v.encoding.prefix() { - KnownEncoding::AppOctetStream => Ok(v.payload.clone()), - unexpected => Err(zerror!( - "{:?} can not be converted into Cow<'a, [u8]>", - unexpected - )), - } - } -} - -impl TryFrom for ZBuf { - type Error = ZError; - - fn try_from(v: Value) -> Result { - Self::try_from(&v) - } -} - -impl From<&[u8]> for Value { - fn from(buf: &[u8]) -> Self { - Value::from(ZBuf::from(buf.to_vec())) - } -} - -impl<'a> TryFrom<&'a Value> for Cow<'a, [u8]> { - type Error = ZError; - - fn try_from(v: &'a Value) -> Result { - match v.encoding.prefix() { - KnownEncoding::AppOctetStream => Ok(v.payload.contiguous()), - unexpected => Err(zerror!( - "{:?} can not be converted into Cow<'a, [u8]>", - unexpected - )), - } - } -} - -impl From> for Value { - fn from(buf: Vec) -> Self { - Value::from(ZBuf::from(buf)) - } -} - -impl TryFrom<&Value> for Vec { - type Error = ZError; - - fn try_from(v: &Value) -> Result { - match v.encoding.prefix() { - KnownEncoding::AppOctetStream => Ok(v.payload.contiguous().to_vec()), - unexpected => Err(zerror!( - "{:?} can not be converted into Vec", - unexpected - )), - } - } -} - -impl TryFrom for Vec { - type Error = ZError; - - fn try_from(v: Value) -> Result { - Self::try_from(&v) - } -} - -// String conversion -impl From for Value { - fn from(s: String) -> Self { - Value { - payload: ZBuf::from(s.into_bytes()), - encoding: KnownEncoding::TextPlain.into(), - } - } -} - -impl From<&str> for Value { - fn from(s: &str) -> Self { - Value { - payload: ZBuf::from(Vec::::from(s)), - encoding: 
KnownEncoding::TextPlain.into(), - } - } -} - -impl TryFrom<&Value> for String { - type Error = ZError; - - fn try_from(v: &Value) -> Result { - match v.encoding.prefix() { - KnownEncoding::TextPlain => { - String::from_utf8(v.payload.contiguous().to_vec()).map_err(|e| zerror!("{}", e)) - } - unexpected => Err(zerror!("{:?} can not be converted into String", unexpected)), - } - } -} - -impl TryFrom for String { - type Error = ZError; - - fn try_from(v: Value) -> Result { - Self::try_from(&v) - } -} - -// Sample conversion -impl From for Value { - fn from(s: Sample) -> Self { - s.value - } -} - -// i64 conversion -impl From for Value { - fn from(i: i64) -> Self { - Value { - payload: ZBuf::from(Vec::::from(i.to_string())), - encoding: KnownEncoding::AppInteger.into(), - } - } -} - -impl TryFrom<&Value> for i64 { - type Error = ZError; - - fn try_from(v: &Value) -> Result { - match v.encoding.prefix() { - KnownEncoding::AppInteger => std::str::from_utf8(&v.payload.contiguous()) - .map_err(|e| zerror!("{}", e))? - .parse() - .map_err(|e| zerror!("{}", e)), - unexpected => Err(zerror!("{:?} can not be converted into i64", unexpected)), - } - } -} - -impl TryFrom for i64 { - type Error = ZError; - - fn try_from(v: Value) -> Result { - Self::try_from(&v) - } -} - -// i32 conversion -impl From for Value { - fn from(i: i32) -> Self { - Value { - payload: ZBuf::from(Vec::::from(i.to_string())), - encoding: KnownEncoding::AppInteger.into(), - } - } -} - -impl TryFrom<&Value> for i32 { - type Error = ZError; - - fn try_from(v: &Value) -> Result { - match v.encoding.prefix() { - KnownEncoding::AppInteger => std::str::from_utf8(&v.payload.contiguous()) - .map_err(|e| zerror!("{}", e))? 
- .parse() - .map_err(|e| zerror!("{}", e)), - unexpected => Err(zerror!("{:?} can not be converted into i32", unexpected)), - } - } -} - -impl TryFrom for i32 { - type Error = ZError; - - fn try_from(v: Value) -> Result { - Self::try_from(&v) - } -} - -// i16 conversion -impl From for Value { - fn from(i: i16) -> Self { - Value { - payload: ZBuf::from(Vec::::from(i.to_string())), - encoding: KnownEncoding::AppInteger.into(), - } - } -} - -impl TryFrom<&Value> for i16 { - type Error = ZError; - - fn try_from(v: &Value) -> Result { - match v.encoding.prefix() { - KnownEncoding::AppInteger => std::str::from_utf8(&v.payload.contiguous()) - .map_err(|e| zerror!("{}", e))? - .parse() - .map_err(|e| zerror!("{}", e)), - unexpected => Err(zerror!("{:?} can not be converted into i16", unexpected)), - } - } -} - -impl TryFrom for i16 { - type Error = ZError; - - fn try_from(v: Value) -> Result { - Self::try_from(&v) - } -} - -// i8 conversion -impl From for Value { - fn from(i: i8) -> Self { - Value { - payload: ZBuf::from(Vec::::from(i.to_string())), - encoding: KnownEncoding::AppInteger.into(), - } - } -} - -impl TryFrom<&Value> for i8 { - type Error = ZError; - - fn try_from(v: &Value) -> Result { - match v.encoding.prefix() { - KnownEncoding::AppInteger => std::str::from_utf8(&v.payload.contiguous()) - .map_err(|e| zerror!("{}", e))? 
- .parse() - .map_err(|e| zerror!("{}", e)), - unexpected => Err(zerror!("{:?} can not be converted into i8", unexpected)), - } - } -} - -impl TryFrom for i8 { - type Error = ZError; - - fn try_from(v: Value) -> Result { - Self::try_from(&v) - } -} - -// isize conversion -impl From for Value { - fn from(i: isize) -> Self { - Value { - payload: ZBuf::from(Vec::::from(i.to_string())), - encoding: KnownEncoding::AppInteger.into(), - } - } -} - -impl TryFrom<&Value> for isize { - type Error = ZError; - - fn try_from(v: &Value) -> Result { - match v.encoding.prefix() { - KnownEncoding::AppInteger => std::str::from_utf8(&v.payload.contiguous()) - .map_err(|e| zerror!("{}", e))? - .parse() - .map_err(|e| zerror!("{}", e)), - unexpected => Err(zerror!("{:?} can not be converted into isize", unexpected)), - } - } -} - -impl TryFrom for isize { - type Error = ZError; - - fn try_from(v: Value) -> Result { - Self::try_from(&v) - } -} - -// u64 conversion -impl From for Value { - fn from(i: u64) -> Self { - Value { - payload: ZBuf::from(Vec::::from(i.to_string())), - encoding: KnownEncoding::AppInteger.into(), - } - } -} - -impl TryFrom<&Value> for u64 { - type Error = ZError; - - fn try_from(v: &Value) -> Result { - match v.encoding.prefix() { - KnownEncoding::AppInteger => std::str::from_utf8(&v.payload.contiguous()) - .map_err(|e| zerror!("{}", e))? 
- .parse() - .map_err(|e| zerror!("{}", e)), - unexpected => Err(zerror!("{:?} can not be converted into u64", unexpected)), - } - } -} - -impl TryFrom for u64 { - type Error = ZError; - - fn try_from(v: Value) -> Result { - Self::try_from(&v) - } -} - -// u32 conversion -impl From for Value { - fn from(i: u32) -> Self { - Value { - payload: ZBuf::from(Vec::::from(i.to_string())), - encoding: KnownEncoding::AppInteger.into(), - } - } -} - -impl TryFrom<&Value> for u32 { - type Error = ZError; - - fn try_from(v: &Value) -> Result { - match v.encoding.prefix() { - KnownEncoding::AppInteger => std::str::from_utf8(&v.payload.contiguous()) - .map_err(|e| zerror!("{}", e))? - .parse() - .map_err(|e| zerror!("{}", e)), - unexpected => Err(zerror!("{:?} can not be converted into u32", unexpected)), - } - } -} - -impl TryFrom for u32 { - type Error = ZError; - - fn try_from(v: Value) -> Result { - Self::try_from(&v) - } -} - -// u16 conversion -impl From for Value { - fn from(i: u16) -> Self { - Value { - payload: ZBuf::from(Vec::::from(i.to_string())), - encoding: KnownEncoding::AppInteger.into(), - } - } -} - -impl TryFrom<&Value> for u16 { - type Error = ZError; - - fn try_from(v: &Value) -> Result { - match v.encoding.prefix() { - KnownEncoding::AppInteger => std::str::from_utf8(&v.payload.contiguous()) - .map_err(|e| zerror!("{}", e))? 
- .parse() - .map_err(|e| zerror!("{}", e)), - unexpected => Err(zerror!("{:?} can not be converted into u16", unexpected)), - } - } -} - -impl TryFrom for u16 { - type Error = ZError; - - fn try_from(v: Value) -> Result { - Self::try_from(&v) - } -} - -// u8 conversion -impl From for Value { - fn from(i: u8) -> Self { - Value { - payload: ZBuf::from(Vec::::from(i.to_string())), - encoding: KnownEncoding::AppInteger.into(), - } - } -} - -impl TryFrom<&Value> for u8 { - type Error = ZError; - - fn try_from(v: &Value) -> Result { - match v.encoding.prefix() { - KnownEncoding::AppInteger => std::str::from_utf8(&v.payload.contiguous()) - .map_err(|e| zerror!("{}", e))? - .parse() - .map_err(|e| zerror!("{}", e)), - unexpected => Err(zerror!("{:?} can not be converted into u8", unexpected)), - } - } -} - -impl TryFrom for u8 { - type Error = ZError; - - fn try_from(v: Value) -> Result { - Self::try_from(&v) - } -} - -// usize conversion -impl From for Value { - fn from(i: usize) -> Self { - Value { - payload: ZBuf::from(Vec::::from(i.to_string())), - encoding: KnownEncoding::AppInteger.into(), - } - } -} - -impl TryFrom<&Value> for usize { - type Error = ZError; - - fn try_from(v: &Value) -> Result { - match v.encoding.prefix() { - KnownEncoding::AppInteger => std::str::from_utf8(&v.payload.contiguous()) - .map_err(|e| zerror!("{}", e))? 
- .parse() - .map_err(|e| zerror!("{}", e)), - unexpected => Err(zerror!("{:?} can not be converted into usize", unexpected)), - } - } -} - -impl TryFrom for usize { - type Error = ZError; - - fn try_from(v: Value) -> Result { - Self::try_from(&v) - } -} - -// f64 conversion -impl From for Value { - fn from(f: f64) -> Self { - Value { - payload: ZBuf::from(Vec::::from(f.to_string())), - encoding: KnownEncoding::AppFloat.into(), - } - } -} - -impl TryFrom<&Value> for f64 { - type Error = ZError; - - fn try_from(v: &Value) -> Result { - match v.encoding.prefix() { - KnownEncoding::AppFloat => std::str::from_utf8(&v.payload.contiguous()) - .map_err(|e| zerror!("{}", e))? - .parse() - .map_err(|e| zerror!("{}", e)), - unexpected => Err(zerror!("{:?} can not be converted into f64", unexpected)), - } - } -} - -impl TryFrom for f64 { - type Error = ZError; - - fn try_from(v: Value) -> Result { - Self::try_from(&v) - } -} - -// f32 conversion -impl From for Value { - fn from(f: f32) -> Self { - Value { - payload: ZBuf::from(Vec::::from(f.to_string())), - encoding: KnownEncoding::AppFloat.into(), - } - } -} - -impl TryFrom<&Value> for f32 { - type Error = ZError; - - fn try_from(v: &Value) -> Result { - match v.encoding.prefix() { - KnownEncoding::AppFloat => std::str::from_utf8(&v.payload.contiguous()) - .map_err(|e| zerror!("{}", e))? 
- .parse() - .map_err(|e| zerror!("{}", e)), - unexpected => Err(zerror!("{:?} can not be converted into f32", unexpected)), - } - } -} - -impl TryFrom for f32 { - type Error = ZError; - - fn try_from(v: Value) -> Result { - Self::try_from(&v) - } -} - -// JSON conversion -impl From<&serde_json::Value> for Value { - fn from(json: &serde_json::Value) -> Self { - Value { - payload: ZBuf::from(Vec::::from(json.to_string())), - encoding: KnownEncoding::AppJson.into(), - } - } -} - -impl From for Value { - fn from(json: serde_json::Value) -> Self { - Value::from(&json) - } -} - -impl TryFrom<&Value> for serde_json::Value { - type Error = ZError; - - fn try_from(v: &Value) -> Result { - match v.encoding.prefix() { - KnownEncoding::AppJson | KnownEncoding::TextJson => { - let r = serde::Deserialize::deserialize(&mut serde_json::Deserializer::from_slice( - &v.payload.contiguous(), - )); - r.map_err(|e| zerror!("{}", e)) - } - unexpected => Err(zerror!( - "{:?} can not be converted into Properties", - unexpected - )), - } - } -} - -impl TryFrom for serde_json::Value { - type Error = ZError; - - fn try_from(v: Value) -> Result { - Self::try_from(&v) - } -} - -// Properties conversion -impl From for Value { - fn from(p: Properties) -> Self { - Value { - payload: ZBuf::from(Vec::::from(p.to_string())), - encoding: KnownEncoding::AppProperties.into(), - } - } -} - -impl TryFrom<&Value> for Properties { - type Error = ZError; - - fn try_from(v: &Value) -> Result { - match *v.encoding.prefix() { - KnownEncoding::AppProperties => Ok(Properties::from( - std::str::from_utf8(&v.payload.contiguous()).map_err(|e| zerror!("{}", e))?, - )), - unexpected => Err(zerror!( - "{:?} can not be converted into Properties", - unexpected - )), - } - } -} - -impl TryFrom for Properties { - type Error = ZError; - - fn try_from(v: Value) -> Result { - Self::try_from(&v) - } -} diff --git a/zenoh/tests/attachments.rs b/zenoh/tests/attachments.rs index d1fbd1086a..89dd3e231f 100644 --- 
a/zenoh/tests/attachments.rs +++ b/zenoh/tests/attachments.rs @@ -75,7 +75,7 @@ fn queries() { query .reply(Ok(Sample::new( query.key_expr().clone(), - query.value().unwrap().clone(), + query.value().unwrap().payload.clone(), ) .with_attachment(attachment))) .res() diff --git a/zenoh/tests/routing.rs b/zenoh/tests/routing.rs index 3b10f12f03..5c96f080f8 100644 --- a/zenoh/tests/routing.rs +++ b/zenoh/tests/routing.rs @@ -58,7 +58,7 @@ impl Task { let sub = ztimeout!(session.declare_subscriber(ke).res_async())?; let mut counter = 0; while let Ok(sample) = sub.recv_async().await { - let recv_size = sample.value.payload.len(); + let recv_size = sample.payload.len(); if recv_size != *expected_size { bail!("Received payload size {recv_size} mismatches the expected {expected_size}"); } @@ -75,7 +75,7 @@ impl Task { let value: Value = vec![0u8; *payload_size].into(); while remaining_checkpoints.load(Ordering::Relaxed) > 0 { ztimeout!(session - .put(ke, value.clone()) + .put(ke, value.payload.clone()) .congestion_control(CongestionControl::Block) .res_async())?; } @@ -91,7 +91,7 @@ impl Task { while let Ok(reply) = replies.recv_async().await { match reply.sample { Ok(sample) => { - let recv_size = sample.value.payload.len(); + let recv_size = sample.payload.len(); if recv_size != *expected_size { bail!("Received payload size {recv_size} mismatches the expected {expected_size}"); } @@ -99,7 +99,7 @@ impl Task { Err(err) => { log::warn!( - "Sample got from {} failed to unwrap! Error: {}.", + "Sample got from {} failed to unwrap! 
Error: {:?}.", ke, err ); diff --git a/zenoh/tests/session.rs b/zenoh/tests/session.rs index f727ad60c3..8a3f4381d2 100644 --- a/zenoh/tests/session.rs +++ b/zenoh/tests/session.rs @@ -95,7 +95,7 @@ async fn test_session_pubsub(peer01: &Session, peer02: &Session, reliability: Re let sub = ztimeout!(peer01 .declare_subscriber(key_expr) .callback(move |sample| { - assert_eq!(sample.value.payload.len(), size); + assert_eq!(sample.payload.len(), size); c_msgs.fetch_add(1, Ordering::Relaxed); }) .res_async()) @@ -194,7 +194,7 @@ async fn test_session_qryrep(peer01: &Session, peer02: &Session, reliability: Re while let Ok(s) = ztimeout!(rs.recv_async()) { let s = s.sample.unwrap(); assert_eq!(s.kind, SampleKind::Put); - assert_eq!(s.value.payload.len(), size); + assert_eq!(s.payload.len(), size); cnt += 1; } } @@ -212,7 +212,7 @@ async fn test_session_qryrep(peer01: &Session, peer02: &Session, reliability: Re while let Ok(s) = ztimeout!(rs.recv_async()) { let s = s.sample.unwrap(); assert_eq!(s.kind, SampleKind::Delete); - assert_eq!(s.value.payload.len(), 0); + assert_eq!(s.payload.len(), 0); cnt += 1; } } diff --git a/zenoh/tests/unicity.rs b/zenoh/tests/unicity.rs index b986c92e8f..76910ee5de 100644 --- a/zenoh/tests/unicity.rs +++ b/zenoh/tests/unicity.rs @@ -114,7 +114,7 @@ async fn test_unicity_pubsub(s01: &Session, s02: &Session, s03: &Session) { let sub1 = ztimeout!(s01 .declare_subscriber(key_expr) .callback(move |sample| { - assert_eq!(sample.value.payload.len(), size); + assert_eq!(sample.payload.len(), size); c_msgs1.fetch_add(1, Ordering::Relaxed); }) .res_async()) @@ -126,7 +126,7 @@ async fn test_unicity_pubsub(s01: &Session, s02: &Session, s03: &Session) { let sub2 = ztimeout!(s02 .declare_subscriber(key_expr) .callback(move |sample| { - assert_eq!(sample.value.payload.len(), size); + assert_eq!(sample.payload.len(), size); c_msgs2.fetch_add(1, Ordering::Relaxed); }) .res_async()) @@ -224,7 +224,7 @@ async fn test_unicity_qryrep(s01: &Session, s02: 
&Session, s03: &Session) { for _ in 0..msg_count { let rs = ztimeout!(s03.get(key_expr).res_async()).unwrap(); while let Ok(s) = ztimeout!(rs.recv_async()) { - assert_eq!(s.sample.unwrap().value.payload.len(), size); + assert_eq!(s.sample.unwrap().payload.len(), size); cnt += 1; } } From b11a20e11f3c2c7c66644417cb0ed08b852cb88a Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Tue, 12 Mar 2024 09:42:32 +0100 Subject: [PATCH 007/124] Fix protocol comment --- commons/zenoh-protocol/src/zenoh/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/commons/zenoh-protocol/src/zenoh/mod.rs b/commons/zenoh-protocol/src/zenoh/mod.rs index 4c8458885b..1284116888 100644 --- a/commons/zenoh-protocol/src/zenoh/mod.rs +++ b/commons/zenoh-protocol/src/zenoh/mod.rs @@ -214,7 +214,7 @@ pub mod ext { /// +-+-+-+-+-+-+-+-+ /// ~ encoding ~ /// +---------------+ - /// ~ pl: ~ -- Payload + /// ~ pl: [u8;z32] ~ -- Payload /// +---------------+ /// ``` #[derive(Debug, Clone, PartialEq, Eq)] From fcbceb07ae9bd4160a294ab5c982b6882eca6a7b Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Tue, 12 Mar 2024 12:43:21 +0100 Subject: [PATCH 008/124] Improve Endpoint and Locator doc --- commons/zenoh-protocol/src/core/endpoint.rs | 7 ++++++- commons/zenoh-protocol/src/core/locator.rs | 6 +++--- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/commons/zenoh-protocol/src/core/endpoint.rs b/commons/zenoh-protocol/src/core/endpoint.rs index 5e921345e4..a8fcb3ae98 100644 --- a/commons/zenoh-protocol/src/core/endpoint.rs +++ b/commons/zenoh-protocol/src/core/endpoint.rs @@ -497,7 +497,12 @@ impl fmt::Debug for ConfigMut<'_> { } } -/// A `String` that respects the [`EndPoint`] canon form: `#`, such that `` is a valid [`Locator`] `` is of the form `=;...;=` where keys are alphabetically sorted. +/// A string that respects the [`EndPoint`] canon form: `[#]`. +/// +/// `` is a valid [`Locator`] and `` is of the form `=;...;=` where keys are alphabetically sorted. 
+/// `` is optional and can be provided to configure some aspectes for an [`EndPoint`], e.g. the interface to listen on or connect to. +/// +/// A full [`EndPoint`] string is hence in the form of `/
[?][#config]`. #[derive(Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)] #[serde(into = "String")] #[serde(try_from = "String")] diff --git a/commons/zenoh-protocol/src/core/locator.rs b/commons/zenoh-protocol/src/core/locator.rs index 42379f2b65..50b909b12f 100644 --- a/commons/zenoh-protocol/src/core/locator.rs +++ b/commons/zenoh-protocol/src/core/locator.rs @@ -16,9 +16,9 @@ use alloc::{borrow::ToOwned, string::String}; use core::{convert::TryFrom, fmt, hash::Hash, str::FromStr}; use zenoh_result::{Error as ZError, ZResult}; -// Locator -/// A `String` that respects the [`Locator`] canon form: `/
[?]`, -/// such that `` is of the form `=;...;=` where keys are alphabetically sorted. +/// A string that respects the [`Locator`] canon form: `/
[?]`. +/// +/// `` is of the form `=;...;=` where keys are alphabetically sorted. #[derive(Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)] #[serde(into = "String")] #[serde(try_from = "String")] From 41e25579f9f3c851f44c992946fd1d0c61fccfa9 Mon Sep 17 00:00:00 2001 From: OlivierHecart Date: Tue, 12 Mar 2024 13:42:18 +0100 Subject: [PATCH 009/124] Protocol changes: EntityId (into protocol_changes) (#774) * New Subscribers EntityId behavior for clients and peers * Improve routing logging * New Queryables EntityId behavior for clients and peers * Improve routing logging * Use proper QueryableId in Session and AdminSpace * Sessions use runtime Id generator to avoid collisions * AdminSpace use runtime Id generator to avoid collisions * Use proper ResponderId * Define EntityId type * Add source_eid to SourceInfo * Update source_info_stack_size test * Update source_info_stack_size test * Introduce EntityGlobalId type * Add id() function to Subscriber, Queryable and Publisher * Add Publication::with_source_info() function * Code format * Remove ref to PR #703 * Fix doctests * Add comments * Remove comments --- commons/zenoh-codec/src/network/declare.rs | 13 +- commons/zenoh-codec/src/network/mod.rs | 22 +- commons/zenoh-codec/src/zenoh/mod.rs | 26 +- commons/zenoh-protocol/src/core/mod.rs | 21 + commons/zenoh-protocol/src/core/wire_expr.rs | 4 + commons/zenoh-protocol/src/network/declare.rs | 17 +- commons/zenoh-protocol/src/network/mod.rs | 10 +- .../zenoh-protocol/src/network/response.rs | 2 +- commons/zenoh-protocol/src/zenoh/mod.rs | 10 +- zenoh/src/lib.rs | 2 +- zenoh/src/net/routing/dispatcher/face.rs | 6 +- zenoh/src/net/routing/dispatcher/pubsub.rs | 115 +++-- zenoh/src/net/routing/dispatcher/queries.rs | 103 ++-- zenoh/src/net/routing/dispatcher/resource.rs | 14 +- zenoh/src/net/routing/hat/client/mod.rs | 26 +- zenoh/src/net/routing/hat/client/pubsub.rs | 107 ++-- zenoh/src/net/routing/hat/client/queries.rs | 113 +++-- 
.../src/net/routing/hat/linkstate_peer/mod.rs | 30 +- .../net/routing/hat/linkstate_peer/pubsub.rs | 136 ++--- .../net/routing/hat/linkstate_peer/queries.rs | 142 +++--- zenoh/src/net/routing/hat/mod.rs | 17 +- zenoh/src/net/routing/hat/p2p_peer/mod.rs | 29 +- zenoh/src/net/routing/hat/p2p_peer/pubsub.rs | 106 ++-- zenoh/src/net/routing/hat/p2p_peer/queries.rs | 113 +++-- zenoh/src/net/routing/hat/router/mod.rs | 31 +- zenoh/src/net/routing/hat/router/pubsub.rs | 213 ++++---- zenoh/src/net/routing/hat/router/queries.rs | 220 +++++---- zenoh/src/net/runtime/adminspace.rs | 8 +- zenoh/src/net/runtime/mod.rs | 8 + zenoh/src/net/tests/tables.rs | 88 +++- zenoh/src/prelude.rs | 5 + zenoh/src/publication.rs | 83 +++- zenoh/src/queryable.rs | 73 +-- zenoh/src/sample.rs | 20 +- zenoh/src/session.rs | 467 +++++++----------- zenoh/src/subscriber.rs | 26 + 36 files changed, 1349 insertions(+), 1077 deletions(-) diff --git a/commons/zenoh-codec/src/network/declare.rs b/commons/zenoh-codec/src/network/declare.rs index 6df25a8d2a..bcc55ed62b 100644 --- a/commons/zenoh-codec/src/network/declare.rs +++ b/commons/zenoh-codec/src/network/declare.rs @@ -441,14 +441,19 @@ where let subscriber::UndeclareSubscriber { id, ext_wire_expr } = x; // Header - let header = declare::id::U_SUBSCRIBER | subscriber::flag::Z; + let mut header = declare::id::U_SUBSCRIBER; + if !ext_wire_expr.is_null() { + header |= subscriber::flag::Z; + } self.write(&mut *writer, header)?; // Body self.write(&mut *writer, id)?; // Extension - self.write(&mut *writer, (ext_wire_expr, false))?; + if !ext_wire_expr.is_null() { + self.write(&mut *writer, (ext_wire_expr, false))?; + } Ok(()) } @@ -483,7 +488,6 @@ where let id: subscriber::SubscriberId = self.codec.read(&mut *reader)?; // Extensions - // WARNING: this is a temporary and mandatory extension used for undeclarations let mut ext_wire_expr = common::ext::WireExprType::null(); let mut has_ext = imsg::has_flag(self.header, subscriber::flag::Z); @@ -665,7 +669,6 
@@ where let id: queryable::QueryableId = self.codec.read(&mut *reader)?; // Extensions - // WARNING: this is a temporary and mandatory extension used for undeclarations let mut ext_wire_expr = common::ext::WireExprType::null(); let mut has_ext = imsg::has_flag(self.header, queryable::flag::Z); @@ -813,7 +816,6 @@ where let id: token::TokenId = self.codec.read(&mut *reader)?; // Extensions - // WARNING: this is a temporary and mandatory extension used for undeclarations let mut ext_wire_expr = common::ext::WireExprType::null(); let mut has_ext = imsg::has_flag(self.header, interest::flag::Z); @@ -1032,7 +1034,6 @@ where let id: interest::InterestId = self.codec.read(&mut *reader)?; // Extensions - // WARNING: this is a temporary and mandatory extension used for undeclarations let mut ext_wire_expr = common::ext::WireExprType::null(); let mut has_ext = imsg::has_flag(self.header, interest::flag::Z); diff --git a/commons/zenoh-codec/src/network/mod.rs b/commons/zenoh-codec/src/network/mod.rs index dade13d362..3a227cd42a 100644 --- a/commons/zenoh-codec/src/network/mod.rs +++ b/commons/zenoh-codec/src/network/mod.rs @@ -26,8 +26,8 @@ use zenoh_buffers::{ }; use zenoh_protocol::{ common::{imsg, ZExtZ64, ZExtZBufHeader}, - core::{Reliability, ZenohId}, - network::{ext::EntityIdType, *}, + core::{EntityId, Reliability, ZenohId}, + network::{ext::EntityGlobalIdType, *}, }; // NetworkMessage @@ -218,21 +218,21 @@ where } // Extension: EntityId -impl LCodec<&ext::EntityIdType<{ ID }>> for Zenoh080 { - fn w_len(self, x: &ext::EntityIdType<{ ID }>) -> usize { - let EntityIdType { zid, eid } = x; +impl LCodec<&ext::EntityGlobalIdType<{ ID }>> for Zenoh080 { + fn w_len(self, x: &ext::EntityGlobalIdType<{ ID }>) -> usize { + let EntityGlobalIdType { zid, eid } = x; 1 + self.w_len(zid) + self.w_len(*eid) } } -impl WCodec<(&ext::EntityIdType<{ ID }>, bool), &mut W> for Zenoh080 +impl WCodec<(&ext::EntityGlobalIdType<{ ID }>, bool), &mut W> for Zenoh080 where W: Writer, { type 
Output = Result<(), DidntWrite>; - fn write(self, writer: &mut W, x: (&ext::EntityIdType<{ ID }>, bool)) -> Self::Output { + fn write(self, writer: &mut W, x: (&ext::EntityGlobalIdType<{ ID }>, bool)) -> Self::Output { let (x, more) = x; let header: ZExtZBufHeader<{ ID }> = ZExtZBufHeader::new(self.w_len(x)); self.write(&mut *writer, (&header, more))?; @@ -248,13 +248,13 @@ where } } -impl RCodec<(ext::EntityIdType<{ ID }>, bool), &mut R> for Zenoh080Header +impl RCodec<(ext::EntityGlobalIdType<{ ID }>, bool), &mut R> for Zenoh080Header where R: Reader, { type Error = DidntRead; - fn read(self, reader: &mut R) -> Result<(ext::EntityIdType<{ ID }>, bool), Self::Error> { + fn read(self, reader: &mut R) -> Result<(ext::EntityGlobalIdType<{ ID }>, bool), Self::Error> { let (_, more): (ZExtZBufHeader<{ ID }>, bool) = self.read(&mut *reader)?; let flags: u8 = self.codec.read(&mut *reader)?; @@ -263,8 +263,8 @@ where let lodec = Zenoh080Length::new(length); let zid: ZenohId = lodec.read(&mut *reader)?; - let eid: u32 = self.codec.read(&mut *reader)?; + let eid: EntityId = self.codec.read(&mut *reader)?; - Ok((ext::EntityIdType { zid, eid }, more)) + Ok((ext::EntityGlobalIdType { zid, eid }, more)) } } diff --git a/commons/zenoh-codec/src/zenoh/mod.rs b/commons/zenoh-codec/src/zenoh/mod.rs index fdff09be94..0d7146dc90 100644 --- a/commons/zenoh-codec/src/zenoh/mod.rs +++ b/commons/zenoh-codec/src/zenoh/mod.rs @@ -32,7 +32,7 @@ use zenoh_buffers::{ use zenoh_protocol::common::{iext, ZExtUnit}; use zenoh_protocol::{ common::{imsg, ZExtZBufHeader}, - core::{Encoding, ZenohId}, + core::{Encoding, EntityGlobalId, EntityId, ZenohId}, zenoh::{ext, id, PushBody, RequestBody, ResponseBody}, }; @@ -150,9 +150,9 @@ where // Extension: SourceInfo impl LCodec<&ext::SourceInfoType<{ ID }>> for Zenoh080 { fn w_len(self, x: &ext::SourceInfoType<{ ID }>) -> usize { - let ext::SourceInfoType { zid, eid, sn } = x; + let ext::SourceInfoType { id, sn } = x; - 1 + self.w_len(zid) + 
self.w_len(*eid) + self.w_len(*sn) + 1 + self.w_len(&id.zid) + self.w_len(id.eid) + self.w_len(*sn) } } @@ -164,18 +164,18 @@ where fn write(self, writer: &mut W, x: (&ext::SourceInfoType<{ ID }>, bool)) -> Self::Output { let (x, more) = x; - let ext::SourceInfoType { zid, eid, sn } = x; + let ext::SourceInfoType { id, sn } = x; let header: ZExtZBufHeader<{ ID }> = ZExtZBufHeader::new(self.w_len(x)); self.write(&mut *writer, (&header, more))?; - let flags: u8 = (zid.size() as u8 - 1) << 4; + let flags: u8 = (id.zid.size() as u8 - 1) << 4; self.write(&mut *writer, flags)?; - let lodec = Zenoh080Length::new(zid.size()); - lodec.write(&mut *writer, zid)?; + let lodec = Zenoh080Length::new(id.zid.size()); + lodec.write(&mut *writer, &id.zid)?; - self.write(&mut *writer, eid)?; + self.write(&mut *writer, id.eid)?; self.write(&mut *writer, sn)?; Ok(()) } @@ -196,10 +196,16 @@ where let lodec = Zenoh080Length::new(length); let zid: ZenohId = lodec.read(&mut *reader)?; - let eid: u32 = self.codec.read(&mut *reader)?; + let eid: EntityId = self.codec.read(&mut *reader)?; let sn: u32 = self.codec.read(&mut *reader)?; - Ok((ext::SourceInfoType { zid, eid, sn }, more)) + Ok(( + ext::SourceInfoType { + id: EntityGlobalId { zid, eid }, + sn, + }, + more, + )) } } diff --git a/commons/zenoh-protocol/src/core/mod.rs b/commons/zenoh-protocol/src/core/mod.rs index 82658db2fd..20fcf85dd9 100644 --- a/commons/zenoh-protocol/src/core/mod.rs +++ b/commons/zenoh-protocol/src/core/mod.rs @@ -261,6 +261,27 @@ impl<'de> serde::Deserialize<'de> for ZenohId { } } +/// The unique id of a zenoh entity inside it's parent [`Session`]. +pub type EntityId = u32; + +/// The global unique id of a zenoh entity. 
+#[derive(Debug, Default, Clone, Eq, Hash, PartialEq)] +pub struct EntityGlobalId { + pub zid: ZenohId, + pub eid: EntityId, +} + +impl EntityGlobalId { + #[cfg(feature = "test")] + pub fn rand() -> Self { + use rand::Rng; + Self { + zid: ZenohId::rand(), + eid: rand::thread_rng().gen(), + } + } +} + #[repr(u8)] #[derive(Debug, Default, Copy, Clone, Eq, Hash, PartialEq)] pub enum Priority { diff --git a/commons/zenoh-protocol/src/core/wire_expr.rs b/commons/zenoh-protocol/src/core/wire_expr.rs index 6d9623d6ca..a66b1aa212 100644 --- a/commons/zenoh-protocol/src/core/wire_expr.rs +++ b/commons/zenoh-protocol/src/core/wire_expr.rs @@ -71,6 +71,10 @@ impl<'a> WireExpr<'a> { } } + pub fn is_empty(&self) -> bool { + self.scope == 0 && self.suffix.as_ref().is_empty() + } + pub fn as_str(&'a self) -> &'a str { if self.scope == 0 { self.suffix.as_ref() diff --git a/commons/zenoh-protocol/src/network/declare.rs b/commons/zenoh-protocol/src/network/declare.rs index 8164d9440d..2dd8de4ef8 100644 --- a/commons/zenoh-protocol/src/network/declare.rs +++ b/commons/zenoh-protocol/src/network/declare.rs @@ -177,7 +177,6 @@ pub mod common { pub mod ext { use super::*; - // WARNING: this is a temporary and mandatory extension used for undeclarations pub type WireExprExt = zextzbuf!(0x0f, true); #[derive(Debug, Clone, PartialEq, Eq)] pub struct WireExprType { @@ -195,6 +194,10 @@ pub mod common { } } + pub fn is_null(&self) -> bool { + self.wire_expr.is_empty() + } + #[cfg(feature = "test")] pub fn rand() -> Self { Self { @@ -286,9 +289,11 @@ pub mod keyexpr { } pub mod subscriber { + use crate::core::EntityId; + use super::*; - pub type SubscriberId = u32; + pub type SubscriberId = EntityId; pub mod flag { pub const N: u8 = 1 << 5; // 0x20 Named if N==1 then the key expr has name/suffix @@ -441,7 +446,6 @@ pub mod subscriber { #[derive(Debug, Clone, PartialEq, Eq)] pub struct UndeclareSubscriber { pub id: SubscriberId, - // WARNING: this is a temporary and mandatory extension used 
for undeclarations pub ext_wire_expr: common::ext::WireExprType, } @@ -460,9 +464,11 @@ pub mod subscriber { } pub mod queryable { + use crate::core::EntityId; + use super::*; - pub type QueryableId = u32; + pub type QueryableId = EntityId; pub mod flag { pub const N: u8 = 1 << 5; // 0x20 Named if N==1 then the key expr has name/suffix @@ -597,7 +603,6 @@ pub mod queryable { #[derive(Debug, Clone, PartialEq, Eq)] pub struct UndeclareQueryable { pub id: QueryableId, - // WARNING: this is a temporary and mandatory extension used for undeclarations pub ext_wire_expr: common::ext::WireExprType, } @@ -683,7 +688,6 @@ pub mod token { #[derive(Debug, Clone, PartialEq, Eq)] pub struct UndeclareToken { pub id: TokenId, - // WARNING: this is a temporary and mandatory extension used for undeclarations pub ext_wire_expr: common::ext::WireExprType, } @@ -1097,7 +1101,6 @@ pub mod interest { #[derive(Debug, Clone, PartialEq, Eq)] pub struct UndeclareInterest { pub id: InterestId, - // WARNING: this is a temporary and mandatory extension used for undeclarations pub ext_wire_expr: common::ext::WireExprType, } diff --git a/commons/zenoh-protocol/src/network/mod.rs b/commons/zenoh-protocol/src/network/mod.rs index bb76cb8946..6af7fef243 100644 --- a/commons/zenoh-protocol/src/network/mod.rs +++ b/commons/zenoh-protocol/src/network/mod.rs @@ -200,7 +200,7 @@ impl From for NetworkMessage { pub mod ext { use crate::{ common::{imsg, ZExtZ64}, - core::{CongestionControl, Priority, ZenohId}, + core::{CongestionControl, EntityId, Priority, ZenohId}, }; use core::fmt; @@ -407,19 +407,19 @@ pub mod ext { /// % eid % /// +---------------+ #[derive(Debug, Clone, PartialEq, Eq)] - pub struct EntityIdType { + pub struct EntityGlobalIdType { pub zid: ZenohId, - pub eid: u32, + pub eid: EntityId, } - impl EntityIdType<{ ID }> { + impl EntityGlobalIdType<{ ID }> { #[cfg(feature = "test")] pub fn rand() -> Self { use rand::Rng; let mut rng = rand::thread_rng(); let zid = ZenohId::rand(); - let eid: 
u32 = rng.gen(); + let eid: EntityId = rng.gen(); Self { zid, eid } } } diff --git a/commons/zenoh-protocol/src/network/response.rs b/commons/zenoh-protocol/src/network/response.rs index 9ef2c26a10..6f0925429b 100644 --- a/commons/zenoh-protocol/src/network/response.rs +++ b/commons/zenoh-protocol/src/network/response.rs @@ -67,7 +67,7 @@ pub mod ext { pub type TimestampType = crate::network::ext::TimestampType<{ Timestamp::ID }>; pub type ResponderId = zextzbuf!(0x3, false); - pub type ResponderIdType = crate::network::ext::EntityIdType<{ ResponderId::ID }>; + pub type ResponderIdType = crate::network::ext::EntityGlobalIdType<{ ResponderId::ID }>; } impl Response { diff --git a/commons/zenoh-protocol/src/zenoh/mod.rs b/commons/zenoh-protocol/src/zenoh/mod.rs index 1284116888..3e5d573c43 100644 --- a/commons/zenoh-protocol/src/zenoh/mod.rs +++ b/commons/zenoh-protocol/src/zenoh/mod.rs @@ -158,7 +158,7 @@ impl From for ResponseBody { pub mod ext { use zenoh_buffers::ZBuf; - use crate::core::{Encoding, ZenohId}; + use crate::core::{Encoding, EntityGlobalId}; /// 7 6 5 4 3 2 1 0 /// +-+-+-+-+-+-+-+-+ @@ -172,8 +172,7 @@ pub mod ext { /// +---------------+ #[derive(Debug, Clone, PartialEq, Eq)] pub struct SourceInfoType { - pub zid: ZenohId, - pub eid: u32, + pub id: EntityGlobalId, pub sn: u32, } @@ -183,10 +182,9 @@ pub mod ext { use rand::Rng; let mut rng = rand::thread_rng(); - let zid = ZenohId::rand(); - let eid: u32 = rng.gen(); + let id = EntityGlobalId::rand(); let sn: u32 = rng.gen(); - Self { zid, eid, sn } + Self { id, sn } } } diff --git a/zenoh/src/lib.rs b/zenoh/src/lib.rs index bae81d3a54..eb1ba1bcd1 100644 --- a/zenoh/src/lib.rs +++ b/zenoh/src/lib.rs @@ -79,7 +79,7 @@ extern crate zenoh_core; #[macro_use] extern crate zenoh_result; -pub(crate) type Id = usize; +pub(crate) type Id = u32; use git_version::git_version; use handlers::DefaultHandler; diff --git a/zenoh/src/net/routing/dispatcher/face.rs b/zenoh/src/net/routing/dispatcher/face.rs index 
6ef5c063d0..79c9da9127 100644 --- a/zenoh/src/net/routing/dispatcher/face.rs +++ b/zenoh/src/net/routing/dispatcher/face.rs @@ -171,6 +171,7 @@ impl Primitives for Face { ctrl_lock.as_ref(), &self.tables, &mut self.state.clone(), + m.id, &m.wire_expr, &m.ext_info, msg.ext_nodeid.node_id, @@ -181,6 +182,7 @@ impl Primitives for Face { ctrl_lock.as_ref(), &self.tables, &mut self.state.clone(), + m.id, &m.ext_wire_expr.wire_expr, msg.ext_nodeid.node_id, ); @@ -190,6 +192,7 @@ impl Primitives for Face { ctrl_lock.as_ref(), &self.tables, &mut self.state.clone(), + m.id, &m.wire_expr, &m.ext_info, msg.ext_nodeid.node_id, @@ -200,6 +203,7 @@ impl Primitives for Face { ctrl_lock.as_ref(), &self.tables, &mut self.state.clone(), + m.id, &m.ext_wire_expr.wire_expr, msg.ext_nodeid.node_id, ); @@ -244,7 +248,7 @@ impl Primitives for Face { pull_data(&self.tables.tables, &self.state.clone(), msg.wire_expr); } _ => { - log::error!("Unsupported request"); + log::error!("{} Unsupported request!", self); } } } diff --git a/zenoh/src/net/routing/dispatcher/pubsub.rs b/zenoh/src/net/routing/dispatcher/pubsub.rs index d6497a80b3..c0d1bb4a34 100644 --- a/zenoh/src/net/routing/dispatcher/pubsub.rs +++ b/zenoh/src/net/routing/dispatcher/pubsub.rs @@ -22,7 +22,7 @@ use std::sync::RwLock; use zenoh_core::zread; use zenoh_protocol::core::key_expr::{keyexpr, OwnedKeyExpr}; use zenoh_protocol::network::declare::subscriber::ext::SubscriberInfo; -use zenoh_protocol::network::declare::Mode; +use zenoh_protocol::network::declare::{Mode, SubscriberId}; use zenoh_protocol::{ core::{WhatAmI, WireExpr}, network::{declare::ext, Push}, @@ -34,17 +34,24 @@ pub(crate) fn declare_subscription( hat_code: &(dyn HatTrait + Send + Sync), tables: &TablesLock, face: &mut Arc, + id: SubscriberId, expr: &WireExpr, sub_info: &SubscriberInfo, node_id: NodeId, ) { - log::debug!("Declare subscription {}", face); let rtables = zread!(tables.tables); match rtables .get_mapping(face, &expr.scope, expr.mapping) .cloned() 
{ Some(mut prefix) => { + log::debug!( + "{} Declare subscriber {} ({}{})", + face, + id, + prefix.expr(), + expr.suffix + ); let res = Resource::get_resource(&prefix, &expr.suffix); let (mut res, mut wtables) = if res.as_ref().map(|r| r.context.is_some()).unwrap_or(false) { @@ -66,7 +73,7 @@ pub(crate) fn declare_subscription( (res, wtables) }; - hat_code.declare_subscription(&mut wtables, face, &mut res, sub_info, node_id); + hat_code.declare_subscription(&mut wtables, face, id, &mut res, sub_info, node_id); disable_matches_data_routes(&mut wtables, &mut res); drop(wtables); @@ -86,7 +93,12 @@ pub(crate) fn declare_subscription( } drop(wtables); } - None => log::error!("Declare subscription for unknown scope {}!", expr.scope), + None => log::error!( + "{} Declare subscriber {} for unknown scope {}!", + face, + id, + expr.scope + ), } } @@ -94,41 +106,60 @@ pub(crate) fn undeclare_subscription( hat_code: &(dyn HatTrait + Send + Sync), tables: &TablesLock, face: &mut Arc, + id: SubscriberId, expr: &WireExpr, node_id: NodeId, ) { - log::debug!("Undeclare subscription {}", face); - let rtables = zread!(tables.tables); - match rtables.get_mapping(face, &expr.scope, expr.mapping) { - Some(prefix) => match Resource::get_resource(prefix, expr.suffix.as_ref()) { - Some(mut res) => { - drop(rtables); - let mut wtables = zwrite!(tables.tables); - - hat_code.undeclare_subscription(&mut wtables, face, &mut res, node_id); - - disable_matches_data_routes(&mut wtables, &mut res); - drop(wtables); - - let rtables = zread!(tables.tables); - let matches_data_routes = compute_matches_data_routes(&rtables, &res); - drop(rtables); - - let wtables = zwrite!(tables.tables); - for (mut res, data_routes, matching_pulls) in matches_data_routes { - get_mut_unchecked(&mut res) - .context_mut() - .update_data_routes(data_routes); - get_mut_unchecked(&mut res) - .context_mut() - .update_matching_pulls(matching_pulls); + let res = if expr.is_empty() { + None + } else { + let rtables = 
zread!(tables.tables); + match rtables.get_mapping(face, &expr.scope, expr.mapping) { + Some(prefix) => match Resource::get_resource(prefix, expr.suffix.as_ref()) { + Some(res) => Some(res), + None => { + log::error!( + "{} Undeclare unknown subscriber {}{}!", + face, + prefix.expr(), + expr.suffix + ); + return; } - Resource::clean(&mut res); - drop(wtables); + }, + None => { + log::error!( + "{} Undeclare subscriber with unknown scope {}", + face, + expr.scope + ); + return; } - None => log::error!("Undeclare unknown subscription!"), - }, - None => log::error!("Undeclare subscription with unknown scope!"), + } + }; + let mut wtables = zwrite!(tables.tables); + if let Some(mut res) = hat_code.undeclare_subscription(&mut wtables, face, id, res, node_id) { + log::debug!("{} Undeclare subscriber {} ({})", face, id, res.expr()); + disable_matches_data_routes(&mut wtables, &mut res); + drop(wtables); + + let rtables = zread!(tables.tables); + let matches_data_routes = compute_matches_data_routes(&rtables, &res); + drop(rtables); + + let wtables = zwrite!(tables.tables); + for (mut res, data_routes, matching_pulls) in matches_data_routes { + get_mut_unchecked(&mut res) + .context_mut() + .update_data_routes(data_routes); + get_mut_unchecked(&mut res) + .context_mut() + .update_matching_pulls(matching_pulls); + } + Resource::clean(&mut res); + drop(wtables); + } else { + log::error!("{} Undeclare unknown subscriber {}", face, id); } } @@ -445,7 +476,8 @@ pub fn full_reentrant_route_data( match tables.get_mapping(face, &expr.scope, expr.mapping).cloned() { Some(prefix) => { log::trace!( - "Route data for res {}{}", + "{} Route data for res {}{}", + face, prefix.expr(), expr.suffix.as_ref() ); @@ -561,7 +593,7 @@ pub fn full_reentrant_route_data( } } None => { - log::error!("Route data with unknown scope {}!", expr.scope); + log::error!("{} Route data with unknown scope {}!", face, expr.scope); } } } @@ -602,14 +634,16 @@ pub fn pull_data(tables_ref: &RwLock, face: &Arc, 
expr: WireE } None => { log::error!( - "Pull data for unknown subscription {} (no info)!", + "{} Pull data for unknown subscriber {} (no info)!", + face, prefix.expr() + expr.suffix.as_ref() ); } }, None => { log::error!( - "Pull data for unknown subscription {} (no context)!", + "{} Pull data for unknown subscriber {} (no context)!", + face, prefix.expr() + expr.suffix.as_ref() ); } @@ -617,13 +651,14 @@ pub fn pull_data(tables_ref: &RwLock, face: &Arc, expr: WireE } None => { log::error!( - "Pull data for unknown subscription {} (no resource)!", + "{} Pull data for unknown subscriber {} (no resource)!", + face, prefix.expr() + expr.suffix.as_ref() ); } }, None => { - log::error!("Pull data with unknown scope {}!", expr.scope); + log::error!("{} Pull data with unknown scope {}!", face, expr.scope); } }; } diff --git a/zenoh/src/net/routing/dispatcher/queries.rs b/zenoh/src/net/routing/dispatcher/queries.rs index b0f7f7f7ef..287621151a 100644 --- a/zenoh/src/net/routing/dispatcher/queries.rs +++ b/zenoh/src/net/routing/dispatcher/queries.rs @@ -21,16 +21,14 @@ use async_trait::async_trait; use std::collections::HashMap; use std::sync::{Arc, Weak}; use zenoh_config::WhatAmI; -use zenoh_protocol::zenoh::reply::ReplyBody; -use zenoh_protocol::zenoh::Put; use zenoh_protocol::{ core::{key_expr::keyexpr, Encoding, WireExpr}, network::{ - declare::{ext, queryable::ext::QueryableInfo}, + declare::{ext, queryable::ext::QueryableInfo, QueryableId}, request::{ext::TargetType, Request, RequestId}, response::{self, ext::ResponderIdType, Response, ResponseFinal}, }, - zenoh::{query::Consolidation, Reply, RequestBody, ResponseBody}, + zenoh::{query::Consolidation, reply::ReplyBody, Put, Reply, RequestBody, ResponseBody}, }; use zenoh_sync::get_mut_unchecked; use zenoh_util::Timed; @@ -44,17 +42,24 @@ pub(crate) fn declare_queryable( hat_code: &(dyn HatTrait + Send + Sync), tables: &TablesLock, face: &mut Arc, + id: QueryableId, expr: &WireExpr, qabl_info: &QueryableInfo, node_id: 
NodeId, ) { - log::debug!("Register queryable {}", face); let rtables = zread!(tables.tables); match rtables .get_mapping(face, &expr.scope, expr.mapping) .cloned() { Some(mut prefix) => { + log::debug!( + "{} Declare queryable {} ({}{})", + face, + id, + prefix.expr(), + expr.suffix + ); let res = Resource::get_resource(&prefix, &expr.suffix); let (mut res, mut wtables) = if res.as_ref().map(|r| r.context.is_some()).unwrap_or(false) { @@ -76,7 +81,7 @@ pub(crate) fn declare_queryable( (res, wtables) }; - hat_code.declare_queryable(&mut wtables, face, &mut res, qabl_info, node_id); + hat_code.declare_queryable(&mut wtables, face, id, &mut res, qabl_info, node_id); disable_matches_query_routes(&mut wtables, &mut res); drop(wtables); @@ -93,7 +98,12 @@ pub(crate) fn declare_queryable( } drop(wtables); } - None => log::error!("Declare queryable for unknown scope {}!", expr.scope), + None => log::error!( + "{} Declare queryable {} for unknown scope {}!", + face, + id, + expr.scope + ), } } @@ -101,37 +111,57 @@ pub(crate) fn undeclare_queryable( hat_code: &(dyn HatTrait + Send + Sync), tables: &TablesLock, face: &mut Arc, + id: QueryableId, expr: &WireExpr, node_id: NodeId, ) { - let rtables = zread!(tables.tables); - match rtables.get_mapping(face, &expr.scope, expr.mapping) { - Some(prefix) => match Resource::get_resource(prefix, expr.suffix.as_ref()) { - Some(mut res) => { - drop(rtables); - let mut wtables = zwrite!(tables.tables); - - hat_code.undeclare_queryable(&mut wtables, face, &mut res, node_id); - - disable_matches_query_routes(&mut wtables, &mut res); - drop(wtables); - - let rtables = zread!(tables.tables); - let matches_query_routes = compute_matches_query_routes(&rtables, &res); - drop(rtables); - - let wtables = zwrite!(tables.tables); - for (mut res, query_routes) in matches_query_routes { - get_mut_unchecked(&mut res) - .context_mut() - .update_query_routes(query_routes); + let res = if expr.is_empty() { + None + } else { + let rtables = 
zread!(tables.tables); + match rtables.get_mapping(face, &expr.scope, expr.mapping) { + Some(prefix) => match Resource::get_resource(prefix, expr.suffix.as_ref()) { + Some(res) => Some(res), + None => { + log::error!( + "{} Undeclare unknown queryable {}{}!", + face, + prefix.expr(), + expr.suffix + ); + return; } - Resource::clean(&mut res); - drop(wtables); + }, + None => { + log::error!( + "{} Undeclare queryable with unknown scope {}", + face, + expr.scope + ); + return; } - None => log::error!("Undeclare unknown queryable!"), - }, - None => log::error!("Undeclare queryable with unknown scope!"), + } + }; + let mut wtables = zwrite!(tables.tables); + if let Some(mut res) = hat_code.undeclare_queryable(&mut wtables, face, id, res, node_id) { + log::debug!("{} Undeclare queryable {} ({})", face, id, res.expr()); + disable_matches_query_routes(&mut wtables, &mut res); + drop(wtables); + + let rtables = zread!(tables.tables); + let matches_query_routes = compute_matches_query_routes(&rtables, &res); + drop(rtables); + + let wtables = zwrite!(tables.tables); + for (mut res, query_routes) in matches_query_routes { + get_mut_unchecked(&mut res) + .context_mut() + .update_query_routes(query_routes); + } + Resource::clean(&mut res); + drop(wtables); + } else { + log::error!("{} Undeclare unknown queryable {}", face, id); } } @@ -586,7 +616,7 @@ pub fn route_query( ext_tstamp: None, ext_respid: Some(response::ext::ResponderIdType { zid, - eid: 0, // @TODO use proper ResponderId (#703) + eid: 0, // 0 is reserved for routing core }), }, expr.full_expr().to_string(), @@ -701,8 +731,9 @@ pub fn route_query( } None => { log::error!( - "Route query with unknown scope {}! Send final reply.", - expr.scope + "{} Route query with unknown scope {}! 
Send final reply.", + face, + expr.scope, ); drop(rtables); face.primitives diff --git a/zenoh/src/net/routing/dispatcher/resource.rs b/zenoh/src/net/routing/dispatcher/resource.rs index 813d72a661..9f43841025 100644 --- a/zenoh/src/net/routing/dispatcher/resource.rs +++ b/zenoh/src/net/routing/dispatcher/resource.rs @@ -667,7 +667,11 @@ pub fn register_expr( let mut fullexpr = prefix.expr(); fullexpr.push_str(expr.suffix.as_ref()); if res.expr() != fullexpr { - log::error!("Resource {} remapped. Remapping unsupported!", expr_id); + log::error!( + "{} Resource {} remapped. Remapping unsupported!", + face, + expr_id + ); } } None => { @@ -718,7 +722,11 @@ pub fn register_expr( drop(wtables); } }, - None => log::error!("Declare resource with unknown scope {}!", expr.scope), + None => log::error!( + "{} Declare resource with unknown scope {}!", + face, + expr.scope + ), } } @@ -726,7 +734,7 @@ pub fn unregister_expr(tables: &TablesLock, face: &mut Arc, expr_id: let wtables = zwrite!(tables.tables); match get_mut_unchecked(face).remote_mappings.remove(&expr_id) { Some(mut res) => Resource::clean(&mut res), - None => log::error!("Undeclare unknown resource!"), + None => log::error!("{} Undeclare unknown resource!", face), } drop(wtables); } diff --git a/zenoh/src/net/routing/hat/client/mod.rs b/zenoh/src/net/routing/hat/client/mod.rs index aa83c34f5d..05210bcaee 100644 --- a/zenoh/src/net/routing/hat/client/mod.rs +++ b/zenoh/src/net/routing/hat/client/mod.rs @@ -40,11 +40,11 @@ use super::{ }; use std::{ any::Any, - collections::{HashMap, HashSet}, - sync::Arc, + collections::HashMap, + sync::{atomic::AtomicU32, Arc}, }; use zenoh_config::WhatAmI; -use zenoh_protocol::network::declare::queryable::ext::QueryableInfo; +use zenoh_protocol::network::declare::{queryable::ext::QueryableInfo, QueryableId, SubscriberId}; use zenoh_protocol::network::Oam; use zenoh_result::ZResult; use zenoh_sync::get_mut_unchecked; @@ -131,7 +131,7 @@ impl HatBaseTrait for HatCode { 
face.local_mappings.clear(); let mut subs_matches = vec![]; - for mut res in face + for (_id, mut res) in face .hat .downcast_mut::() .unwrap() @@ -159,7 +159,7 @@ impl HatBaseTrait for HatCode { } let mut qabls_matches = vec![]; - for mut res in face + for (_id, mut res) in face .hat .downcast_mut::() .unwrap() @@ -290,19 +290,21 @@ impl HatContext { } struct HatFace { - local_subs: HashSet>, - remote_subs: HashSet>, - local_qabls: HashMap, QueryableInfo>, - remote_qabls: HashSet>, + next_id: AtomicU32, // @TODO: manage rollover and uniqueness + local_subs: HashMap, SubscriberId>, + remote_subs: HashMap>, + local_qabls: HashMap, (QueryableId, QueryableInfo)>, + remote_qabls: HashMap>, } impl HatFace { fn new() -> Self { Self { - local_subs: HashSet::new(), - remote_subs: HashSet::new(), + next_id: AtomicU32::new(0), + local_subs: HashMap::new(), + remote_subs: HashMap::new(), local_qabls: HashMap::new(), - remote_qabls: HashSet::new(), + remote_qabls: HashMap::new(), } } } diff --git a/zenoh/src/net/routing/hat/client/pubsub.rs b/zenoh/src/net/routing/hat/client/pubsub.rs index 828915018d..f9f827ecc5 100644 --- a/zenoh/src/net/routing/hat/client/pubsub.rs +++ b/zenoh/src/net/routing/hat/client/pubsub.rs @@ -22,8 +22,10 @@ use crate::net::routing::router::RoutesIndexes; use crate::net::routing::{RoutingContext, PREFIX_LIVELINESS}; use std::borrow::Cow; use std::collections::{HashMap, HashSet}; +use std::sync::atomic::Ordering; use std::sync::Arc; use zenoh_protocol::core::key_expr::OwnedKeyExpr; +use zenoh_protocol::network::declare::SubscriberId; use zenoh_protocol::{ core::{Reliability, WhatAmI}, network::declare::{ @@ -43,10 +45,11 @@ fn propagate_simple_subscription_to( ) { if (src_face.id != dst_face.id || (dst_face.whatami == WhatAmI::Client && res.expr().starts_with(PREFIX_LIVELINESS))) - && !face_hat!(dst_face).local_subs.contains(res) + && !face_hat!(dst_face).local_subs.contains_key(res) && (src_face.whatami == WhatAmI::Client || dst_face.whatami == 
WhatAmI::Client) { - face_hat_mut!(dst_face).local_subs.insert(res.clone()); + let id = face_hat!(dst_face).next_id.fetch_add(1, Ordering::SeqCst); + face_hat_mut!(dst_face).local_subs.insert(res.clone(), id); let key_expr = Resource::decl_key(res, dst_face); dst_face.primitives.send_declare(RoutingContext::with_expr( Declare { @@ -54,7 +57,7 @@ fn propagate_simple_subscription_to( ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareSubscriber(DeclareSubscriber { - id: 0, // @TODO use proper SubscriberId (#703) + id, wire_expr: key_expr, ext_info: *sub_info, }), @@ -83,13 +86,13 @@ fn propagate_simple_subscription( fn register_client_subscription( _tables: &mut Tables, face: &mut Arc, + id: SubscriberId, res: &mut Arc, sub_info: &SubscriberInfo, ) { // Register subscription { let res = get_mut_unchecked(res); - log::debug!("Register subscription {} for {}", res.expr(), face); match res.session_ctxs.get_mut(&face.id) { Some(ctx) => match &ctx.subs { Some(info) => { @@ -118,16 +121,17 @@ fn register_client_subscription( } } } - face_hat_mut!(face).remote_subs.insert(res.clone()); + face_hat_mut!(face).remote_subs.insert(id, res.clone()); } fn declare_client_subscription( tables: &mut Tables, face: &mut Arc, + id: SubscriberId, res: &mut Arc, sub_info: &SubscriberInfo, ) { - register_client_subscription(tables, face, res, sub_info); + register_client_subscription(tables, face, id, res, sub_info); let mut propa_sub_info = *sub_info; propa_sub_info.mode = Mode::Push; @@ -144,7 +148,7 @@ fn declare_client_subscription( ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareSubscriber(DeclareSubscriber { - id: 0, // @TODO use proper SubscriberId (#703) + id: 0, // @TODO use proper SubscriberId wire_expr: res.expr().into(), ext_info: *sub_info, }), @@ -170,21 +174,19 @@ fn client_subs(res: &Arc) -> Vec> { fn propagate_forget_simple_subscription(tables: &mut Tables, res: &Arc) { for face in tables.faces.values_mut() { 
- if face_hat!(face).local_subs.contains(res) { - let wire_expr = Resource::get_best_key(res, "", face.id); + if let Some(id) = face_hat_mut!(face).local_subs.remove(res) { face.primitives.send_declare(RoutingContext::with_expr( Declare { ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::UndeclareSubscriber(UndeclareSubscriber { - id: 0, // @TODO use proper SubscriberId (#703) - ext_wire_expr: WireExprType { wire_expr }, + id, + ext_wire_expr: WireExprType::null(), }), }, res.expr(), )); - face_hat_mut!(face).local_subs.remove(res); } } } @@ -194,45 +196,48 @@ pub(super) fn undeclare_client_subscription( face: &mut Arc, res: &mut Arc, ) { - log::debug!("Unregister client subscription {} for {}", res.expr(), face); - if let Some(ctx) = get_mut_unchecked(res).session_ctxs.get_mut(&face.id) { - get_mut_unchecked(ctx).subs = None; - } - face_hat_mut!(face).remote_subs.remove(res); - - let mut client_subs = client_subs(res); - if client_subs.is_empty() { - propagate_forget_simple_subscription(tables, res); - } - if client_subs.len() == 1 { - let face = &mut client_subs[0]; - if face_hat!(face).local_subs.contains(res) - && !(face.whatami == WhatAmI::Client && res.expr().starts_with(PREFIX_LIVELINESS)) - { - let wire_expr = Resource::get_best_key(res, "", face.id); - face.primitives.send_declare(RoutingContext::with_expr( - Declare { - ext_qos: ext::QoSType::DECLARE, - ext_tstamp: None, - ext_nodeid: ext::NodeIdType::DEFAULT, - body: DeclareBody::UndeclareSubscriber(UndeclareSubscriber { - id: 0, // @TODO use proper SubscriberId (#703) - ext_wire_expr: WireExprType { wire_expr }, - }), - }, - res.expr(), - )); + if !face_hat_mut!(face).remote_subs.values().any(|s| *s == *res) { + if let Some(ctx) = get_mut_unchecked(res).session_ctxs.get_mut(&face.id) { + get_mut_unchecked(ctx).subs = None; + } - face_hat_mut!(face).local_subs.remove(res); + let mut client_subs = client_subs(res); + if client_subs.is_empty() { + 
propagate_forget_simple_subscription(tables, res); + } + if client_subs.len() == 1 { + let face = &mut client_subs[0]; + if !(face.whatami == WhatAmI::Client && res.expr().starts_with(PREFIX_LIVELINESS)) { + if let Some(id) = face_hat_mut!(face).local_subs.remove(res) { + face.primitives.send_declare(RoutingContext::with_expr( + Declare { + ext_qos: ext::QoSType::DECLARE, + ext_tstamp: None, + ext_nodeid: ext::NodeIdType::DEFAULT, + body: DeclareBody::UndeclareSubscriber(UndeclareSubscriber { + id, + ext_wire_expr: WireExprType::null(), + }), + }, + res.expr(), + )); + } + } } } } + fn forget_client_subscription( tables: &mut Tables, face: &mut Arc, - res: &mut Arc, -) { - undeclare_client_subscription(tables, face, res); + id: SubscriberId, +) -> Option> { + if let Some(mut res) = face_hat_mut!(face).remote_subs.remove(&id) { + undeclare_client_subscription(tables, face, &mut res); + Some(res) + } else { + None + } } pub(super) fn pubsub_new_face(tables: &mut Tables, face: &mut Arc) { @@ -246,7 +251,7 @@ pub(super) fn pubsub_new_face(tables: &mut Tables, face: &mut Arc) { .cloned() .collect::>>() { - for sub in &face_hat!(src_face).remote_subs { + for sub in face_hat!(src_face).remote_subs.values() { propagate_simple_subscription_to(tables, face, sub, &sub_info, &mut src_face.clone()); } } @@ -257,27 +262,29 @@ impl HatPubSubTrait for HatCode { &self, tables: &mut Tables, face: &mut Arc, + id: SubscriberId, res: &mut Arc, sub_info: &SubscriberInfo, _node_id: NodeId, ) { - declare_client_subscription(tables, face, res, sub_info); + declare_client_subscription(tables, face, id, res, sub_info); } fn undeclare_subscription( &self, tables: &mut Tables, face: &mut Arc, - res: &mut Arc, + id: SubscriberId, + _res: Option>, _node_id: NodeId, - ) { - forget_client_subscription(tables, face, res); + ) -> Option> { + forget_client_subscription(tables, face, id) } fn get_subscriptions(&self, tables: &Tables) -> Vec> { let mut subs = HashSet::new(); for src_face in 
tables.faces.values() { - for sub in &face_hat!(src_face).remote_subs { + for sub in face_hat!(src_face).remote_subs.values() { subs.insert(sub.clone()); } } diff --git a/zenoh/src/net/routing/hat/client/queries.rs b/zenoh/src/net/routing/hat/client/queries.rs index c6dfc34eac..4964a8880a 100644 --- a/zenoh/src/net/routing/hat/client/queries.rs +++ b/zenoh/src/net/routing/hat/client/queries.rs @@ -23,10 +23,12 @@ use crate::net::routing::{RoutingContext, PREFIX_LIVELINESS}; use ordered_float::OrderedFloat; use std::borrow::Cow; use std::collections::{HashMap, HashSet}; +use std::sync::atomic::Ordering; use std::sync::Arc; use zenoh_buffers::ZBuf; use zenoh_protocol::core::key_expr::include::{Includer, DEFAULT_INCLUDER}; use zenoh_protocol::core::key_expr::OwnedKeyExpr; +use zenoh_protocol::network::declare::QueryableId; use zenoh_protocol::{ core::{WhatAmI, WireExpr}, network::declare::{ @@ -83,16 +85,19 @@ fn propagate_simple_queryable( let faces = tables.faces.values().cloned(); for mut dst_face in faces { let info = local_qabl_info(tables, res, &dst_face); - let current_info = face_hat!(dst_face).local_qabls.get(res); + let current = face_hat!(dst_face).local_qabls.get(res); if (src_face.is_none() || src_face.as_ref().unwrap().id != dst_face.id) - && (current_info.is_none() || *current_info.unwrap() != info) + && (current.is_none() || current.unwrap().1 != info) && (src_face.is_none() || src_face.as_ref().unwrap().whatami == WhatAmI::Client || dst_face.whatami == WhatAmI::Client) { + let id = current + .map(|c| c.0) + .unwrap_or(face_hat!(dst_face).next_id.fetch_add(1, Ordering::SeqCst)); face_hat_mut!(&mut dst_face) .local_qabls - .insert(res.clone(), info); + .insert(res.clone(), (id, info)); let key_expr = Resource::decl_key(res, &mut dst_face); dst_face.primitives.send_declare(RoutingContext::with_expr( Declare { @@ -100,7 +105,7 @@ fn propagate_simple_queryable( ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, body: 
DeclareBody::DeclareQueryable(DeclareQueryable { - id: 0, // @TODO use proper QueryableId (#703) + id, wire_expr: key_expr, ext_info: info, }), @@ -114,13 +119,13 @@ fn propagate_simple_queryable( fn register_client_queryable( _tables: &mut Tables, face: &mut Arc, + id: QueryableId, res: &mut Arc, qabl_info: &QueryableInfo, ) { // Register queryable { let res = get_mut_unchecked(res); - log::debug!("Register queryable {} (face: {})", res.expr(), face,); get_mut_unchecked(res.session_ctxs.entry(face.id).or_insert_with(|| { Arc::new(SessionContext { face: face.clone(), @@ -135,16 +140,17 @@ fn register_client_queryable( })) .qabl = Some(*qabl_info); } - face_hat_mut!(face).remote_qabls.insert(res.clone()); + face_hat_mut!(face).remote_qabls.insert(id, res.clone()); } fn declare_client_queryable( tables: &mut Tables, face: &mut Arc, + id: QueryableId, res: &mut Arc, qabl_info: &QueryableInfo, ) { - register_client_queryable(tables, face, res, qabl_info); + register_client_queryable(tables, face, id, res, qabl_info); propagate_simple_queryable(tables, res, Some(face)); } @@ -164,22 +170,19 @@ fn client_qabls(res: &Arc) -> Vec> { fn propagate_forget_simple_queryable(tables: &mut Tables, res: &mut Arc) { for face in tables.faces.values_mut() { - if face_hat!(face).local_qabls.contains_key(res) { - let wire_expr = Resource::get_best_key(res, "", face.id); + if let Some((id, _)) = face_hat_mut!(face).local_qabls.remove(res) { face.primitives.send_declare(RoutingContext::with_expr( Declare { ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::UndeclareQueryable(UndeclareQueryable { - id: 0, // @TODO use proper QueryableId (#703) - ext_wire_expr: WireExprType { wire_expr }, + id, + ext_wire_expr: WireExprType::null(), }), }, res.expr(), )); - - face_hat_mut!(face).local_qabls.remove(res); } } } @@ -189,38 +192,37 @@ pub(super) fn undeclare_client_queryable( face: &mut Arc, res: &mut Arc, ) { - log::debug!("Unregister 
client queryable {} for {}", res.expr(), face); - if let Some(ctx) = get_mut_unchecked(res).session_ctxs.get_mut(&face.id) { - get_mut_unchecked(ctx).qabl = None; - if ctx.qabl.is_none() { - face_hat_mut!(face).remote_qabls.remove(res); + if !face_hat_mut!(face) + .remote_qabls + .values() + .any(|s| *s == *res) + { + if let Some(ctx) = get_mut_unchecked(res).session_ctxs.get_mut(&face.id) { + get_mut_unchecked(ctx).qabl = None; } - } - let mut client_qabls = client_qabls(res); - if client_qabls.is_empty() { - propagate_forget_simple_queryable(tables, res); - } else { - propagate_simple_queryable(tables, res, None); - } - if client_qabls.len() == 1 { - let face = &mut client_qabls[0]; - if face_hat!(face).local_qabls.contains_key(res) { - let wire_expr = Resource::get_best_key(res, "", face.id); - face.primitives.send_declare(RoutingContext::with_expr( - Declare { - ext_qos: ext::QoSType::DECLARE, - ext_tstamp: None, - ext_nodeid: ext::NodeIdType::DEFAULT, - body: DeclareBody::UndeclareQueryable(UndeclareQueryable { - id: 0, // @TODO use proper QueryableId (#703) - ext_wire_expr: WireExprType { wire_expr }, - }), - }, - res.expr(), - )); - - face_hat_mut!(face).local_qabls.remove(res); + let mut client_qabls = client_qabls(res); + if client_qabls.is_empty() { + propagate_forget_simple_queryable(tables, res); + } else { + propagate_simple_queryable(tables, res, None); + } + if client_qabls.len() == 1 { + let face = &mut client_qabls[0]; + if let Some((id, _)) = face_hat_mut!(face).local_qabls.remove(res) { + face.primitives.send_declare(RoutingContext::with_expr( + Declare { + ext_qos: ext::QoSType::DECLARE, + ext_tstamp: None, + ext_nodeid: ext::NodeIdType::DEFAULT, + body: DeclareBody::UndeclareQueryable(UndeclareQueryable { + id, + ext_wire_expr: WireExprType::null(), + }), + }, + res.expr(), + )); + } } } } @@ -228,9 +230,14 @@ pub(super) fn undeclare_client_queryable( fn forget_client_queryable( tables: &mut Tables, face: &mut Arc, - res: &mut Arc, -) { - 
undeclare_client_queryable(tables, face, res); + id: QueryableId, +) -> Option> { + if let Some(mut res) = face_hat_mut!(face).remote_qabls.remove(&id) { + undeclare_client_queryable(tables, face, &mut res); + Some(res) + } else { + None + } } pub(super) fn queries_new_face(tables: &mut Tables, _face: &mut Arc) { @@ -240,7 +247,7 @@ pub(super) fn queries_new_face(tables: &mut Tables, _face: &mut Arc) .cloned() .collect::>>() { - for qabl in face_hat!(face).remote_qabls.iter() { + for qabl in face_hat!(face).remote_qabls.values() { propagate_simple_queryable(tables, qabl, Some(&mut face.clone())); } } @@ -255,27 +262,29 @@ impl HatQueriesTrait for HatCode { &self, tables: &mut Tables, face: &mut Arc, + id: QueryableId, res: &mut Arc, qabl_info: &QueryableInfo, _node_id: NodeId, ) { - declare_client_queryable(tables, face, res, qabl_info); + declare_client_queryable(tables, face, id, res, qabl_info); } fn undeclare_queryable( &self, tables: &mut Tables, face: &mut Arc, - res: &mut Arc, + id: QueryableId, + _res: Option>, _node_id: NodeId, - ) { - forget_client_queryable(tables, face, res); + ) -> Option> { + forget_client_queryable(tables, face, id) } fn get_queryables(&self, tables: &Tables) -> Vec> { let mut qabls = HashSet::new(); for src_face in tables.faces.values() { - for qabl in &face_hat!(src_face).remote_qabls { + for qabl in face_hat!(src_face).remote_qabls.values() { qabls.insert(qabl.clone()); } } diff --git a/zenoh/src/net/routing/hat/linkstate_peer/mod.rs b/zenoh/src/net/routing/hat/linkstate_peer/mod.rs index a655d2f0a3..5591ea3b3e 100644 --- a/zenoh/src/net/routing/hat/linkstate_peer/mod.rs +++ b/zenoh/src/net/routing/hat/linkstate_peer/mod.rs @@ -47,12 +47,16 @@ use async_std::task::JoinHandle; use std::{ any::Any, collections::{HashMap, HashSet}, - sync::Arc, + sync::{atomic::AtomicU32, Arc}, }; use zenoh_config::{unwrap_or_default, ModeDependent, WhatAmI, WhatAmIMatcher, ZenohId}; use zenoh_protocol::{ common::ZExtBody, - 
network::{declare::queryable::ext::QueryableInfo, oam::id::OAM_LINKSTATE, Oam}, + network::{ + declare::{queryable::ext::QueryableInfo, QueryableId, SubscriberId}, + oam::id::OAM_LINKSTATE, + Oam, + }, }; use zenoh_result::ZResult; use zenoh_sync::get_mut_unchecked; @@ -126,7 +130,6 @@ impl HatTables { } fn schedule_compute_trees(&mut self, tables_ref: Arc) { - log::trace!("Schedule computations"); if self.peers_trees_task.is_none() { let task = Some(async_std::task::spawn(async move { async_std::task::sleep(std::time::Duration::from_millis( @@ -142,7 +145,6 @@ impl HatTables { pubsub::pubsub_tree_change(&mut tables, &new_childs); queries::queries_tree_change(&mut tables, &new_childs); - log::trace!("Computations completed"); hat_mut!(tables).peers_trees_task = None; })); self.peers_trees_task = task; @@ -248,7 +250,7 @@ impl HatBaseTrait for HatCode { face.local_mappings.clear(); let mut subs_matches = vec![]; - for mut res in face + for (_id, mut res) in face .hat .downcast_mut::() .unwrap() @@ -276,7 +278,7 @@ impl HatBaseTrait for HatCode { } let mut qabls_matches = vec![]; - for mut res in face + for (_, mut res) in face .hat .downcast_mut::() .unwrap() @@ -471,20 +473,22 @@ impl HatContext { struct HatFace { link_id: usize, - local_subs: HashSet>, - remote_subs: HashSet>, - local_qabls: HashMap, QueryableInfo>, - remote_qabls: HashSet>, + next_id: AtomicU32, // @TODO: manage rollover and uniqueness + local_subs: HashMap, SubscriberId>, + remote_subs: HashMap>, + local_qabls: HashMap, (QueryableId, QueryableInfo)>, + remote_qabls: HashMap>, } impl HatFace { fn new() -> Self { Self { link_id: 0, - local_subs: HashSet::new(), - remote_subs: HashSet::new(), + next_id: AtomicU32::new(0), + local_subs: HashMap::new(), + remote_subs: HashMap::new(), local_qabls: HashMap::new(), - remote_qabls: HashSet::new(), + remote_qabls: HashMap::new(), } } } diff --git a/zenoh/src/net/routing/hat/linkstate_peer/pubsub.rs b/zenoh/src/net/routing/hat/linkstate_peer/pubsub.rs 
index c364f7359f..9a41915333 100644 --- a/zenoh/src/net/routing/hat/linkstate_peer/pubsub.rs +++ b/zenoh/src/net/routing/hat/linkstate_peer/pubsub.rs @@ -25,8 +25,10 @@ use crate::net::routing::{RoutingContext, PREFIX_LIVELINESS}; use petgraph::graph::NodeIndex; use std::borrow::Cow; use std::collections::{HashMap, HashSet}; +use std::sync::atomic::Ordering; use std::sync::Arc; use zenoh_protocol::core::key_expr::OwnedKeyExpr; +use zenoh_protocol::network::declare::SubscriberId; use zenoh_protocol::{ core::{Reliability, WhatAmI, ZenohId}, network::declare::{ @@ -53,8 +55,6 @@ fn send_sourced_subscription_to_net_childs( if src_face.is_none() || someface.id != src_face.unwrap().id { let key_expr = Resource::decl_key(res, &mut someface); - log::debug!("Send subscription {} on {}", res.expr(), someface); - someface.primitives.send_declare(RoutingContext::with_expr( Declare { ext_qos: ext::QoSType::DECLARE, @@ -63,7 +63,7 @@ fn send_sourced_subscription_to_net_childs( node_id: routing_context, }, body: DeclareBody::DeclareSubscriber(DeclareSubscriber { - id: 0, // TODO + id: 0, // Sourced subscriptions do not use ids wire_expr: key_expr, ext_info: *sub_info, }), @@ -87,10 +87,11 @@ fn propagate_simple_subscription_to( src_face: &mut Arc, ) { if (src_face.id != dst_face.id || res.expr().starts_with(PREFIX_LIVELINESS)) - && !face_hat!(dst_face).local_subs.contains(res) + && !face_hat!(dst_face).local_subs.contains_key(res) && dst_face.whatami == WhatAmI::Client { - face_hat_mut!(dst_face).local_subs.insert(res.clone()); + let id = face_hat!(dst_face).next_id.fetch_add(1, Ordering::SeqCst); + face_hat_mut!(dst_face).local_subs.insert(res.clone(), id); let key_expr = Resource::decl_key(res, dst_face); dst_face.primitives.send_declare(RoutingContext::with_expr( Declare { @@ -98,7 +99,7 @@ fn propagate_simple_subscription_to( ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareSubscriber(DeclareSubscriber { - id: 0, // TODO + id, wire_expr: 
key_expr, ext_info: *sub_info, }), @@ -171,7 +172,6 @@ fn register_peer_subscription( if !res_hat!(res).peer_subs.contains(&peer) { // Register peer subscription { - log::debug!("Register peer subscription {} (peer: {})", res.expr(), peer); res_hat_mut!(res).peer_subs.insert(peer); hat_mut!(tables).peer_subs.insert(res.clone()); } @@ -199,13 +199,13 @@ fn declare_peer_subscription( fn register_client_subscription( _tables: &mut Tables, face: &mut Arc, + id: SubscriberId, res: &mut Arc, sub_info: &SubscriberInfo, ) { // Register subscription { let res = get_mut_unchecked(res); - log::debug!("Register subscription {} for {}", res.expr(), face); match res.session_ctxs.get_mut(&face.id) { Some(ctx) => match &ctx.subs { Some(info) => { @@ -234,16 +234,17 @@ fn register_client_subscription( } } } - face_hat_mut!(face).remote_subs.insert(res.clone()); + face_hat_mut!(face).remote_subs.insert(id, res.clone()); } fn declare_client_subscription( tables: &mut Tables, face: &mut Arc, + id: SubscriberId, res: &mut Arc, sub_info: &SubscriberInfo, ) { - register_client_subscription(tables, face, res, sub_info); + register_client_subscription(tables, face, id, res, sub_info); let mut propa_sub_info = *sub_info; propa_sub_info.mode = Mode::Push; let zid = tables.zid; @@ -289,8 +290,6 @@ fn send_forget_sourced_subscription_to_net_childs( if src_face.is_none() || someface.id != src_face.unwrap().id { let wire_expr = Resource::decl_key(res, &mut someface); - log::debug!("Send forget subscription {} on {}", res.expr(), someface); - someface.primitives.send_declare(RoutingContext::with_expr( Declare { ext_qos: ext::QoSType::DECLARE, @@ -299,7 +298,7 @@ fn send_forget_sourced_subscription_to_net_childs( node_id: routing_context.unwrap_or(0), }, body: DeclareBody::UndeclareSubscriber(UndeclareSubscriber { - id: 0, // TODO + id: 0, // Sourced subscriptions do not use ids ext_wire_expr: WireExprType { wire_expr }, }), }, @@ -315,21 +314,19 @@ fn 
send_forget_sourced_subscription_to_net_childs( fn propagate_forget_simple_subscription(tables: &mut Tables, res: &Arc) { for face in tables.faces.values_mut() { - if face_hat!(face).local_subs.contains(res) { - let wire_expr = Resource::get_best_key(res, "", face.id); + if let Some(id) = face_hat_mut!(face).local_subs.remove(res) { face.primitives.send_declare(RoutingContext::with_expr( Declare { ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::UndeclareSubscriber(UndeclareSubscriber { - id: 0, // TODO - ext_wire_expr: WireExprType { wire_expr }, + id, + ext_wire_expr: WireExprType::null(), }), }, res.expr(), )); - face_hat_mut!(face).local_subs.remove(res); } } } @@ -370,11 +367,6 @@ fn propagate_forget_sourced_subscription( } fn unregister_peer_subscription(tables: &mut Tables, res: &mut Arc, peer: &ZenohId) { - log::debug!( - "Unregister peer subscription {} (peer: {})", - res.expr(), - peer - ); res_hat_mut!(res).peer_subs.retain(|sub| sub != peer); if res_hat!(res).peer_subs.is_empty() { @@ -414,37 +406,34 @@ pub(super) fn undeclare_client_subscription( face: &mut Arc, res: &mut Arc, ) { - log::debug!("Unregister client subscription {} for {}", res.expr(), face); - if let Some(ctx) = get_mut_unchecked(res).session_ctxs.get_mut(&face.id) { - get_mut_unchecked(ctx).subs = None; - } - face_hat_mut!(face).remote_subs.remove(res); - - let mut client_subs = client_subs(res); - let peer_subs = remote_peer_subs(tables, res); - if client_subs.is_empty() { - undeclare_peer_subscription(tables, None, res, &tables.zid.clone()); - } - if client_subs.len() == 1 && !peer_subs { - let face = &mut client_subs[0]; - if face_hat!(face).local_subs.contains(res) - && !(face.whatami == WhatAmI::Client && res.expr().starts_with(PREFIX_LIVELINESS)) - { - let wire_expr = Resource::get_best_key(res, "", face.id); - face.primitives.send_declare(RoutingContext::with_expr( - Declare { - ext_qos: ext::QoSType::DECLARE, - 
ext_tstamp: None, - ext_nodeid: ext::NodeIdType::DEFAULT, - body: DeclareBody::UndeclareSubscriber(UndeclareSubscriber { - id: 0, // TODO - ext_wire_expr: WireExprType { wire_expr }, - }), - }, - res.expr(), - )); + if !face_hat_mut!(face).remote_subs.values().any(|s| *s == *res) { + if let Some(ctx) = get_mut_unchecked(res).session_ctxs.get_mut(&face.id) { + get_mut_unchecked(ctx).subs = None; + } - face_hat_mut!(face).local_subs.remove(res); + let mut client_subs = client_subs(res); + let peer_subs = remote_peer_subs(tables, res); + if client_subs.is_empty() { + undeclare_peer_subscription(tables, None, res, &tables.zid.clone()); + } + if client_subs.len() == 1 && !peer_subs { + let face = &mut client_subs[0]; + if !(face.whatami == WhatAmI::Client && res.expr().starts_with(PREFIX_LIVELINESS)) { + if let Some(id) = face_hat_mut!(face).local_subs.remove(res) { + face.primitives.send_declare(RoutingContext::with_expr( + Declare { + ext_qos: ext::QoSType::DECLARE, + ext_tstamp: None, + ext_nodeid: ext::NodeIdType::DEFAULT, + body: DeclareBody::UndeclareSubscriber(UndeclareSubscriber { + id, + ext_wire_expr: WireExprType::null(), + }), + }, + res.expr(), + )); + } + } } } } @@ -452,20 +441,26 @@ pub(super) fn undeclare_client_subscription( fn forget_client_subscription( tables: &mut Tables, face: &mut Arc, - res: &mut Arc, -) { - undeclare_client_subscription(tables, face, res); + id: SubscriberId, +) -> Option> { + if let Some(mut res) = face_hat_mut!(face).remote_subs.remove(&id) { + undeclare_client_subscription(tables, face, &mut res); + Some(res) + } else { + None + } } pub(super) fn pubsub_new_face(tables: &mut Tables, face: &mut Arc) { let sub_info = SubscriberInfo { - reliability: Reliability::Reliable, // @TODO + reliability: Reliability::Reliable, // @TODO compute proper reliability to propagate from reliability of known subscribers mode: Mode::Push, }; if face.whatami == WhatAmI::Client { for sub in &hat!(tables).peer_subs { - 
face_hat_mut!(face).local_subs.insert(sub.clone()); + let id = face_hat!(face).next_id.fetch_add(1, Ordering::SeqCst); + face_hat_mut!(face).local_subs.insert(sub.clone(), id); let key_expr = Resource::decl_key(sub, face); face.primitives.send_declare(RoutingContext::with_expr( Declare { @@ -473,7 +468,7 @@ pub(super) fn pubsub_new_face(tables: &mut Tables, face: &mut Arc) { ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareSubscriber(DeclareSubscriber { - id: 0, // TODO + id, wire_expr: key_expr, ext_info: sub_info, }), @@ -515,7 +510,7 @@ pub(super) fn pubsub_tree_change(tables: &mut Tables, new_childs: &[Vec, + id: SubscriberId, res: &mut Arc, sub_info: &SubscriberInfo, node_id: NodeId, @@ -585,7 +581,7 @@ impl HatPubSubTrait for HatCode { declare_peer_subscription(tables, face, res, sub_info, peer) } } else { - declare_client_subscription(tables, face, res, sub_info) + declare_client_subscription(tables, face, id, res, sub_info) } } @@ -593,15 +589,23 @@ impl HatPubSubTrait for HatCode { &self, tables: &mut Tables, face: &mut Arc, - res: &mut Arc, + id: SubscriberId, + res: Option>, node_id: NodeId, - ) { + ) -> Option> { if face.whatami != WhatAmI::Client { - if let Some(peer) = get_peer(tables, face, node_id) { - forget_peer_subscription(tables, face, res, &peer); + if let Some(mut res) = res { + if let Some(peer) = get_peer(tables, face, node_id) { + forget_peer_subscription(tables, face, &mut res, &peer); + Some(res) + } else { + None + } + } else { + None } } else { - forget_client_subscription(tables, face, res); + forget_client_subscription(tables, face, id) } } diff --git a/zenoh/src/net/routing/hat/linkstate_peer/queries.rs b/zenoh/src/net/routing/hat/linkstate_peer/queries.rs index 4192f87e55..51aac2175a 100644 --- a/zenoh/src/net/routing/hat/linkstate_peer/queries.rs +++ b/zenoh/src/net/routing/hat/linkstate_peer/queries.rs @@ -26,10 +26,12 @@ use ordered_float::OrderedFloat; use petgraph::graph::NodeIndex; use 
std::borrow::Cow; use std::collections::HashMap; +use std::sync::atomic::Ordering; use std::sync::Arc; use zenoh_buffers::ZBuf; use zenoh_protocol::core::key_expr::include::{Includer, DEFAULT_INCLUDER}; use zenoh_protocol::core::key_expr::OwnedKeyExpr; +use zenoh_protocol::network::declare::QueryableId; use zenoh_protocol::{ core::{WhatAmI, WireExpr, ZenohId}, network::declare::{ @@ -133,8 +135,6 @@ fn send_sourced_queryable_to_net_childs( if src_face.is_none() || someface.id != src_face.as_ref().unwrap().id { let key_expr = Resource::decl_key(res, &mut someface); - log::debug!("Send queryable {} on {}", res.expr(), someface); - someface.primitives.send_declare(RoutingContext::with_expr( Declare { ext_qos: ext::QoSType::DECLARE, @@ -143,7 +143,7 @@ fn send_sourced_queryable_to_net_childs( node_id: routing_context, }, body: DeclareBody::DeclareQueryable(DeclareQueryable { - id: 0, // @TODO use proper QueryableId (#703) + id: 0, // Sourced queryables do not use ids wire_expr: key_expr, ext_info: *qabl_info, }), @@ -166,14 +166,17 @@ fn propagate_simple_queryable( let faces = tables.faces.values().cloned(); for mut dst_face in faces { let info = local_qabl_info(tables, res, &dst_face); - let current_info = face_hat!(dst_face).local_qabls.get(res); + let current = face_hat!(dst_face).local_qabls.get(res); if (src_face.is_none() || src_face.as_ref().unwrap().id != dst_face.id) - && (current_info.is_none() || *current_info.unwrap() != info) + && (current.is_none() || current.unwrap().1 != info) && dst_face.whatami == WhatAmI::Client { + let id = current + .map(|c| c.0) + .unwrap_or(face_hat!(dst_face).next_id.fetch_add(1, Ordering::SeqCst)); face_hat_mut!(&mut dst_face) .local_qabls - .insert(res.clone(), info); + .insert(res.clone(), (id, info)); let key_expr = Resource::decl_key(res, &mut dst_face); dst_face.primitives.send_declare(RoutingContext::with_expr( Declare { @@ -181,7 +184,7 @@ fn propagate_simple_queryable( ext_tstamp: None, ext_nodeid: 
ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareQueryable(DeclareQueryable { - id: 0, // @TODO use proper QueryableId (#703) + id, wire_expr: key_expr, ext_info: info, }), @@ -240,7 +243,6 @@ fn register_peer_queryable( if current_info.is_none() || current_info.unwrap() != qabl_info { // Register peer queryable { - log::debug!("Register peer queryable {} (peer: {})", res.expr(), peer,); res_hat_mut!(res).peer_qabls.insert(peer, *qabl_info); hat_mut!(tables).peer_qabls.insert(res.clone()); } @@ -269,13 +271,13 @@ fn declare_peer_queryable( fn register_client_queryable( _tables: &mut Tables, face: &mut Arc, + id: QueryableId, res: &mut Arc, qabl_info: &QueryableInfo, ) { // Register queryable { let res = get_mut_unchecked(res); - log::debug!("Register queryable {} (face: {})", res.expr(), face,); get_mut_unchecked(res.session_ctxs.entry(face.id).or_insert_with(|| { Arc::new(SessionContext { face: face.clone(), @@ -290,17 +292,17 @@ fn register_client_queryable( })) .qabl = Some(*qabl_info); } - face_hat_mut!(face).remote_qabls.insert(res.clone()); + face_hat_mut!(face).remote_qabls.insert(id, res.clone()); } fn declare_client_queryable( tables: &mut Tables, face: &mut Arc, + id: QueryableId, res: &mut Arc, qabl_info: &QueryableInfo, ) { - register_client_queryable(tables, face, res, qabl_info); - + register_client_queryable(tables, face, id, res, qabl_info); let local_details = local_peer_qabl_info(tables, res); let zid = tables.zid; register_peer_queryable(tables, Some(face), res, &local_details, zid); @@ -345,8 +347,6 @@ fn send_forget_sourced_queryable_to_net_childs( if src_face.is_none() || someface.id != src_face.unwrap().id { let wire_expr = Resource::decl_key(res, &mut someface); - log::debug!("Send forget queryable {} on {}", res.expr(), someface); - someface.primitives.send_declare(RoutingContext::with_expr( Declare { ext_qos: ext::QoSType::DECLARE, @@ -355,7 +355,7 @@ fn send_forget_sourced_queryable_to_net_childs( node_id: routing_context, }, body: 
DeclareBody::UndeclareQueryable(UndeclareQueryable { - id: 0, // @TODO use proper QueryableId (#703) + id: 0, ext_wire_expr: WireExprType { wire_expr }, }), }, @@ -371,22 +371,19 @@ fn send_forget_sourced_queryable_to_net_childs( fn propagate_forget_simple_queryable(tables: &mut Tables, res: &mut Arc) { for face in tables.faces.values_mut() { - if face_hat!(face).local_qabls.contains_key(res) { - let wire_expr = Resource::get_best_key(res, "", face.id); + if let Some((id, _)) = face_hat_mut!(face).local_qabls.remove(res) { face.primitives.send_declare(RoutingContext::with_expr( Declare { ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::UndeclareQueryable(UndeclareQueryable { - id: 0, // @TODO use proper QueryableId (#703) - ext_wire_expr: WireExprType { wire_expr }, + id, + ext_wire_expr: WireExprType::null(), }), }, res.expr(), )); - - face_hat_mut!(face).local_qabls.remove(res); } } } @@ -427,7 +424,6 @@ fn propagate_forget_sourced_queryable( } fn unregister_peer_queryable(tables: &mut Tables, res: &mut Arc, peer: &ZenohId) { - log::debug!("Unregister peer queryable {} (peer: {})", res.expr(), peer,); res_hat_mut!(res).peer_qabls.remove(peer); if res_hat!(res).peer_qabls.is_empty() { @@ -467,42 +463,41 @@ pub(super) fn undeclare_client_queryable( face: &mut Arc, res: &mut Arc, ) { - log::debug!("Unregister client queryable {} for {}", res.expr(), face); - if let Some(ctx) = get_mut_unchecked(res).session_ctxs.get_mut(&face.id) { - get_mut_unchecked(ctx).qabl = None; - if ctx.qabl.is_none() { - face_hat_mut!(face).remote_qabls.remove(res); + if !face_hat_mut!(face) + .remote_qabls + .values() + .any(|s| *s == *res) + { + if let Some(ctx) = get_mut_unchecked(res).session_ctxs.get_mut(&face.id) { + get_mut_unchecked(ctx).qabl = None; } - } - let mut client_qabls = client_qabls(res); - let peer_qabls = remote_peer_qabls(tables, res); + let mut client_qabls = client_qabls(res); + let peer_qabls = 
remote_peer_qabls(tables, res); - if client_qabls.is_empty() { - undeclare_peer_queryable(tables, None, res, &tables.zid.clone()); - } else { - let local_info = local_peer_qabl_info(tables, res); - register_peer_queryable(tables, None, res, &local_info, tables.zid); - } - - if client_qabls.len() == 1 && !peer_qabls { - let face = &mut client_qabls[0]; - if face_hat!(face).local_qabls.contains_key(res) { - let wire_expr = Resource::get_best_key(res, "", face.id); - face.primitives.send_declare(RoutingContext::with_expr( - Declare { - ext_qos: ext::QoSType::DECLARE, - ext_tstamp: None, - ext_nodeid: ext::NodeIdType::DEFAULT, - body: DeclareBody::UndeclareQueryable(UndeclareQueryable { - id: 0, // @TODO use proper QueryableId (#703) - ext_wire_expr: WireExprType { wire_expr }, - }), - }, - res.expr(), - )); + if client_qabls.is_empty() { + undeclare_peer_queryable(tables, None, res, &tables.zid.clone()); + } else { + let local_info = local_peer_qabl_info(tables, res); + register_peer_queryable(tables, None, res, &local_info, tables.zid); + } - face_hat_mut!(face).local_qabls.remove(res); + if client_qabls.len() == 1 && !peer_qabls { + let face = &mut client_qabls[0]; + if let Some((id, _)) = face_hat_mut!(face).local_qabls.remove(res) { + face.primitives.send_declare(RoutingContext::with_expr( + Declare { + ext_qos: ext::QoSType::DECLARE, + ext_tstamp: None, + ext_nodeid: ext::NodeIdType::DEFAULT, + body: DeclareBody::UndeclareQueryable(UndeclareQueryable { + id, + ext_wire_expr: WireExprType::null(), + }), + }, + res.expr(), + )); + } } } } @@ -510,9 +505,14 @@ pub(super) fn undeclare_client_queryable( fn forget_client_queryable( tables: &mut Tables, face: &mut Arc, - res: &mut Arc, -) { - undeclare_client_queryable(tables, face, res); + id: QueryableId, +) -> Option> { + if let Some(mut res) = face_hat_mut!(face).remote_qabls.remove(&id) { + undeclare_client_queryable(tables, face, &mut res); + Some(res) + } else { + None + } } pub(super) fn queries_new_face(tables: 
&mut Tables, face: &mut Arc) { @@ -520,7 +520,10 @@ pub(super) fn queries_new_face(tables: &mut Tables, face: &mut Arc) { for qabl in &hat!(tables).peer_qabls { if qabl.context.is_some() { let info = local_qabl_info(tables, qabl, face); - face_hat_mut!(face).local_qabls.insert(qabl.clone(), info); + let id = face_hat!(face).next_id.fetch_add(1, Ordering::SeqCst); + face_hat_mut!(face) + .local_qabls + .insert(qabl.clone(), (id, info)); let key_expr = Resource::decl_key(qabl, face); face.primitives.send_declare(RoutingContext::with_expr( Declare { @@ -528,7 +531,7 @@ pub(super) fn queries_new_face(tables: &mut Tables, face: &mut Arc) { ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareQueryable(DeclareQueryable { - id: 0, // @TODO use proper QueryableId (#703) + id, wire_expr: key_expr, ext_info: info, }), @@ -641,6 +644,7 @@ impl HatQueriesTrait for HatCode { &self, tables: &mut Tables, face: &mut Arc, + id: QueryableId, res: &mut Arc, qabl_info: &QueryableInfo, node_id: NodeId, @@ -650,7 +654,7 @@ impl HatQueriesTrait for HatCode { declare_peer_queryable(tables, face, res, qabl_info, peer); } } else { - declare_client_queryable(tables, face, res, qabl_info); + declare_client_queryable(tables, face, id, res, qabl_info); } } @@ -658,15 +662,23 @@ impl HatQueriesTrait for HatCode { &self, tables: &mut Tables, face: &mut Arc, - res: &mut Arc, + id: QueryableId, + res: Option>, node_id: NodeId, - ) { + ) -> Option> { if face.whatami != WhatAmI::Client { - if let Some(peer) = get_peer(tables, face, node_id) { - forget_peer_queryable(tables, face, res, &peer); + if let Some(mut res) = res { + if let Some(peer) = get_peer(tables, face, node_id) { + forget_peer_queryable(tables, face, &mut res, &peer); + Some(res) + } else { + None + } + } else { + None } } else { - forget_client_queryable(tables, face, res); + forget_client_queryable(tables, face, id) } } diff --git a/zenoh/src/net/routing/hat/mod.rs b/zenoh/src/net/routing/hat/mod.rs index 
4fbf9c9e5d..d9feb687f2 100644 --- a/zenoh/src/net/routing/hat/mod.rs +++ b/zenoh/src/net/routing/hat/mod.rs @@ -31,7 +31,10 @@ use zenoh_config::{unwrap_or_default, Config, WhatAmI}; use zenoh_protocol::{ core::WireExpr, network::{ - declare::{queryable::ext::QueryableInfo, subscriber::ext::SubscriberInfo}, + declare::{ + queryable::ext::QueryableInfo, subscriber::ext::SubscriberInfo, QueryableId, + SubscriberId, + }, Oam, }, }; @@ -117,6 +120,7 @@ pub(crate) trait HatPubSubTrait { &self, tables: &mut Tables, face: &mut Arc, + id: SubscriberId, res: &mut Arc, sub_info: &SubscriberInfo, node_id: NodeId, @@ -125,9 +129,10 @@ pub(crate) trait HatPubSubTrait { &self, tables: &mut Tables, face: &mut Arc, - res: &mut Arc, + id: SubscriberId, + res: Option>, node_id: NodeId, - ); + ) -> Option>; fn get_subscriptions(&self, tables: &Tables) -> Vec>; @@ -147,6 +152,7 @@ pub(crate) trait HatQueriesTrait { &self, tables: &mut Tables, face: &mut Arc, + id: QueryableId, res: &mut Arc, qabl_info: &QueryableInfo, node_id: NodeId, @@ -155,9 +161,10 @@ pub(crate) trait HatQueriesTrait { &self, tables: &mut Tables, face: &mut Arc, - res: &mut Arc, + id: QueryableId, + res: Option>, node_id: NodeId, - ); + ) -> Option>; fn get_queryables(&self, tables: &Tables) -> Vec>; diff --git a/zenoh/src/net/routing/hat/p2p_peer/mod.rs b/zenoh/src/net/routing/hat/p2p_peer/mod.rs index 8dc4f15ada..1a6c1ba407 100644 --- a/zenoh/src/net/routing/hat/p2p_peer/mod.rs +++ b/zenoh/src/net/routing/hat/p2p_peer/mod.rs @@ -45,11 +45,14 @@ use super::{ }; use std::{ any::Any, - collections::{HashMap, HashSet}, - sync::Arc, + collections::HashMap, + sync::{atomic::AtomicU32, Arc}, }; use zenoh_config::{unwrap_or_default, ModeDependent, WhatAmI, WhatAmIMatcher}; -use zenoh_protocol::network::Oam; +use zenoh_protocol::network::{ + declare::{QueryableId, SubscriberId}, + Oam, +}; use zenoh_protocol::{ common::ZExtBody, network::{declare::queryable::ext::QueryableInfo, oam::id::OAM_LINKSTATE}, @@ -177,7 +180,7 
@@ impl HatBaseTrait for HatCode { face.local_mappings.clear(); let mut subs_matches = vec![]; - for mut res in face + for (_id, mut res) in face .hat .downcast_mut::() .unwrap() @@ -205,7 +208,7 @@ impl HatBaseTrait for HatCode { } let mut qabls_matches = vec![]; - for mut res in face + for (_id, mut res) in face .hat .downcast_mut::() .unwrap() @@ -363,19 +366,21 @@ impl HatContext { } struct HatFace { - local_subs: HashSet>, - remote_subs: HashSet>, - local_qabls: HashMap, QueryableInfo>, - remote_qabls: HashSet>, + next_id: AtomicU32, // @TODO: manage rollover and uniqueness + local_subs: HashMap, SubscriberId>, + remote_subs: HashMap>, + local_qabls: HashMap, (QueryableId, QueryableInfo)>, + remote_qabls: HashMap>, } impl HatFace { fn new() -> Self { Self { - local_subs: HashSet::new(), - remote_subs: HashSet::new(), + next_id: AtomicU32::new(0), + local_subs: HashMap::new(), + remote_subs: HashMap::new(), local_qabls: HashMap::new(), - remote_qabls: HashSet::new(), + remote_qabls: HashMap::new(), } } } diff --git a/zenoh/src/net/routing/hat/p2p_peer/pubsub.rs b/zenoh/src/net/routing/hat/p2p_peer/pubsub.rs index a7d58ce1a5..4f6ce5aeca 100644 --- a/zenoh/src/net/routing/hat/p2p_peer/pubsub.rs +++ b/zenoh/src/net/routing/hat/p2p_peer/pubsub.rs @@ -22,8 +22,10 @@ use crate::net::routing::router::RoutesIndexes; use crate::net::routing::{RoutingContext, PREFIX_LIVELINESS}; use std::borrow::Cow; use std::collections::{HashMap, HashSet}; +use std::sync::atomic::Ordering; use std::sync::Arc; use zenoh_protocol::core::key_expr::OwnedKeyExpr; +use zenoh_protocol::network::declare::SubscriberId; use zenoh_protocol::{ core::{Reliability, WhatAmI}, network::declare::{ @@ -43,10 +45,11 @@ fn propagate_simple_subscription_to( ) { if (src_face.id != dst_face.id || (dst_face.whatami == WhatAmI::Client && res.expr().starts_with(PREFIX_LIVELINESS))) - && !face_hat!(dst_face).local_subs.contains(res) + && !face_hat!(dst_face).local_subs.contains_key(res) && (src_face.whatami == 
WhatAmI::Client || dst_face.whatami == WhatAmI::Client) { - face_hat_mut!(dst_face).local_subs.insert(res.clone()); + let id = face_hat!(dst_face).next_id.fetch_add(1, Ordering::SeqCst); + face_hat_mut!(dst_face).local_subs.insert(res.clone(), id); let key_expr = Resource::decl_key(res, dst_face); dst_face.primitives.send_declare(RoutingContext::with_expr( Declare { @@ -54,7 +57,7 @@ fn propagate_simple_subscription_to( ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareSubscriber(DeclareSubscriber { - id: 0, // @TODO use proper SubscriberId (#703) + id, wire_expr: key_expr, ext_info: *sub_info, }), @@ -83,13 +86,13 @@ fn propagate_simple_subscription( fn register_client_subscription( _tables: &mut Tables, face: &mut Arc, + id: SubscriberId, res: &mut Arc, sub_info: &SubscriberInfo, ) { // Register subscription { let res = get_mut_unchecked(res); - log::debug!("Register subscription {} for {}", res.expr(), face); match res.session_ctxs.get_mut(&face.id) { Some(ctx) => match &ctx.subs { Some(info) => { @@ -118,16 +121,17 @@ fn register_client_subscription( } } } - face_hat_mut!(face).remote_subs.insert(res.clone()); + face_hat_mut!(face).remote_subs.insert(id, res.clone()); } fn declare_client_subscription( tables: &mut Tables, face: &mut Arc, + id: SubscriberId, res: &mut Arc, sub_info: &SubscriberInfo, ) { - register_client_subscription(tables, face, res, sub_info); + register_client_subscription(tables, face, id, res, sub_info); let mut propa_sub_info = *sub_info; propa_sub_info.mode = Mode::Push; @@ -144,7 +148,7 @@ fn declare_client_subscription( ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareSubscriber(DeclareSubscriber { - id: 0, // @TODO use proper SubscriberId (#703) + id: 0, // @TODO use proper SubscriberId wire_expr: res.expr().into(), ext_info: *sub_info, }), @@ -170,21 +174,19 @@ fn client_subs(res: &Arc) -> Vec> { fn propagate_forget_simple_subscription(tables: &mut Tables, res: &Arc) { 
for face in tables.faces.values_mut() { - if face_hat!(face).local_subs.contains(res) { - let wire_expr = Resource::get_best_key(res, "", face.id); + if let Some(id) = face_hat_mut!(face).local_subs.remove(res) { face.primitives.send_declare(RoutingContext::with_expr( Declare { ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::UndeclareSubscriber(UndeclareSubscriber { - id: 0, // @TODO use proper SubscriberId (#703) - ext_wire_expr: WireExprType { wire_expr }, + id, + ext_wire_expr: WireExprType::null(), }), }, res.expr(), )); - face_hat_mut!(face).local_subs.remove(res); } } } @@ -194,36 +196,33 @@ pub(super) fn undeclare_client_subscription( face: &mut Arc, res: &mut Arc, ) { - log::debug!("Unregister client subscription {} for {}", res.expr(), face); - if let Some(ctx) = get_mut_unchecked(res).session_ctxs.get_mut(&face.id) { - get_mut_unchecked(ctx).subs = None; - } - face_hat_mut!(face).remote_subs.remove(res); - - let mut client_subs = client_subs(res); - if client_subs.is_empty() { - propagate_forget_simple_subscription(tables, res); - } - if client_subs.len() == 1 { - let face = &mut client_subs[0]; - if face_hat!(face).local_subs.contains(res) - && !(face.whatami == WhatAmI::Client && res.expr().starts_with(PREFIX_LIVELINESS)) - { - let wire_expr = Resource::get_best_key(res, "", face.id); - face.primitives.send_declare(RoutingContext::with_expr( - Declare { - ext_qos: ext::QoSType::DECLARE, - ext_tstamp: None, - ext_nodeid: ext::NodeIdType::DEFAULT, - body: DeclareBody::UndeclareSubscriber(UndeclareSubscriber { - id: 0, // @TODO use proper SubscriberId (#703) - ext_wire_expr: WireExprType { wire_expr }, - }), - }, - res.expr(), - )); + if !face_hat_mut!(face).remote_subs.values().any(|s| *s == *res) { + if let Some(ctx) = get_mut_unchecked(res).session_ctxs.get_mut(&face.id) { + get_mut_unchecked(ctx).subs = None; + } - face_hat_mut!(face).local_subs.remove(res); + let mut client_subs = 
client_subs(res); + if client_subs.is_empty() { + propagate_forget_simple_subscription(tables, res); + } + if client_subs.len() == 1 { + let face = &mut client_subs[0]; + if !(face.whatami == WhatAmI::Client && res.expr().starts_with(PREFIX_LIVELINESS)) { + if let Some(id) = face_hat_mut!(face).local_subs.remove(res) { + face.primitives.send_declare(RoutingContext::with_expr( + Declare { + ext_qos: ext::QoSType::DECLARE, + ext_tstamp: None, + ext_nodeid: ext::NodeIdType::DEFAULT, + body: DeclareBody::UndeclareSubscriber(UndeclareSubscriber { + id, + ext_wire_expr: WireExprType::null(), + }), + }, + res.expr(), + )); + } + } } } } @@ -231,9 +230,14 @@ pub(super) fn undeclare_client_subscription( fn forget_client_subscription( tables: &mut Tables, face: &mut Arc, - res: &mut Arc, -) { - undeclare_client_subscription(tables, face, res); + id: SubscriberId, +) -> Option> { + if let Some(mut res) = face_hat_mut!(face).remote_subs.remove(&id) { + undeclare_client_subscription(tables, face, &mut res); + Some(res) + } else { + None + } } pub(super) fn pubsub_new_face(tables: &mut Tables, face: &mut Arc) { @@ -247,7 +251,7 @@ pub(super) fn pubsub_new_face(tables: &mut Tables, face: &mut Arc) { .cloned() .collect::>>() { - for sub in &face_hat!(src_face).remote_subs { + for sub in face_hat!(src_face).remote_subs.values() { propagate_simple_subscription_to(tables, face, sub, &sub_info, &mut src_face.clone()); } } @@ -258,27 +262,29 @@ impl HatPubSubTrait for HatCode { &self, tables: &mut Tables, face: &mut Arc, + id: SubscriberId, res: &mut Arc, sub_info: &SubscriberInfo, _node_id: NodeId, ) { - declare_client_subscription(tables, face, res, sub_info); + declare_client_subscription(tables, face, id, res, sub_info); } fn undeclare_subscription( &self, tables: &mut Tables, face: &mut Arc, - res: &mut Arc, + id: SubscriberId, + _res: Option>, _node_id: NodeId, - ) { - forget_client_subscription(tables, face, res); + ) -> Option> { + forget_client_subscription(tables, face, id) } 
fn get_subscriptions(&self, tables: &Tables) -> Vec> { let mut subs = HashSet::new(); for src_face in tables.faces.values() { - for sub in &face_hat!(src_face).remote_subs { + for sub in face_hat!(src_face).remote_subs.values() { subs.insert(sub.clone()); } } diff --git a/zenoh/src/net/routing/hat/p2p_peer/queries.rs b/zenoh/src/net/routing/hat/p2p_peer/queries.rs index 68f2669f6f..04b31b41ef 100644 --- a/zenoh/src/net/routing/hat/p2p_peer/queries.rs +++ b/zenoh/src/net/routing/hat/p2p_peer/queries.rs @@ -23,10 +23,12 @@ use crate::net::routing::{RoutingContext, PREFIX_LIVELINESS}; use ordered_float::OrderedFloat; use std::borrow::Cow; use std::collections::{HashMap, HashSet}; +use std::sync::atomic::Ordering; use std::sync::Arc; use zenoh_buffers::ZBuf; use zenoh_protocol::core::key_expr::include::{Includer, DEFAULT_INCLUDER}; use zenoh_protocol::core::key_expr::OwnedKeyExpr; +use zenoh_protocol::network::declare::QueryableId; use zenoh_protocol::{ core::{WhatAmI, WireExpr}, network::declare::{ @@ -83,16 +85,19 @@ fn propagate_simple_queryable( let faces = tables.faces.values().cloned(); for mut dst_face in faces { let info = local_qabl_info(tables, res, &dst_face); - let current_info = face_hat!(dst_face).local_qabls.get(res); + let current = face_hat!(dst_face).local_qabls.get(res); if (src_face.is_none() || src_face.as_ref().unwrap().id != dst_face.id) - && (current_info.is_none() || *current_info.unwrap() != info) + && (current.is_none() || current.unwrap().1 != info) && (src_face.is_none() || src_face.as_ref().unwrap().whatami == WhatAmI::Client || dst_face.whatami == WhatAmI::Client) { + let id = current + .map(|c| c.0) + .unwrap_or(face_hat!(dst_face).next_id.fetch_add(1, Ordering::SeqCst)); face_hat_mut!(&mut dst_face) .local_qabls - .insert(res.clone(), info); + .insert(res.clone(), (id, info)); let key_expr = Resource::decl_key(res, &mut dst_face); dst_face.primitives.send_declare(RoutingContext::with_expr( Declare { @@ -100,7 +105,7 @@ fn 
propagate_simple_queryable( ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareQueryable(DeclareQueryable { - id: 0, // @TODO use proper QueryableId (#703) + id, wire_expr: key_expr, ext_info: info, }), @@ -114,13 +119,13 @@ fn propagate_simple_queryable( fn register_client_queryable( _tables: &mut Tables, face: &mut Arc, + id: QueryableId, res: &mut Arc, qabl_info: &QueryableInfo, ) { // Register queryable { let res = get_mut_unchecked(res); - log::debug!("Register queryable {} (face: {})", res.expr(), face,); get_mut_unchecked(res.session_ctxs.entry(face.id).or_insert_with(|| { Arc::new(SessionContext { face: face.clone(), @@ -135,16 +140,17 @@ fn register_client_queryable( })) .qabl = Some(*qabl_info); } - face_hat_mut!(face).remote_qabls.insert(res.clone()); + face_hat_mut!(face).remote_qabls.insert(id, res.clone()); } fn declare_client_queryable( tables: &mut Tables, face: &mut Arc, + id: QueryableId, res: &mut Arc, qabl_info: &QueryableInfo, ) { - register_client_queryable(tables, face, res, qabl_info); + register_client_queryable(tables, face, id, res, qabl_info); propagate_simple_queryable(tables, res, Some(face)); } @@ -164,22 +170,19 @@ fn client_qabls(res: &Arc) -> Vec> { fn propagate_forget_simple_queryable(tables: &mut Tables, res: &mut Arc) { for face in tables.faces.values_mut() { - if face_hat!(face).local_qabls.contains_key(res) { - let wire_expr = Resource::get_best_key(res, "", face.id); + if let Some((id, _)) = face_hat_mut!(face).local_qabls.remove(res) { face.primitives.send_declare(RoutingContext::with_expr( Declare { ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::UndeclareQueryable(UndeclareQueryable { - id: 0, // @TODO use proper QueryableId (#703) - ext_wire_expr: WireExprType { wire_expr }, + id, + ext_wire_expr: WireExprType::null(), }), }, res.expr(), )); - - face_hat_mut!(face).local_qabls.remove(res); } } } @@ -189,38 +192,37 @@ pub(super) fn 
undeclare_client_queryable( face: &mut Arc, res: &mut Arc, ) { - log::debug!("Unregister client queryable {} for {}", res.expr(), face); - if let Some(ctx) = get_mut_unchecked(res).session_ctxs.get_mut(&face.id) { - get_mut_unchecked(ctx).qabl = None; - if ctx.qabl.is_none() { - face_hat_mut!(face).remote_qabls.remove(res); + if !face_hat_mut!(face) + .remote_qabls + .values() + .any(|s| *s == *res) + { + if let Some(ctx) = get_mut_unchecked(res).session_ctxs.get_mut(&face.id) { + get_mut_unchecked(ctx).qabl = None; } - } - let mut client_qabls = client_qabls(res); - if client_qabls.is_empty() { - propagate_forget_simple_queryable(tables, res); - } else { - propagate_simple_queryable(tables, res, None); - } - if client_qabls.len() == 1 { - let face = &mut client_qabls[0]; - if face_hat!(face).local_qabls.contains_key(res) { - let wire_expr = Resource::get_best_key(res, "", face.id); - face.primitives.send_declare(RoutingContext::with_expr( - Declare { - ext_qos: ext::QoSType::DECLARE, - ext_tstamp: None, - ext_nodeid: ext::NodeIdType::DEFAULT, - body: DeclareBody::UndeclareQueryable(UndeclareQueryable { - id: 0, // @TODO use proper QueryableId (#703) - ext_wire_expr: WireExprType { wire_expr }, - }), - }, - res.expr(), - )); - - face_hat_mut!(face).local_qabls.remove(res); + let mut client_qabls = client_qabls(res); + if client_qabls.is_empty() { + propagate_forget_simple_queryable(tables, res); + } else { + propagate_simple_queryable(tables, res, None); + } + if client_qabls.len() == 1 { + let face = &mut client_qabls[0]; + if let Some((id, _)) = face_hat_mut!(face).local_qabls.remove(res) { + face.primitives.send_declare(RoutingContext::with_expr( + Declare { + ext_qos: ext::QoSType::DECLARE, + ext_tstamp: None, + ext_nodeid: ext::NodeIdType::DEFAULT, + body: DeclareBody::UndeclareQueryable(UndeclareQueryable { + id, + ext_wire_expr: WireExprType::null(), + }), + }, + res.expr(), + )); + } } } } @@ -228,9 +230,14 @@ pub(super) fn undeclare_client_queryable( fn 
forget_client_queryable( tables: &mut Tables, face: &mut Arc, - res: &mut Arc, -) { - undeclare_client_queryable(tables, face, res); + id: QueryableId, +) -> Option> { + if let Some(mut res) = face_hat_mut!(face).remote_qabls.remove(&id) { + undeclare_client_queryable(tables, face, &mut res); + Some(res) + } else { + None + } } pub(super) fn queries_new_face(tables: &mut Tables, _face: &mut Arc) { @@ -240,7 +247,7 @@ pub(super) fn queries_new_face(tables: &mut Tables, _face: &mut Arc) .cloned() .collect::>>() { - for qabl in face_hat!(face).remote_qabls.iter() { + for qabl in face_hat!(face).remote_qabls.values() { propagate_simple_queryable(tables, qabl, Some(&mut face.clone())); } } @@ -255,27 +262,29 @@ impl HatQueriesTrait for HatCode { &self, tables: &mut Tables, face: &mut Arc, + id: QueryableId, res: &mut Arc, qabl_info: &QueryableInfo, _node_id: NodeId, ) { - declare_client_queryable(tables, face, res, qabl_info); + declare_client_queryable(tables, face, id, res, qabl_info); } fn undeclare_queryable( &self, tables: &mut Tables, face: &mut Arc, - res: &mut Arc, + id: QueryableId, + _res: Option>, _node_id: NodeId, - ) { - forget_client_queryable(tables, face, res); + ) -> Option> { + forget_client_queryable(tables, face, id) } fn get_queryables(&self, tables: &Tables) -> Vec> { let mut qabls = HashSet::new(); for src_face in tables.faces.values() { - for qabl in &face_hat!(src_face).remote_qabls { + for qabl in face_hat!(src_face).remote_qabls.values() { qabls.insert(qabl.clone()); } } diff --git a/zenoh/src/net/routing/hat/router/mod.rs b/zenoh/src/net/routing/hat/router/mod.rs index 24c837e8f5..ff576ae271 100644 --- a/zenoh/src/net/routing/hat/router/mod.rs +++ b/zenoh/src/net/routing/hat/router/mod.rs @@ -52,12 +52,16 @@ use std::{ any::Any, collections::{hash_map::DefaultHasher, HashMap, HashSet}, hash::Hasher, - sync::Arc, + sync::{atomic::AtomicU32, Arc}, }; use zenoh_config::{unwrap_or_default, ModeDependent, WhatAmI, WhatAmIMatcher, ZenohId}; use 
zenoh_protocol::{ common::ZExtBody, - network::{declare::queryable::ext::QueryableInfo, oam::id::OAM_LINKSTATE, Oam}, + network::{ + declare::{queryable::ext::QueryableInfo, QueryableId, SubscriberId}, + oam::id::OAM_LINKSTATE, + Oam, + }, }; use zenoh_result::ZResult; use zenoh_sync::get_mut_unchecked; @@ -232,14 +236,12 @@ impl HatTables { .as_ref() .map(|net| { let links = net.get_links(peer1); - log::debug!("failover_brokering {} {} ({:?})", peer1, peer2, links); HatTables::failover_brokering_to(links, peer2) }) .unwrap_or(false) } fn schedule_compute_trees(&mut self, tables_ref: Arc, net_type: WhatAmI) { - log::trace!("Schedule computations"); if (net_type == WhatAmI::Router && self.routers_trees_task.is_none()) || (net_type == WhatAmI::Peer && self.peers_trees_task.is_none()) { @@ -264,7 +266,6 @@ impl HatTables { pubsub::pubsub_tree_change(&mut tables, &new_childs, net_type); queries::queries_tree_change(&mut tables, &new_childs, net_type); - log::trace!("Computations completed"); match net_type { WhatAmI::Router => hat_mut!(tables).routers_trees_task = None, _ => hat_mut!(tables).peers_trees_task = None, @@ -418,7 +419,7 @@ impl HatBaseTrait for HatCode { face.local_mappings.clear(); let mut subs_matches = vec![]; - for mut res in face + for (_id, mut res) in face .hat .downcast_mut::() .unwrap() @@ -446,7 +447,7 @@ impl HatBaseTrait for HatCode { } let mut qabls_matches = vec![]; - for mut res in face + for (_, mut res) in face .hat .downcast_mut::() .unwrap() @@ -773,20 +774,22 @@ impl HatContext { struct HatFace { link_id: usize, - local_subs: HashSet>, - remote_subs: HashSet>, - local_qabls: HashMap, QueryableInfo>, - remote_qabls: HashSet>, + next_id: AtomicU32, // @TODO: manage rollover and uniqueness + local_subs: HashMap, SubscriberId>, + remote_subs: HashMap>, + local_qabls: HashMap, (QueryableId, QueryableInfo)>, + remote_qabls: HashMap>, } impl HatFace { fn new() -> Self { Self { link_id: 0, - local_subs: HashSet::new(), - remote_subs: 
HashSet::new(), + next_id: AtomicU32::new(0), + local_subs: HashMap::new(), + remote_subs: HashMap::new(), local_qabls: HashMap::new(), - remote_qabls: HashSet::new(), + remote_qabls: HashMap::new(), } } } diff --git a/zenoh/src/net/routing/hat/router/pubsub.rs b/zenoh/src/net/routing/hat/router/pubsub.rs index 6030269cfa..da1ca66efd 100644 --- a/zenoh/src/net/routing/hat/router/pubsub.rs +++ b/zenoh/src/net/routing/hat/router/pubsub.rs @@ -25,8 +25,10 @@ use crate::net::routing::{RoutingContext, PREFIX_LIVELINESS}; use petgraph::graph::NodeIndex; use std::borrow::Cow; use std::collections::{HashMap, HashSet}; +use std::sync::atomic::Ordering; use std::sync::Arc; use zenoh_protocol::core::key_expr::OwnedKeyExpr; +use zenoh_protocol::network::declare::SubscriberId; use zenoh_protocol::{ core::{Reliability, WhatAmI, ZenohId}, network::declare::{ @@ -53,8 +55,6 @@ fn send_sourced_subscription_to_net_childs( if src_face.is_none() || someface.id != src_face.unwrap().id { let key_expr = Resource::decl_key(res, &mut someface); - log::debug!("Send subscription {} on {}", res.expr(), someface); - someface.primitives.send_declare(RoutingContext::with_expr( Declare { ext_qos: ext::QoSType::DECLARE, @@ -63,7 +63,7 @@ fn send_sourced_subscription_to_net_childs( node_id: routing_context, }, body: DeclareBody::DeclareSubscriber(DeclareSubscriber { - id: 0, // @TODO use proper SubscriberId (#703) + id: 0, // Sourced subscriptions do not use ids wire_expr: key_expr, ext_info: *sub_info, }), @@ -89,7 +89,7 @@ fn propagate_simple_subscription_to( ) { if (src_face.id != dst_face.id || (dst_face.whatami == WhatAmI::Client && res.expr().starts_with(PREFIX_LIVELINESS))) - && !face_hat!(dst_face).local_subs.contains(res) + && !face_hat!(dst_face).local_subs.contains_key(res) && if full_peer_net { dst_face.whatami == WhatAmI::Client } else { @@ -99,7 +99,8 @@ fn propagate_simple_subscription_to( || hat!(tables).failover_brokering(src_face.zid, dst_face.zid)) } { - 
face_hat_mut!(dst_face).local_subs.insert(res.clone()); + let id = face_hat!(dst_face).next_id.fetch_add(1, Ordering::SeqCst); + face_hat_mut!(dst_face).local_subs.insert(res.clone(), id); let key_expr = Resource::decl_key(res, dst_face); dst_face.primitives.send_declare(RoutingContext::with_expr( Declare { @@ -107,7 +108,7 @@ fn propagate_simple_subscription_to( ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareSubscriber(DeclareSubscriber { - id: 0, // @TODO use proper SubscriberId (#703) + id, wire_expr: key_expr, ext_info: *sub_info, }), @@ -189,11 +190,6 @@ fn register_router_subscription( if !res_hat!(res).router_subs.contains(&router) { // Register router subscription { - log::debug!( - "Register router subscription {} (router: {})", - res.expr(), - router - ); res_hat_mut!(res).router_subs.insert(router); hat_mut!(tables).router_subs.insert(res.clone()); } @@ -230,7 +226,6 @@ fn register_peer_subscription( if !res_hat!(res).peer_subs.contains(&peer) { // Register peer subscription { - log::debug!("Register peer subscription {} (peer: {})", res.expr(), peer); res_hat_mut!(res).peer_subs.insert(peer); hat_mut!(tables).peer_subs.insert(res.clone()); } @@ -257,13 +252,13 @@ fn declare_peer_subscription( fn register_client_subscription( _tables: &mut Tables, face: &mut Arc, + id: SubscriberId, res: &mut Arc, sub_info: &SubscriberInfo, ) { // Register subscription { let res = get_mut_unchecked(res); - log::debug!("Register subscription {} for {}", res.expr(), face); match res.session_ctxs.get_mut(&face.id) { Some(ctx) => match &ctx.subs { Some(info) => { @@ -292,16 +287,17 @@ fn register_client_subscription( } } } - face_hat_mut!(face).remote_subs.insert(res.clone()); + face_hat_mut!(face).remote_subs.insert(id, res.clone()); } fn declare_client_subscription( tables: &mut Tables, face: &mut Arc, + id: SubscriberId, res: &mut Arc, sub_info: &SubscriberInfo, ) { - register_client_subscription(tables, face, res, sub_info); + 
register_client_subscription(tables, face, id, res, sub_info); let mut propa_sub_info = *sub_info; propa_sub_info.mode = Mode::Push; let zid = tables.zid; @@ -356,8 +352,6 @@ fn send_forget_sourced_subscription_to_net_childs( if src_face.is_none() || someface.id != src_face.unwrap().id { let wire_expr = Resource::decl_key(res, &mut someface); - log::debug!("Send forget subscription {} on {}", res.expr(), someface); - someface.primitives.send_declare(RoutingContext::with_expr( Declare { ext_qos: ext::QoSType::DECLARE, @@ -366,7 +360,7 @@ fn send_forget_sourced_subscription_to_net_childs( node_id: routing_context.unwrap_or(0), }, body: DeclareBody::UndeclareSubscriber(UndeclareSubscriber { - id: 0, // @TODO use proper SubscriberId (#703) + id: 0, // Sourced subscriptions do not use ids ext_wire_expr: WireExprType { wire_expr }, }), }, @@ -382,21 +376,19 @@ fn send_forget_sourced_subscription_to_net_childs( fn propagate_forget_simple_subscription(tables: &mut Tables, res: &Arc) { for face in tables.faces.values_mut() { - if face_hat!(face).local_subs.contains(res) { - let wire_expr = Resource::get_best_key(res, "", face.id); + if let Some(id) = face_hat_mut!(face).local_subs.remove(res) { face.primitives.send_declare(RoutingContext::with_expr( Declare { ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::UndeclareSubscriber(UndeclareSubscriber { - id: 0, // @TODO use proper SubscriberId (#703) - ext_wire_expr: WireExprType { wire_expr }, + id, + ext_wire_expr: WireExprType::null(), }), }, res.expr(), )); - face_hat_mut!(face).local_subs.remove(res); } } } @@ -413,7 +405,7 @@ fn propagate_forget_simple_subscription_to_peers(tables: &mut Tables, res: &Arc< .collect::>>() { if face.whatami == WhatAmI::Peer - && face_hat!(face).local_subs.contains(res) + && face_hat!(face).local_subs.contains_key(res) && !res.session_ctxs.values().any(|s| { face.zid != s.face.zid && s.subs.is_some() @@ -422,21 +414,20 @@ fn 
propagate_forget_simple_subscription_to_peers(tables: &mut Tables, res: &Arc< && hat!(tables).failover_brokering(s.face.zid, face.zid))) }) { - let wire_expr = Resource::get_best_key(res, "", face.id); - face.primitives.send_declare(RoutingContext::with_expr( - Declare { - ext_qos: ext::QoSType::DECLARE, - ext_tstamp: None, - ext_nodeid: ext::NodeIdType::DEFAULT, - body: DeclareBody::UndeclareSubscriber(UndeclareSubscriber { - id: 0, // @TODO use proper SubscriberId (#703) - ext_wire_expr: WireExprType { wire_expr }, - }), - }, - res.expr(), - )); - - face_hat_mut!(&mut face).local_subs.remove(res); + if let Some(id) = face_hat_mut!(&mut face).local_subs.remove(res) { + face.primitives.send_declare(RoutingContext::with_expr( + Declare { + ext_qos: ext::QoSType::DECLARE, + ext_tstamp: None, + ext_nodeid: ext::NodeIdType::DEFAULT, + body: DeclareBody::UndeclareSubscriber(UndeclareSubscriber { + id, + ext_wire_expr: WireExprType::null(), + }), + }, + res.expr(), + )); + } } } } @@ -479,11 +470,6 @@ fn propagate_forget_sourced_subscription( } fn unregister_router_subscription(tables: &mut Tables, res: &mut Arc, router: &ZenohId) { - log::debug!( - "Unregister router subscription {} (router: {})", - res.expr(), - router - ); res_hat_mut!(res).router_subs.retain(|sub| sub != router); if res_hat!(res).router_subs.is_empty() { @@ -522,11 +508,6 @@ fn forget_router_subscription( } fn unregister_peer_subscription(tables: &mut Tables, res: &mut Arc, peer: &ZenohId) { - log::debug!( - "Unregister peer subscription {} (peer: {})", - res.expr(), - peer - ); res_hat_mut!(res).peer_subs.retain(|sub| sub != peer); if res_hat!(res).peer_subs.is_empty() { @@ -568,40 +549,37 @@ pub(super) fn undeclare_client_subscription( face: &mut Arc, res: &mut Arc, ) { - log::debug!("Unregister client subscription {} for {}", res.expr(), face); - if let Some(ctx) = get_mut_unchecked(res).session_ctxs.get_mut(&face.id) { - get_mut_unchecked(ctx).subs = None; - } - 
face_hat_mut!(face).remote_subs.remove(res); - - let mut client_subs = client_subs(res); - let router_subs = remote_router_subs(tables, res); - let peer_subs = remote_peer_subs(tables, res); - if client_subs.is_empty() && !peer_subs { - undeclare_router_subscription(tables, None, res, &tables.zid.clone()); - } else { - propagate_forget_simple_subscription_to_peers(tables, res); - } - if client_subs.len() == 1 && !router_subs && !peer_subs { - let face = &mut client_subs[0]; - if face_hat!(face).local_subs.contains(res) - && !(face.whatami == WhatAmI::Client && res.expr().starts_with(PREFIX_LIVELINESS)) - { - let wire_expr = Resource::get_best_key(res, "", face.id); - face.primitives.send_declare(RoutingContext::with_expr( - Declare { - ext_qos: ext::QoSType::DECLARE, - ext_tstamp: None, - ext_nodeid: ext::NodeIdType::DEFAULT, - body: DeclareBody::UndeclareSubscriber(UndeclareSubscriber { - id: 0, // @TODO use proper SubscriberId (#703) - ext_wire_expr: WireExprType { wire_expr }, - }), - }, - res.expr(), - )); + if !face_hat_mut!(face).remote_subs.values().any(|s| *s == *res) { + if let Some(ctx) = get_mut_unchecked(res).session_ctxs.get_mut(&face.id) { + get_mut_unchecked(ctx).subs = None; + } - face_hat_mut!(face).local_subs.remove(res); + let mut client_subs = client_subs(res); + let router_subs = remote_router_subs(tables, res); + let peer_subs = remote_peer_subs(tables, res); + if client_subs.is_empty() && !peer_subs { + undeclare_router_subscription(tables, None, res, &tables.zid.clone()); + } else { + propagate_forget_simple_subscription_to_peers(tables, res); + } + if client_subs.len() == 1 && !router_subs && !peer_subs { + let face = &mut client_subs[0]; + if !(face.whatami == WhatAmI::Client && res.expr().starts_with(PREFIX_LIVELINESS)) { + if let Some(id) = face_hat_mut!(face).local_subs.remove(res) { + face.primitives.send_declare(RoutingContext::with_expr( + Declare { + ext_qos: ext::QoSType::DECLARE, + ext_tstamp: None, + ext_nodeid: 
ext::NodeIdType::DEFAULT, + body: DeclareBody::UndeclareSubscriber(UndeclareSubscriber { + id, + ext_wire_expr: WireExprType::null(), + }), + }, + res.expr(), + )); + } + } } } } @@ -609,9 +587,14 @@ pub(super) fn undeclare_client_subscription( fn forget_client_subscription( tables: &mut Tables, face: &mut Arc, - res: &mut Arc, -) { - undeclare_client_subscription(tables, face, res); + id: SubscriberId, +) -> Option> { + if let Some(mut res) = face_hat_mut!(face).remote_subs.remove(&id) { + undeclare_client_subscription(tables, face, &mut res); + Some(res) + } else { + None + } } pub(super) fn pubsub_new_face(tables: &mut Tables, face: &mut Arc) { @@ -622,7 +605,8 @@ pub(super) fn pubsub_new_face(tables: &mut Tables, face: &mut Arc) { if face.whatami == WhatAmI::Client { for sub in &hat!(tables).router_subs { - face_hat_mut!(face).local_subs.insert(sub.clone()); + let id = face_hat!(face).next_id.fetch_add(1, Ordering::SeqCst); + face_hat_mut!(face).local_subs.insert(sub.clone(), id); let key_expr = Resource::decl_key(sub, face); face.primitives.send_declare(RoutingContext::with_expr( Declare { @@ -630,7 +614,7 @@ pub(super) fn pubsub_new_face(tables: &mut Tables, face: &mut Arc) { ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareSubscriber(DeclareSubscriber { - id: 0, // @TODO use proper SubscriberId (#703) + id, wire_expr: key_expr, ext_info: sub_info, }), @@ -649,7 +633,8 @@ pub(super) fn pubsub_new_face(tables: &mut Tables, face: &mut Arc) { && hat!(tables).failover_brokering(s.face.zid, face.zid))) })) { - face_hat_mut!(face).local_subs.insert(sub.clone()); + let id = face_hat!(face).next_id.fetch_add(1, Ordering::SeqCst); + face_hat_mut!(face).local_subs.insert(sub.clone(), id); let key_expr = Resource::decl_key(sub, face); face.primitives.send_declare(RoutingContext::with_expr( Declare { @@ -657,7 +642,7 @@ pub(super) fn pubsub_new_face(tables: &mut Tables, face: &mut Arc) { ext_tstamp: None, ext_nodeid: 
ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareSubscriber(DeclareSubscriber { - id: 0, // @TODO use proper SubscriberId (#703) + id, wire_expr: key_expr, ext_info: sub_info, }), @@ -760,7 +745,7 @@ pub(super) fn pubsub_tree_change( pub(super) fn pubsub_linkstate_change(tables: &mut Tables, zid: &ZenohId, links: &[ZenohId]) { if let Some(src_face) = tables.get_face(zid).cloned() { if hat!(tables).router_peers_failover_brokering && src_face.whatami == WhatAmI::Peer { - for res in &face_hat!(src_face).remote_subs { + for res in face_hat!(src_face).remote_subs.values() { let client_subs = res .session_ctxs .values() @@ -772,7 +757,7 @@ pub(super) fn pubsub_linkstate_change(tables: &mut Tables, zid: &ZenohId, links: { let dst_face = &mut get_mut_unchecked(ctx).face; if dst_face.whatami == WhatAmI::Peer && src_face.zid != dst_face.zid { - if face_hat!(dst_face).local_subs.contains(res) { + if let Some(id) = face_hat!(dst_face).local_subs.get(res).cloned() { let forget = !HatTables::failover_brokering_to(links, dst_face.zid) && { let ctx_links = hat!(tables) @@ -790,7 +775,6 @@ pub(super) fn pubsub_linkstate_change(tables: &mut Tables, zid: &ZenohId, links: }) }; if forget { - let wire_expr = Resource::get_best_key(res, "", dst_face.id); dst_face.primitives.send_declare(RoutingContext::with_expr( Declare { ext_qos: ext::QoSType::DECLARE, @@ -798,8 +782,8 @@ pub(super) fn pubsub_linkstate_change(tables: &mut Tables, zid: &ZenohId, links: ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::UndeclareSubscriber( UndeclareSubscriber { - id: 0, // @TODO use proper SubscriberId (#703) - ext_wire_expr: WireExprType { wire_expr }, + id, + ext_wire_expr: WireExprType::null(), }, ), }, @@ -810,7 +794,8 @@ pub(super) fn pubsub_linkstate_change(tables: &mut Tables, zid: &ZenohId, links: } } else if HatTables::failover_brokering_to(links, ctx.face.zid) { let dst_face = &mut get_mut_unchecked(ctx).face; - face_hat_mut!(dst_face).local_subs.insert(res.clone()); + let id = 
face_hat!(dst_face).next_id.fetch_add(1, Ordering::SeqCst); + face_hat_mut!(dst_face).local_subs.insert(res.clone(), id); let key_expr = Resource::decl_key(res, dst_face); let sub_info = SubscriberInfo { reliability: Reliability::Reliable, // @TODO compute proper reliability to propagate from reliability of known subscribers @@ -822,7 +807,7 @@ pub(super) fn pubsub_linkstate_change(tables: &mut Tables, zid: &ZenohId, links: ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareSubscriber(DeclareSubscriber { - id: 0, // @TODO use proper SubscriberId (#703) + id, wire_expr: key_expr, ext_info: sub_info, }), @@ -876,6 +861,7 @@ impl HatPubSubTrait for HatCode { &self, tables: &mut Tables, face: &mut Arc, + id: SubscriberId, res: &mut Arc, sub_info: &SubscriberInfo, node_id: NodeId, @@ -892,10 +878,10 @@ impl HatPubSubTrait for HatCode { declare_peer_subscription(tables, face, res, sub_info, peer) } } else { - declare_client_subscription(tables, face, res, sub_info) + declare_client_subscription(tables, face, id, res, sub_info) } } - _ => declare_client_subscription(tables, face, res, sub_info), + _ => declare_client_subscription(tables, face, id, res, sub_info), } } @@ -903,25 +889,40 @@ impl HatPubSubTrait for HatCode { &self, tables: &mut Tables, face: &mut Arc, - res: &mut Arc, + id: SubscriberId, + res: Option>, node_id: NodeId, - ) { + ) -> Option> { match face.whatami { WhatAmI::Router => { - if let Some(router) = get_router(tables, face, node_id) { - forget_router_subscription(tables, face, res, &router) + if let Some(mut res) = res { + if let Some(router) = get_router(tables, face, node_id) { + forget_router_subscription(tables, face, &mut res, &router); + Some(res) + } else { + None + } + } else { + None } } WhatAmI::Peer => { if hat!(tables).full_net(WhatAmI::Peer) { - if let Some(peer) = get_peer(tables, face, node_id) { - forget_peer_subscription(tables, face, res, &peer) + if let Some(mut res) = res { + if let Some(peer) = 
get_peer(tables, face, node_id) { + forget_peer_subscription(tables, face, &mut res, &peer); + Some(res) + } else { + None + } + } else { + None } } else { - forget_client_subscription(tables, face, res) + forget_client_subscription(tables, face, id) } } - _ => forget_client_subscription(tables, face, res), + _ => forget_client_subscription(tables, face, id), } } diff --git a/zenoh/src/net/routing/hat/router/queries.rs b/zenoh/src/net/routing/hat/router/queries.rs index 008e71d7af..b76f0adcc6 100644 --- a/zenoh/src/net/routing/hat/router/queries.rs +++ b/zenoh/src/net/routing/hat/router/queries.rs @@ -26,10 +26,12 @@ use ordered_float::OrderedFloat; use petgraph::graph::NodeIndex; use std::borrow::Cow; use std::collections::HashMap; +use std::sync::atomic::Ordering; use std::sync::Arc; use zenoh_buffers::ZBuf; use zenoh_protocol::core::key_expr::include::{Includer, DEFAULT_INCLUDER}; use zenoh_protocol::core::key_expr::OwnedKeyExpr; +use zenoh_protocol::network::declare::QueryableId; use zenoh_protocol::{ core::{WhatAmI, WireExpr, ZenohId}, network::declare::{ @@ -204,8 +206,6 @@ fn send_sourced_queryable_to_net_childs( if src_face.is_none() || someface.id != src_face.as_ref().unwrap().id { let key_expr = Resource::decl_key(res, &mut someface); - log::debug!("Send queryable {} on {}", res.expr(), someface); - someface.primitives.send_declare(RoutingContext::with_expr( Declare { ext_qos: ext::QoSType::DECLARE, @@ -214,7 +214,7 @@ fn send_sourced_queryable_to_net_childs( node_id: routing_context, }, body: DeclareBody::DeclareQueryable(DeclareQueryable { - id: 0, // @TODO use proper QueryableId (#703) + id: 0, // Sourced queryables do not use ids wire_expr: key_expr, ext_info: *qabl_info, }), @@ -238,9 +238,9 @@ fn propagate_simple_queryable( let faces = tables.faces.values().cloned(); for mut dst_face in faces { let info = local_qabl_info(tables, res, &dst_face); - let current_info = face_hat!(dst_face).local_qabls.get(res); + let current = 
face_hat!(dst_face).local_qabls.get(res); if (src_face.is_none() || src_face.as_ref().unwrap().id != dst_face.id) - && (current_info.is_none() || *current_info.unwrap() != info) + && (current.is_none() || current.unwrap().1 != info) && if full_peers_net { dst_face.whatami == WhatAmI::Client } else { @@ -252,9 +252,12 @@ fn propagate_simple_queryable( .failover_brokering(src_face.as_ref().unwrap().zid, dst_face.zid)) } { + let id = current + .map(|c| c.0) + .unwrap_or(face_hat!(dst_face).next_id.fetch_add(1, Ordering::SeqCst)); face_hat_mut!(&mut dst_face) .local_qabls - .insert(res.clone(), info); + .insert(res.clone(), (id, info)); let key_expr = Resource::decl_key(res, &mut dst_face); dst_face.primitives.send_declare(RoutingContext::with_expr( Declare { @@ -262,7 +265,7 @@ fn propagate_simple_queryable( ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareQueryable(DeclareQueryable { - id: 0, // @TODO use proper QueryableId (#703) + id, wire_expr: key_expr, ext_info: info, }), @@ -322,11 +325,6 @@ fn register_router_queryable( if current_info.is_none() || current_info.unwrap() != qabl_info { // Register router queryable { - log::debug!( - "Register router queryable {} (router: {})", - res.expr(), - router, - ); res_hat_mut!(res).router_qabls.insert(router, *qabl_info); hat_mut!(tables).router_qabls.insert(res.clone()); } @@ -375,7 +373,6 @@ fn register_peer_queryable( if current_info.is_none() || current_info.unwrap() != qabl_info { // Register peer queryable { - log::debug!("Register peer queryable {} (peer: {})", res.expr(), peer,); res_hat_mut!(res).peer_qabls.insert(peer, *qabl_info); hat_mut!(tables).peer_qabls.insert(res.clone()); } @@ -402,13 +399,13 @@ fn declare_peer_queryable( fn register_client_queryable( _tables: &mut Tables, face: &mut Arc, + id: QueryableId, res: &mut Arc, qabl_info: &QueryableInfo, ) { // Register queryable { let res = get_mut_unchecked(res); - log::debug!("Register queryable {} (face: {})", res.expr(), 
face,); get_mut_unchecked(res.session_ctxs.entry(face.id).or_insert_with(|| { Arc::new(SessionContext { face: face.clone(), @@ -423,16 +420,17 @@ fn register_client_queryable( })) .qabl = Some(*qabl_info); } - face_hat_mut!(face).remote_qabls.insert(res.clone()); + face_hat_mut!(face).remote_qabls.insert(id, res.clone()); } fn declare_client_queryable( tables: &mut Tables, face: &mut Arc, + id: QueryableId, res: &mut Arc, qabl_info: &QueryableInfo, ) { - register_client_queryable(tables, face, res, qabl_info); + register_client_queryable(tables, face, id, res, qabl_info); let local_details = local_router_qabl_info(tables, res); let zid = tables.zid; register_router_queryable(tables, Some(face), res, &local_details, zid); @@ -486,8 +484,6 @@ fn send_forget_sourced_queryable_to_net_childs( if src_face.is_none() || someface.id != src_face.unwrap().id { let wire_expr = Resource::decl_key(res, &mut someface); - log::debug!("Send forget queryable {} on {}", res.expr(), someface); - someface.primitives.send_declare(RoutingContext::with_expr( Declare { ext_qos: ext::QoSType::DECLARE, @@ -496,7 +492,7 @@ fn send_forget_sourced_queryable_to_net_childs( node_id: routing_context, }, body: DeclareBody::UndeclareQueryable(UndeclareQueryable { - id: 0, // @TODO use proper QueryableId (#703) + id: 0, // Sourced queryables do not use ids ext_wire_expr: WireExprType { wire_expr }, }), }, @@ -512,22 +508,19 @@ fn send_forget_sourced_queryable_to_net_childs( fn propagate_forget_simple_queryable(tables: &mut Tables, res: &mut Arc) { for face in tables.faces.values_mut() { - if face_hat!(face).local_qabls.contains_key(res) { - let wire_expr = Resource::get_best_key(res, "", face.id); + if let Some((id, _)) = face_hat_mut!(face).local_qabls.remove(res) { face.primitives.send_declare(RoutingContext::with_expr( Declare { ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::UndeclareQueryable(UndeclareQueryable { - id: 0, // @TODO use 
proper QueryableId (#703) - ext_wire_expr: WireExprType { wire_expr }, + id, + ext_wire_expr: WireExprType::null(), }), }, res.expr(), )); - - face_hat_mut!(face).local_qabls.remove(res); } } } @@ -553,21 +546,20 @@ fn propagate_forget_simple_queryable_to_peers(tables: &mut Tables, res: &mut Arc && hat!(tables).failover_brokering(s.face.zid, face.zid))) }) { - let wire_expr = Resource::get_best_key(res, "", face.id); - face.primitives.send_declare(RoutingContext::with_expr( - Declare { - ext_qos: ext::QoSType::DECLARE, - ext_tstamp: None, - ext_nodeid: ext::NodeIdType::DEFAULT, - body: DeclareBody::UndeclareQueryable(UndeclareQueryable { - id: 0, // @TODO use proper QueryableId (#703) - ext_wire_expr: WireExprType { wire_expr }, - }), - }, - res.expr(), - )); - - face_hat_mut!(&mut face).local_qabls.remove(res); + if let Some((id, _)) = face_hat_mut!(&mut face).local_qabls.remove(res) { + face.primitives.send_declare(RoutingContext::with_expr( + Declare { + ext_qos: ext::QoSType::DECLARE, + ext_tstamp: None, + ext_nodeid: ext::NodeIdType::DEFAULT, + body: DeclareBody::UndeclareQueryable(UndeclareQueryable { + id, + ext_wire_expr: WireExprType::null(), + }), + }, + res.expr(), + )); + } } } } @@ -610,11 +602,6 @@ fn propagate_forget_sourced_queryable( } fn unregister_router_queryable(tables: &mut Tables, res: &mut Arc, router: &ZenohId) { - log::debug!( - "Unregister router queryable {} (router: {})", - res.expr(), - router, - ); res_hat_mut!(res).router_qabls.remove(router); if res_hat!(res).router_qabls.is_empty() { @@ -653,7 +640,6 @@ fn forget_router_queryable( } fn unregister_peer_queryable(tables: &mut Tables, res: &mut Arc, peer: &ZenohId) { - log::debug!("Unregister peer queryable {} (peer: {})", res.expr(), peer,); res_hat_mut!(res).peer_qabls.remove(peer); if res_hat!(res).peer_qabls.is_empty() { @@ -699,44 +685,43 @@ pub(super) fn undeclare_client_queryable( face: &mut Arc, res: &mut Arc, ) { - log::debug!("Unregister client queryable {} for {}", 
res.expr(), face); - if let Some(ctx) = get_mut_unchecked(res).session_ctxs.get_mut(&face.id) { - get_mut_unchecked(ctx).qabl = None; - if ctx.qabl.is_none() { - face_hat_mut!(face).remote_qabls.remove(res); + if !face_hat_mut!(face) + .remote_qabls + .values() + .any(|s| *s == *res) + { + if let Some(ctx) = get_mut_unchecked(res).session_ctxs.get_mut(&face.id) { + get_mut_unchecked(ctx).qabl = None; } - } - let mut client_qabls = client_qabls(res); - let router_qabls = remote_router_qabls(tables, res); - let peer_qabls = remote_peer_qabls(tables, res); + let mut client_qabls = client_qabls(res); + let router_qabls = remote_router_qabls(tables, res); + let peer_qabls = remote_peer_qabls(tables, res); - if client_qabls.is_empty() && !peer_qabls { - undeclare_router_queryable(tables, None, res, &tables.zid.clone()); - } else { - let local_info = local_router_qabl_info(tables, res); - register_router_queryable(tables, None, res, &local_info, tables.zid); - propagate_forget_simple_queryable_to_peers(tables, res); - } - - if client_qabls.len() == 1 && !router_qabls && !peer_qabls { - let face = &mut client_qabls[0]; - if face_hat!(face).local_qabls.contains_key(res) { - let wire_expr = Resource::get_best_key(res, "", face.id); - face.primitives.send_declare(RoutingContext::with_expr( - Declare { - ext_qos: ext::QoSType::DECLARE, - ext_tstamp: None, - ext_nodeid: ext::NodeIdType::DEFAULT, - body: DeclareBody::UndeclareQueryable(UndeclareQueryable { - id: 0, // @TODO use proper QueryableId (#703) - ext_wire_expr: WireExprType { wire_expr }, - }), - }, - res.expr(), - )); + if client_qabls.is_empty() && !peer_qabls { + undeclare_router_queryable(tables, None, res, &tables.zid.clone()); + } else { + let local_info = local_router_qabl_info(tables, res); + register_router_queryable(tables, None, res, &local_info, tables.zid); + propagate_forget_simple_queryable_to_peers(tables, res); + } - face_hat_mut!(face).local_qabls.remove(res); + if client_qabls.len() == 1 && 
!router_qabls && !peer_qabls { + let face = &mut client_qabls[0]; + if let Some((id, _)) = face_hat_mut!(face).local_qabls.remove(res) { + face.primitives.send_declare(RoutingContext::with_expr( + Declare { + ext_qos: ext::QoSType::DECLARE, + ext_tstamp: None, + ext_nodeid: ext::NodeIdType::DEFAULT, + body: DeclareBody::UndeclareQueryable(UndeclareQueryable { + id, + ext_wire_expr: WireExprType::null(), + }), + }, + res.expr(), + )); + } } } } @@ -744,9 +729,14 @@ pub(super) fn undeclare_client_queryable( fn forget_client_queryable( tables: &mut Tables, face: &mut Arc, - res: &mut Arc, -) { - undeclare_client_queryable(tables, face, res); + id: QueryableId, +) -> Option> { + if let Some(mut res) = face_hat_mut!(face).remote_qabls.remove(&id) { + undeclare_client_queryable(tables, face, &mut res); + Some(res) + } else { + None + } } pub(super) fn queries_new_face(tables: &mut Tables, face: &mut Arc) { @@ -754,7 +744,10 @@ pub(super) fn queries_new_face(tables: &mut Tables, face: &mut Arc) { for qabl in hat!(tables).router_qabls.iter() { if qabl.context.is_some() { let info = local_qabl_info(tables, qabl, face); - face_hat_mut!(face).local_qabls.insert(qabl.clone(), info); + let id = face_hat!(face).next_id.fetch_add(1, Ordering::SeqCst); + face_hat_mut!(face) + .local_qabls + .insert(qabl.clone(), (id, info)); let key_expr = Resource::decl_key(qabl, face); face.primitives.send_declare(RoutingContext::with_expr( Declare { @@ -762,7 +755,7 @@ pub(super) fn queries_new_face(tables: &mut Tables, face: &mut Arc) { ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareQueryable(DeclareQueryable { - id: 0, // @TODO use proper QueryableId (#703) + id, wire_expr: key_expr, ext_info: info, }), @@ -783,7 +776,10 @@ pub(super) fn queries_new_face(tables: &mut Tables, face: &mut Arc) { })) { let info = local_qabl_info(tables, qabl, face); - face_hat_mut!(face).local_qabls.insert(qabl.clone(), info); + let id = face_hat!(face).next_id.fetch_add(1, 
Ordering::SeqCst); + face_hat_mut!(face) + .local_qabls + .insert(qabl.clone(), (id, info)); let key_expr = Resource::decl_key(qabl, face); face.primitives.send_declare(RoutingContext::with_expr( Declare { @@ -791,7 +787,7 @@ pub(super) fn queries_new_face(tables: &mut Tables, face: &mut Arc) { ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareQueryable(DeclareQueryable { - id: 0, // @TODO use proper QueryableId (#703) + id, wire_expr: key_expr, ext_info: info, }), @@ -853,7 +849,7 @@ pub(super) fn queries_remove_node(tables: &mut Tables, node: &ZenohId, net_type: pub(super) fn queries_linkstate_change(tables: &mut Tables, zid: &ZenohId, links: &[ZenohId]) { if let Some(src_face) = tables.get_face(zid) { if hat!(tables).router_peers_failover_brokering && src_face.whatami == WhatAmI::Peer { - for res in &face_hat!(src_face).remote_qabls { + for res in face_hat!(src_face).remote_qabls.values() { let client_qabls = res .session_ctxs .values() @@ -865,7 +861,7 @@ pub(super) fn queries_linkstate_change(tables: &mut Tables, zid: &ZenohId, links { let dst_face = &mut get_mut_unchecked(ctx).face; if dst_face.whatami == WhatAmI::Peer && src_face.zid != dst_face.zid { - if face_hat!(dst_face).local_qabls.contains_key(res) { + if let Some(id) = face_hat!(dst_face).local_subs.get(res).cloned() { let forget = !HatTables::failover_brokering_to(links, dst_face.zid) && { let ctx_links = hat!(tables) @@ -883,7 +879,6 @@ pub(super) fn queries_linkstate_change(tables: &mut Tables, zid: &ZenohId, links }) }; if forget { - let wire_expr = Resource::get_best_key(res, "", dst_face.id); dst_face.primitives.send_declare(RoutingContext::with_expr( Declare { ext_qos: ext::QoSType::DECLARE, @@ -891,8 +886,8 @@ pub(super) fn queries_linkstate_change(tables: &mut Tables, zid: &ZenohId, links ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::UndeclareQueryable( UndeclareQueryable { - id: 0, // @TODO use proper QueryableId (#703) - ext_wire_expr: 
WireExprType { wire_expr }, + id, + ext_wire_expr: WireExprType::null(), }, ), }, @@ -904,9 +899,10 @@ pub(super) fn queries_linkstate_change(tables: &mut Tables, zid: &ZenohId, links } else if HatTables::failover_brokering_to(links, ctx.face.zid) { let dst_face = &mut get_mut_unchecked(ctx).face; let info = local_qabl_info(tables, res, dst_face); + let id = face_hat!(dst_face).next_id.fetch_add(1, Ordering::SeqCst); face_hat_mut!(dst_face) .local_qabls - .insert(res.clone(), info); + .insert(res.clone(), (id, info)); let key_expr = Resource::decl_key(res, dst_face); dst_face.primitives.send_declare(RoutingContext::with_expr( Declare { @@ -914,7 +910,7 @@ pub(super) fn queries_linkstate_change(tables: &mut Tables, zid: &ZenohId, links ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareQueryable(DeclareQueryable { - id: 0, // @TODO use proper QueryableId (#703) + id, wire_expr: key_expr, ext_info: info, }), @@ -1024,6 +1020,7 @@ impl HatQueriesTrait for HatCode { &self, tables: &mut Tables, face: &mut Arc, + id: QueryableId, res: &mut Arc, qabl_info: &QueryableInfo, node_id: NodeId, @@ -1040,10 +1037,10 @@ impl HatQueriesTrait for HatCode { declare_peer_queryable(tables, face, res, qabl_info, peer) } } else { - declare_client_queryable(tables, face, res, qabl_info) + declare_client_queryable(tables, face, id, res, qabl_info) } } - _ => declare_client_queryable(tables, face, res, qabl_info), + _ => declare_client_queryable(tables, face, id, res, qabl_info), } } @@ -1051,25 +1048,40 @@ impl HatQueriesTrait for HatCode { &self, tables: &mut Tables, face: &mut Arc, - res: &mut Arc, + id: QueryableId, + res: Option>, node_id: NodeId, - ) { + ) -> Option> { match face.whatami { WhatAmI::Router => { - if let Some(router) = get_router(tables, face, node_id) { - forget_router_queryable(tables, face, res, &router) + if let Some(mut res) = res { + if let Some(router) = get_router(tables, face, node_id) { + forget_router_queryable(tables, face, 
&mut res, &router); + Some(res) + } else { + None + } + } else { + None } } WhatAmI::Peer => { if hat!(tables).full_net(WhatAmI::Peer) { - if let Some(peer) = get_peer(tables, face, node_id) { - forget_peer_queryable(tables, face, res, &peer) + if let Some(mut res) = res { + if let Some(peer) = get_peer(tables, face, node_id) { + forget_peer_queryable(tables, face, &mut res, &peer); + Some(res) + } else { + None + } + } else { + None } } else { - forget_client_queryable(tables, face, res) + forget_client_queryable(tables, face, id) } } - _ => forget_client_queryable(tables, face, res), + _ => forget_client_queryable(tables, face, id), } } diff --git a/zenoh/src/net/runtime/adminspace.rs b/zenoh/src/net/runtime/adminspace.rs index 03b447aae0..e76475f447 100644 --- a/zenoh/src/net/runtime/adminspace.rs +++ b/zenoh/src/net/runtime/adminspace.rs @@ -32,6 +32,7 @@ use std::sync::Mutex; use zenoh_buffers::buffer::SplitBuffer; use zenoh_config::{ConfigValidator, ValidatedMap, WhatAmI}; use zenoh_plugin_trait::{PluginControl, PluginStatus}; +use zenoh_protocol::network::declare::QueryableId; use zenoh_protocol::{ core::{ key_expr::{keyexpr, OwnedKeyExpr}, @@ -59,6 +60,7 @@ type Handler = Arc; pub struct AdminSpace { zid: ZenohId, + queryable_id: QueryableId, primitives: Mutex>>, mappings: Mutex>, handlers: HashMap, @@ -189,6 +191,7 @@ impl AdminSpace { }); let admin = Arc::new(AdminSpace { zid: runtime.zid(), + queryable_id: runtime.next_id(), primitives: Mutex::new(None), mappings: Mutex::new(HashMap::new()), handlers, @@ -278,7 +281,7 @@ impl AdminSpace { ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareQueryable(DeclareQueryable { - id: 0, // @TODO use proper QueryableId (#703) + id: runtime.next_id(), wire_expr: [&root_key, "/**"].concat().into(), ext_info: QueryableInfo { complete: 0, @@ -292,7 +295,7 @@ impl AdminSpace { ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareSubscriber(DeclareSubscriber { - 
id: 0, // @TODO use proper SubscriberId (#703) + id: runtime.next_id(), wire_expr: [&root_key, "/config/**"].concat().into(), ext_info: SubscriberInfo::DEFAULT, }), @@ -431,6 +434,7 @@ impl Primitives for AdminSpace { #[cfg(feature = "unstable")] attachment: query.ext_attachment.map(Into::into), }), + eid: self.queryable_id, }; for (key, handler) in &self.handlers { diff --git a/zenoh/src/net/runtime/mod.rs b/zenoh/src/net/runtime/mod.rs index 7061b38622..8b116b1080 100644 --- a/zenoh/src/net/runtime/mod.rs +++ b/zenoh/src/net/runtime/mod.rs @@ -30,6 +30,7 @@ use async_std::task::JoinHandle; use futures::stream::StreamExt; use futures::Future; use std::any::Any; +use std::sync::atomic::{AtomicU32, Ordering}; use std::sync::Arc; use stop_token::future::FutureExt; use stop_token::{StopSource, TimedOutError}; @@ -48,6 +49,7 @@ use zenoh_transport::{ struct RuntimeState { zid: ZenohId, whatami: WhatAmI, + next_id: AtomicU32, metadata: serde_json::Value, router: Arc, config: Notifier, @@ -114,6 +116,7 @@ impl Runtime { state: Arc::new(RuntimeState { zid, whatami, + next_id: AtomicU32::new(1), // 0 is reserved for routing core metadata, router, config: config.clone(), @@ -154,6 +157,11 @@ impl Runtime { zwrite!(self.state.transport_handlers).push(handler); } + #[inline] + pub fn next_id(&self) -> u32 { + self.state.next_id.fetch_add(1, Ordering::SeqCst) + } + pub async fn close(&self) -> ZResult<()> { log::trace!("Runtime::close())"); drop(self.state.stop_source.write().unwrap().take()); diff --git a/zenoh/src/net/tests/tables.rs b/zenoh/src/net/tests/tables.rs index 80a9dd458a..4560eefaae 100644 --- a/zenoh/src/net/tests/tables.rs +++ b/zenoh/src/net/tests/tables.rs @@ -66,6 +66,7 @@ fn base_test() { zlock!(tables.ctrl_lock).as_ref(), &tables, &mut face.upgrade().unwrap(), + 0, &WireExpr::from(1).with_suffix("four/five"), &sub_info, NodeId::default(), @@ -166,6 +167,76 @@ fn match_test() { } } +#[test] +fn multisub_test() { + let config = Config::default(); + let router 
= Router::new( + ZenohId::try_from([1]).unwrap(), + WhatAmI::Client, + Some(Arc::new(HLC::default())), + &config, + ) + .unwrap(); + let tables = router.tables.clone(); + + let primitives = Arc::new(DummyPrimitives {}); + let face0 = Arc::downgrade(&router.new_primitives(primitives).state); + assert!(face0.upgrade().is_some()); + + // -------------- + let sub_info = SubscriberInfo { + reliability: Reliability::Reliable, + mode: Mode::Push, + }; + declare_subscription( + zlock!(tables.ctrl_lock).as_ref(), + &tables, + &mut face0.upgrade().unwrap(), + 0, + &"sub".into(), + &sub_info, + NodeId::default(), + ); + let optres = Resource::get_resource(zread!(tables.tables)._get_root(), "sub") + .map(|res| Arc::downgrade(&res)); + assert!(optres.is_some()); + let res = optres.unwrap(); + assert!(res.upgrade().is_some()); + + declare_subscription( + zlock!(tables.ctrl_lock).as_ref(), + &tables, + &mut face0.upgrade().unwrap(), + 1, + &"sub".into(), + &sub_info, + NodeId::default(), + ); + assert!(res.upgrade().is_some()); + + undeclare_subscription( + zlock!(tables.ctrl_lock).as_ref(), + &tables, + &mut face0.upgrade().unwrap(), + 0, + &WireExpr::empty(), + NodeId::default(), + ); + assert!(res.upgrade().is_some()); + + undeclare_subscription( + zlock!(tables.ctrl_lock).as_ref(), + &tables, + &mut face0.upgrade().unwrap(), + 1, + &WireExpr::empty(), + NodeId::default(), + ); + assert!(res.upgrade().is_none()); + + tables::close_face(&tables, &face0); +} + #[test] fn clean_test() { let config = Config::default(); @@ -241,6 +312,7 @@ fn clean_test() { zlock!(tables.ctrl_lock).as_ref(), &tables, &mut face0.upgrade().unwrap(), + 0, &"todrop1/todrop11".into(), &sub_info, NodeId::default(), @@ -255,6 +327,7 @@ fn clean_test() { zlock!(tables.ctrl_lock).as_ref(), &tables, &mut face0.upgrade().unwrap(), + 1, &WireExpr::from(1).with_suffix("/todrop12"), &sub_info, NodeId::default(), @@ -270,7 +343,8 @@ fn clean_test() { zlock!(tables.ctrl_lock).as_ref(), &tables, &mut 
face0.upgrade().unwrap(), - &WireExpr::from(1).with_suffix("/todrop12"), + 1, + &WireExpr::empty(), NodeId::default(), ); @@ -284,7 +358,8 @@ fn clean_test() { zlock!(tables.ctrl_lock).as_ref(), &tables, &mut face0.upgrade().unwrap(), - &"todrop1/todrop11".into(), + 0, + &WireExpr::empty(), NodeId::default(), ); assert!(res1.upgrade().is_some()); @@ -302,6 +377,7 @@ fn clean_test() { zlock!(tables.ctrl_lock).as_ref(), &tables, &mut face0.upgrade().unwrap(), + 2, &"todrop3".into(), &sub_info, NodeId::default(), @@ -316,7 +392,8 @@ fn clean_test() { zlock!(tables.ctrl_lock).as_ref(), &tables, &mut face0.upgrade().unwrap(), - &"todrop3".into(), + 2, + &WireExpr::empty(), NodeId::default(), ); assert!(res1.upgrade().is_some()); @@ -331,6 +408,7 @@ fn clean_test() { zlock!(tables.ctrl_lock).as_ref(), &tables, &mut face0.upgrade().unwrap(), + 3, &"todrop5".into(), &sub_info, NodeId::default(), @@ -339,6 +417,7 @@ fn clean_test() { zlock!(tables.ctrl_lock).as_ref(), &tables, &mut face0.upgrade().unwrap(), + 4, &"todrop6".into(), &sub_info, NodeId::default(), @@ -518,6 +597,7 @@ fn client_test() { zlock!(tables.ctrl_lock).as_ref(), &tables, &mut face0.upgrade().unwrap(), + 0, &WireExpr::from(11).with_suffix("/**"), &sub_info, NodeId::default(), @@ -565,6 +645,7 @@ fn client_test() { zlock!(tables.ctrl_lock).as_ref(), &tables, &mut face1.upgrade().unwrap(), + 0, &WireExpr::from(21).with_suffix("/**"), &sub_info, NodeId::default(), @@ -612,6 +693,7 @@ fn client_test() { zlock!(tables.ctrl_lock).as_ref(), &tables, &mut face2.upgrade().unwrap(), + 0, &WireExpr::from(31).with_suffix("/**"), &sub_info, NodeId::default(), diff --git a/zenoh/src/prelude.rs b/zenoh/src/prelude.rs index 59a4bbd96e..177906e9b1 100644 --- a/zenoh/src/prelude.rs +++ b/zenoh/src/prelude.rs @@ -31,7 +31,10 @@ pub(crate) mod common { writer::HasWriter, }; pub use zenoh_core::Resolve; + pub use zenoh_protocol::core::{EndPoint, Locator, ZenohId}; + #[zenoh_macros::unstable] + pub use 
zenoh_protocol::core::{EntityGlobalId, EntityId}; pub use crate::config::{self, Config, ValidatedMap}; pub use crate::handlers::IntoCallbackReceiverPair; @@ -49,6 +52,8 @@ pub(crate) mod common { pub use crate::sample::Locality; #[cfg(not(feature = "unstable"))] pub(crate) use crate::sample::Locality; + #[zenoh_macros::unstable] + pub use crate::sample::SourceInfo; pub use crate::sample::{Sample, SampleKind}; pub use crate::publication::Priority; diff --git a/zenoh/src/publication.rs b/zenoh/src/publication.rs index 9fb4bdf6c3..2a1a58ebd9 100644 --- a/zenoh/src/publication.rs +++ b/zenoh/src/publication.rs @@ -13,14 +13,11 @@ // //! Publishing primitives. -use crate::encoding::Encoding; -use crate::key_expr::KeyExpr; use crate::net::primitives::Primitives; -use crate::payload::Payload; +use crate::prelude::*; #[zenoh_macros::unstable] use crate::sample::Attachment; use crate::sample::{DataInfo, QoS, Sample, SampleKind}; -use crate::Locality; use crate::SessionRef; use crate::Undeclarable; #[cfg(feature = "unstable")] @@ -30,10 +27,11 @@ use crate::{ }; use std::future::Ready; use zenoh_core::{zread, AsyncResolve, Resolvable, Resolve, SyncResolve}; -use zenoh_keyexpr::keyexpr; use zenoh_protocol::network::push::ext; use zenoh_protocol::network::Mapping; use zenoh_protocol::network::Push; +#[zenoh_macros::unstable] +use zenoh_protocol::zenoh::ext::SourceInfoType; use zenoh_protocol::zenoh::Del; use zenoh_protocol::zenoh::PushBody; use zenoh_protocol::zenoh::Put; @@ -148,6 +146,8 @@ impl SyncResolve for PutBuilder<'_, '_> { let publisher = Publisher { session, + #[cfg(feature = "unstable")] + eid: 0, // This is a one shot Publisher key_expr: key_expr?, congestion_control, priority, @@ -160,6 +160,8 @@ impl SyncResolve for PutBuilder<'_, '_> { self.kind, self.encoding, #[cfg(feature = "unstable")] + None, + #[cfg(feature = "unstable")] self.attachment, ) } @@ -241,6 +243,8 @@ impl std::fmt::Debug for PublisherRef<'_> { #[derive(Debug, Clone)] pub struct Publisher<'a> { 
pub(crate) session: SessionRef<'a>, + #[cfg(feature = "unstable")] + pub(crate) eid: EntityId, pub(crate) key_expr: KeyExpr<'a>, pub(crate) congestion_control: CongestionControl, pub(crate) priority: Priority, @@ -248,6 +252,29 @@ pub struct Publisher<'a> { } impl<'a> Publisher<'a> { + /// Returns the [`EntityGlobalId`] of this Publisher. + /// + /// # Examples + /// ``` + /// # async_std::task::block_on(async { + /// use zenoh::prelude::r#async::*; + /// + /// let session = zenoh::open(config::peer()).res().await.unwrap(); + /// let publisher = session.declare_publisher("key/expression") + /// .res() + /// .await + /// .unwrap(); + /// let publisher_id = publisher.id(); + /// # }) + /// ``` + #[zenoh_macros::unstable] + pub fn id(&self) -> EntityGlobalId { + EntityGlobalId { + zid: self.session.zid(), + eid: self.eid, + } + } + pub fn key_expr(&self) -> &KeyExpr<'a> { &self.key_expr } @@ -317,6 +344,8 @@ impl<'a> Publisher<'a> { kind, encoding: Encoding::ZENOH_BYTES, #[cfg(feature = "unstable")] + source_info: None, + #[cfg(feature = "unstable")] attachment: None, } } @@ -604,6 +633,8 @@ pub struct Publication<'a> { kind: SampleKind, encoding: Encoding, #[cfg(feature = "unstable")] + pub(crate) source_info: Option, + #[cfg(feature = "unstable")] pub(crate) attachment: Option, } @@ -618,6 +649,27 @@ impl<'a> Publication<'a> { self.attachment = Some(attachment); self } + + /// Send data with the given [`SourceInfo`]. 
+ /// + /// # Examples + /// ``` + /// # async_std::task::block_on(async { + /// use zenoh::prelude::r#async::*; + /// + /// let session = zenoh::open(config::peer()).res().await.unwrap(); + /// let publisher = session.declare_publisher("key/expression").res().await.unwrap(); + /// publisher.put("Value").with_source_info(SourceInfo { + /// source_id: Some(publisher.id()), + /// source_sn: Some(0), + /// }).res().await.unwrap(); + /// # }) + /// ``` + #[zenoh_macros::unstable] + pub fn with_source_info(mut self, source_info: SourceInfo) -> Self { + self.source_info = Some(source_info); + self + } } impl Resolvable for Publication<'_> { @@ -632,6 +684,8 @@ impl SyncResolve for Publication<'_> { self.kind, self.encoding, #[cfg(feature = "unstable")] + self.source_info, + #[cfg(feature = "unstable")] self.attachment, ) } @@ -661,6 +715,8 @@ impl<'a> Sink for Publisher<'a> { kind: item.kind, encoding: item.encoding, #[cfg(feature = "unstable")] + source_info: None, + #[cfg(feature = "unstable")] attachment: item.attachment, } .res_sync() @@ -784,8 +840,12 @@ impl<'a, 'b> SyncResolve for PublisherBuilder<'a, 'b> { self.session .declare_publication_intent(key_expr.clone()) .res_sync()?; + #[cfg(feature = "unstable")] + let eid = self.session.runtime.next_id(); let publisher = Publisher { session: self.session, + #[cfg(feature = "unstable")] + eid, key_expr, congestion_control: self.congestion_control, priority: self.priority, @@ -809,6 +869,7 @@ fn resolve_put( payload: Payload, kind: SampleKind, encoding: Encoding, + #[cfg(feature = "unstable")] source_info: Option, #[cfg(feature = "unstable")] attachment: Option, ) -> ZResult<()> { log::trace!("write({:?}, [...])", &publisher.key_expr); @@ -842,6 +903,12 @@ fn resolve_put( PushBody::Put(Put { timestamp, encoding: encoding.clone().into(), + #[cfg(feature = "unstable")] + ext_sinfo: source_info.map(|s| SourceInfoType { + id: s.source_id.unwrap_or_default(), + sn: s.source_sn.unwrap_or_default() as u32, + }), + 
#[cfg(not(feature = "unstable"))] ext_sinfo: None, #[cfg(feature = "shared-memory")] ext_shm: None, @@ -861,6 +928,12 @@ fn resolve_put( } PushBody::Del(Del { timestamp, + #[cfg(feature = "unstable")] + ext_sinfo: source_info.map(|s| SourceInfoType { + id: s.source_id.unwrap_or_default(), + sn: s.source_sn.unwrap_or_default() as u32, + }), + #[cfg(not(feature = "unstable"))] ext_sinfo: None, ext_attachment, ext_unknown: vec![], diff --git a/zenoh/src/queryable.rs b/zenoh/src/queryable.rs index 6bd78d4fc7..bd5ec81101 100644 --- a/zenoh/src/queryable.rs +++ b/zenoh/src/queryable.rs @@ -17,7 +17,6 @@ use crate::handlers::{locked, DefaultHandler}; use crate::net::primitives::Primitives; use crate::prelude::*; -use crate::sample::DataInfo; use crate::Id; use crate::SessionRef; use crate::Undeclarable; @@ -28,11 +27,9 @@ use std::future::Ready; use std::ops::Deref; use std::sync::Arc; use zenoh_core::{AsyncResolve, Resolvable, SyncResolve}; -use zenoh_protocol::{ - core::WireExpr, - network::{response, Mapping, RequestId, Response, ResponseFinal}, - zenoh::{self, ext::ValueType, reply::ReplyBody, Del, Put, ResponseBody}, -}; +use zenoh_protocol::core::{EntityId, WireExpr}; +use zenoh_protocol::network::{response, Mapping, RequestId, Response, ResponseFinal}; +use zenoh_protocol::zenoh::{self, ext::ValueType, reply::ReplyBody, Del, Put, ResponseBody}; use zenoh_result::ZResult; pub(crate) struct QueryInner { @@ -64,6 +61,7 @@ impl Drop for QueryInner { #[derive(Clone)] pub struct Query { pub(crate) inner: Arc, + pub(crate) eid: EntityId, } impl Query { @@ -192,22 +190,12 @@ impl SyncResolve for ReplyBuilder<'_> { kind, encoding, timestamp, - qos, #[cfg(feature = "unstable")] source_info, #[cfg(feature = "unstable")] attachment, + .. 
} = sample; - #[allow(unused_mut)] - let mut data_info = DataInfo { - kind, - encoding: Some(encoding), - timestamp, - qos, - source_id: None, - source_sn: None, - }; - // Use a macro for inferring the proper const extension ID between Put and Del cases macro_rules! ext_attachment { () => {{ @@ -222,21 +210,17 @@ impl SyncResolve for ReplyBuilder<'_> { ext_attachment }}; } - + #[allow(unused_mut)] + let mut ext_sinfo = None; #[cfg(feature = "unstable")] { - data_info.source_id = source_info.source_id; - data_info.source_sn = source_info.source_sn; + if source_info.source_id.is_some() || source_info.source_sn.is_some() { + ext_sinfo = Some(zenoh::put::ext::SourceInfoType { + id: source_info.source_id.unwrap_or_default(), + sn: source_info.source_sn.unwrap_or_default() as u32, + }) + } } - let ext_sinfo = if data_info.source_id.is_some() || data_info.source_sn.is_some() { - Some(zenoh::put::ext::SourceInfoType { - zid: data_info.source_id.unwrap_or_default(), - eid: 0, // @TODO use proper EntityId (#703) - sn: data_info.source_sn.unwrap_or_default() as u32, - }) - } else { - None - }; self.query.inner.primitives.send_response(Response { rid: self.query.inner.qid, wire_expr: WireExpr { @@ -249,8 +233,8 @@ impl SyncResolve for ReplyBuilder<'_> { ext_unknown: vec![], payload: match kind { SampleKind::Put => ReplyBody::Put(Put { - timestamp: data_info.timestamp, - encoding: data_info.encoding.unwrap_or_default().into(), + timestamp, + encoding: encoding.into(), ext_sinfo, #[cfg(feature = "shared-memory")] ext_shm: None, @@ -270,7 +254,7 @@ impl SyncResolve for ReplyBuilder<'_> { ext_tstamp: None, ext_respid: Some(response::ext::ResponderIdType { zid: self.query.inner.zid, - eid: 0, // @TODO use proper EntityId (#703) + eid: self.query.eid, }), }); Ok(()) @@ -300,7 +284,7 @@ impl SyncResolve for ReplyBuilder<'_> { ext_tstamp: None, ext_respid: Some(response::ext::ResponderIdType { zid: self.query.inner.zid, - eid: 0, // @TODO use proper EntityId (#703) + eid: 
self.query.eid, }), }); Ok(()) @@ -607,6 +591,29 @@ pub struct Queryable<'a, Receiver> { } impl<'a, Receiver> Queryable<'a, Receiver> { + /// Returns the [`EntityGlobalId`] of this Queryable. + /// + /// # Examples + /// ``` + /// # async_std::task::block_on(async { + /// use zenoh::prelude::r#async::*; + /// + /// let session = zenoh::open(config::peer()).res().await.unwrap(); + /// let queryable = session.declare_queryable("key/expression") + /// .res() + /// .await + /// .unwrap(); + /// let queryable_id = queryable.id(); + /// # }) + /// ``` + #[zenoh_macros::unstable] + pub fn id(&self) -> EntityGlobalId { + EntityGlobalId { + zid: self.queryable.session.zid(), + eid: self.queryable.state.id, + } + } + #[inline] pub fn undeclare(self) -> impl Resolve> + 'a { Undeclarable::undeclare_inner(self, ()) diff --git a/zenoh/src/sample.rs b/zenoh/src/sample.rs index 543dd62e84..af4a58956d 100644 --- a/zenoh/src/sample.rs +++ b/zenoh/src/sample.rs @@ -15,16 +15,16 @@ //! Sample primitives use crate::encoding::Encoding; use crate::payload::Payload; -use crate::prelude::{KeyExpr, ZenohId}; +use crate::prelude::{KeyExpr, Value}; use crate::time::{new_reception_timestamp, Timestamp}; use crate::Priority; -use crate::Value; #[zenoh_macros::unstable] use serde::Serialize; use std::{ convert::{TryFrom, TryInto}, fmt, }; +use zenoh_protocol::core::EntityGlobalId; use zenoh_protocol::{core::CongestionControl, network::push::ext::QoSType}; pub type SourceSn = u64; @@ -52,7 +52,7 @@ pub(crate) struct DataInfo { pub kind: SampleKind, pub encoding: Option, pub timestamp: Option, - pub source_id: Option, + pub source_id: Option, pub source_sn: Option, pub qos: QoS, } @@ -61,16 +61,24 @@ pub(crate) struct DataInfo { #[zenoh_macros::unstable] #[derive(Debug, Clone)] pub struct SourceInfo { - /// The [`ZenohId`] of the zenoh instance that published the concerned [`Sample`]. - pub source_id: Option, + /// The [`EntityGlobalId`] of the zenoh entity that published the concerned [`Sample`]. 
+ pub source_id: Option, /// The sequence number of the [`Sample`] from the source. pub source_sn: Option, } #[test] #[cfg(feature = "unstable")] +#[cfg(not(all(target_os = "macos", target_arch = "aarch64")))] fn source_info_stack_size() { - assert_eq!(std::mem::size_of::(), 16 * 2); + assert_eq!(std::mem::size_of::(), 40); +} + +#[test] +#[cfg(feature = "unstable")] +#[cfg(all(target_os = "macos", target_arch = "aarch64"))] +fn source_info_stack_size() { + assert_eq!(std::mem::size_of::(), 48); } #[zenoh_macros::unstable] diff --git a/zenoh/src/session.rs b/zenoh/src/session.rs index 87c416c209..861acf71de 100644 --- a/zenoh/src/session.rs +++ b/zenoh/src/session.rs @@ -48,7 +48,7 @@ use std::convert::TryFrom; use std::convert::TryInto; use std::fmt; use std::ops::Deref; -use std::sync::atomic::{AtomicU16, AtomicUsize, Ordering}; +use std::sync::atomic::{AtomicU16, Ordering}; use std::sync::Arc; use std::sync::RwLock; use std::time::Duration; @@ -57,6 +57,8 @@ use zenoh_buffers::ZBuf; use zenoh_collections::SingleOrVec; use zenoh_config::unwrap_or_default; use zenoh_core::{zconfigurable, zread, Resolve, ResolveClosure, ResolveFuture, SyncResolve}; +#[cfg(feature = "unstable")] +use zenoh_protocol::network::declare::SubscriberId; use zenoh_protocol::network::AtomicRequestId; use zenoh_protocol::network::RequestId; use zenoh_protocol::zenoh::reply::ReplyBody; @@ -97,9 +99,10 @@ pub(crate) struct SessionState { pub(crate) primitives: Option>, // @TODO replace with MaybeUninit ?? 
pub(crate) expr_id_counter: AtomicExprId, // @TODO: manage rollover and uniqueness pub(crate) qid_counter: AtomicRequestId, - pub(crate) decl_id_counter: AtomicUsize, pub(crate) local_resources: HashMap, pub(crate) remote_resources: HashMap, + #[cfg(feature = "unstable")] + pub(crate) remote_subscribers: HashMap>, //pub(crate) publications: Vec, pub(crate) subscribers: HashMap>, pub(crate) queryables: HashMap>, @@ -121,9 +124,10 @@ impl SessionState { primitives: None, expr_id_counter: AtomicExprId::new(1), // Note: start at 1 because 0 is reserved for NO_RESOURCE qid_counter: AtomicRequestId::new(0), - decl_id_counter: AtomicUsize::new(0), local_resources: HashMap::new(), remote_resources: HashMap::new(), + #[cfg(feature = "unstable")] + remote_subscribers: HashMap::new(), //publications: Vec::new(), subscribers: HashMap::new(), queryables: HashMap::new(), @@ -967,19 +971,20 @@ impl Session { ) -> ZResult> { let mut state = zwrite!(self.state); log::trace!("subscribe({:?})", key_expr); - let id = state.decl_id_counter.fetch_add(1, Ordering::SeqCst); + let id = self.runtime.next_id(); let key_expr = match scope { Some(scope) => scope / key_expr, None => key_expr.clone(), }; - let sub_state = Arc::new(SubscriberState { + let mut sub_state = SubscriberState { id, + remote_id: id, key_expr: key_expr.clone().into_owned(), scope: scope.clone().map(|e| e.into_owned()), origin, callback, - }); + }; #[cfg(not(feature = "unstable"))] let declared_sub = origin != Locality::SessionLocal; @@ -989,29 +994,39 @@ impl Session { .as_str() .starts_with(crate::liveliness::PREFIX_LIVELINESS); - let declared_sub = declared_sub - .then(|| { - match state - .aggregated_subscribers // TODO: can this be an OwnedKeyExpr? 
- .iter() - .find(|s| s.includes( &key_expr)) - { - Some(join_sub) => { - let joined_sub = state.subscribers.values().any(|s| { - s.origin != Locality::SessionLocal && join_sub.includes(&s.key_expr) - }); - (!joined_sub).then(|| join_sub.clone().into()) - } - None => { - let twin_sub = state - .subscribers - .values() - .any(|s| s.origin != Locality::SessionLocal && s.key_expr == key_expr); - (!twin_sub).then(|| key_expr.clone()) + let declared_sub = + declared_sub + .then(|| { + match state + .aggregated_subscribers + .iter() + .find(|s| s.includes(&key_expr)) + { + Some(join_sub) => { + if let Some(joined_sub) = state.subscribers.values().find(|s| { + s.origin != Locality::SessionLocal && join_sub.includes(&s.key_expr) + }) { + sub_state.remote_id = joined_sub.remote_id; + None + } else { + Some(join_sub.clone().into()) + } + } + None => { + if let Some(twin_sub) = state.subscribers.values().find(|s| { + s.origin != Locality::SessionLocal && s.key_expr == key_expr + }) { + sub_state.remote_id = twin_sub.remote_id; + None + } else { + Some(key_expr.clone()) + } + } } - } - }) - .flatten(); + }) + .flatten(); + + let sub_state = Arc::new(sub_state); state.subscribers.insert(sub_state.id, sub_state.clone()); for res in state @@ -1064,7 +1079,7 @@ impl Session { ext_tstamp: None, ext_nodeid: declare::ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareSubscriber(DeclareSubscriber { - id: id as u32, + id, wire_expr: key_expr.to_wire(self).to_owned(), ext_info: *info, }), @@ -1080,7 +1095,7 @@ impl Session { Ok(sub_state) } - pub(crate) fn unsubscribe(&self, sid: usize) -> ZResult<()> { + pub(crate) fn unsubscribe(&self, sid: Id) -> ZResult<()> { let mut state = zwrite!(self.state); if let Some(sub_state) = state.subscribers.remove(&sid) { trace!("unsubscribe({:?})", sub_state); @@ -1110,65 +1125,28 @@ impl Session { if send_forget { // Note: there might be several Subscribers on the same KeyExpr. 
// Before calling forget_subscriber(key_expr), check if this was the last one. - let key_expr = &sub_state.key_expr; - match state - .aggregated_subscribers - .iter() - .find(|s| s.includes(key_expr)) - { - Some(join_sub) => { - let joined_sub = state.subscribers.values().any(|s| { - s.origin != Locality::SessionLocal && join_sub.includes(&s.key_expr) - }); - if !joined_sub { - let primitives = state.primitives.as_ref().unwrap().clone(); - let wire_expr = WireExpr::from(join_sub).to_owned(); - drop(state); - primitives.send_declare(Declare { - ext_qos: ext::QoSType::DECLARE, - ext_tstamp: None, - ext_nodeid: ext::NodeIdType::DEFAULT, - body: DeclareBody::UndeclareSubscriber(UndeclareSubscriber { - id: 0, // @TODO use proper SubscriberId (#703) - ext_wire_expr: WireExprType { wire_expr }, - }), - }); - - #[cfg(feature = "unstable")] - { - let state = zread!(self.state); - self.update_status_down(&state, &sub_state.key_expr) - } - } - } - None => { - let twin_sub = state - .subscribers - .values() - .any(|s| s.origin != Locality::SessionLocal && s.key_expr == *key_expr); - if !twin_sub { - let primitives = state.primitives.as_ref().unwrap().clone(); - drop(state); - primitives.send_declare(Declare { - ext_qos: ext::QoSType::DECLARE, - ext_tstamp: None, - ext_nodeid: ext::NodeIdType::DEFAULT, - body: DeclareBody::UndeclareSubscriber(UndeclareSubscriber { - id: 0, // @TODO use proper SubscriberId (#703) - ext_wire_expr: WireExprType { - wire_expr: key_expr.to_wire(self).to_owned(), - }, - }), - }); - - #[cfg(feature = "unstable")] - { - let state = zread!(self.state); - self.update_status_down(&state, &sub_state.key_expr) - } - } + if !state.subscribers.values().any(|s| { + s.origin != Locality::SessionLocal && s.remote_id == sub_state.remote_id + }) { + let primitives = state.primitives.as_ref().unwrap().clone(); + drop(state); + primitives.send_declare(Declare { + ext_qos: declare::ext::QoSType::DECLARE, + ext_tstamp: None, + ext_nodeid: 
declare::ext::NodeIdType::DEFAULT, + body: DeclareBody::UndeclareSubscriber(UndeclareSubscriber { + id: sub_state.remote_id, + ext_wire_expr: WireExprType { + wire_expr: WireExpr::empty(), + }, + }), + }); + #[cfg(feature = "unstable")] + { + let state = zread!(self.state); + self.update_status_down(&state, &sub_state.key_expr) } - }; + } } Ok(()) } else { @@ -1185,7 +1163,7 @@ impl Session { ) -> ZResult> { let mut state = zwrite!(self.state); log::trace!("queryable({:?})", key_expr); - let id = state.decl_id_counter.fetch_add(1, Ordering::SeqCst); + let id = self.runtime.next_id(); let qable_state = Arc::new(QueryableState { id, key_expr: key_expr.to_owned(), @@ -1193,158 +1171,48 @@ impl Session { origin, callback, }); - #[cfg(feature = "complete_n")] - { - state.queryables.insert(id, qable_state.clone()); - if origin != Locality::SessionLocal && complete { - let primitives = state.primitives.as_ref().unwrap().clone(); - let complete = Session::complete_twin_qabls(&state, key_expr); - drop(state); - let qabl_info = QueryableInfo { - complete, - distance: 0, - }; - primitives.send_declare(Declare { - ext_qos: declare::ext::QoSType::DECLARE, - ext_tstamp: None, - ext_nodeid: declare::ext::NodeIdType::DEFAULT, - body: DeclareBody::DeclareQueryable(DeclareQueryable { - id: id as u32, - wire_expr: key_expr.to_owned(), - ext_info: qabl_info, - }), - }); - } - } - #[cfg(not(feature = "complete_n"))] - { - let twin_qabl = Session::twin_qabl(&state, key_expr); - let complete_twin_qabl = twin_qabl && Session::complete_twin_qabl(&state, key_expr); - - state.queryables.insert(id, qable_state.clone()); + state.queryables.insert(id, qable_state.clone()); - if origin != Locality::SessionLocal && (!twin_qabl || (!complete_twin_qabl && complete)) - { - let primitives = state.primitives.as_ref().unwrap().clone(); - let complete = u8::from(!complete_twin_qabl && complete); - drop(state); - let qabl_info = QueryableInfo { - complete, - distance: 0, - }; - 
primitives.send_declare(Declare { - ext_qos: declare::ext::QoSType::DECLARE, - ext_tstamp: None, - ext_nodeid: declare::ext::NodeIdType::DEFAULT, - body: DeclareBody::DeclareQueryable(DeclareQueryable { - id: id as u32, - wire_expr: key_expr.to_owned(), - ext_info: qabl_info, - }), - }); - } + if origin != Locality::SessionLocal { + let primitives = state.primitives.as_ref().unwrap().clone(); + drop(state); + let qabl_info = QueryableInfo { + complete: if complete { 1 } else { 0 }, + distance: 0, + }; + primitives.send_declare(Declare { + ext_qos: declare::ext::QoSType::DECLARE, + ext_tstamp: None, + ext_nodeid: declare::ext::NodeIdType::DEFAULT, + body: DeclareBody::DeclareQueryable(DeclareQueryable { + id, + wire_expr: key_expr.to_owned(), + ext_info: qabl_info, + }), + }); } Ok(qable_state) } - pub(crate) fn twin_qabl(state: &SessionState, key: &WireExpr) -> bool { - state.queryables.values().any(|q| { - q.origin != Locality::SessionLocal - && state.local_wireexpr_to_expr(&q.key_expr).unwrap() - == state.local_wireexpr_to_expr(key).unwrap() - }) - } - - #[cfg(not(feature = "complete_n"))] - pub(crate) fn complete_twin_qabl(state: &SessionState, key: &WireExpr) -> bool { - state.queryables.values().any(|q| { - q.origin != Locality::SessionLocal - && q.complete - && state.local_wireexpr_to_expr(&q.key_expr).unwrap() - == state.local_wireexpr_to_expr(key).unwrap() - }) - } - - #[cfg(feature = "complete_n")] - pub(crate) fn complete_twin_qabls(state: &SessionState, key: &WireExpr) -> u8 { - state - .queryables - .values() - .filter(|q| { - q.origin != Locality::SessionLocal - && q.complete - && state.local_wireexpr_to_expr(&q.key_expr).unwrap() - == state.local_wireexpr_to_expr(key).unwrap() - }) - .count() as u8 - } - - pub(crate) fn close_queryable(&self, qid: usize) -> ZResult<()> { + pub(crate) fn close_queryable(&self, qid: Id) -> ZResult<()> { let mut state = zwrite!(self.state); if let Some(qable_state) = state.queryables.remove(&qid) { 
trace!("close_queryable({:?})", qable_state); if qable_state.origin != Locality::SessionLocal { let primitives = state.primitives.as_ref().unwrap().clone(); - if Session::twin_qabl(&state, &qable_state.key_expr) { - // There still exist Queryables on the same KeyExpr. - if qable_state.complete { - #[cfg(feature = "complete_n")] - { - let complete = - Session::complete_twin_qabls(&state, &qable_state.key_expr); - drop(state); - let qabl_info = QueryableInfo { - complete, - distance: 0, - }; - primitives.send_declare(Declare { - ext_qos: declare::ext::QoSType::DECLARE, - ext_tstamp: None, - ext_nodeid: declare::ext::NodeIdType::DEFAULT, - body: DeclareBody::DeclareQueryable(DeclareQueryable { - id: 0, // @TODO use proper QueryableId (#703) - wire_expr: qable_state.key_expr.clone(), - ext_info: qabl_info, - }), - }); - } - #[cfg(not(feature = "complete_n"))] - { - if !Session::complete_twin_qabl(&state, &qable_state.key_expr) { - drop(state); - let qabl_info = QueryableInfo { - complete: 0, - distance: 0, - }; - primitives.send_declare(Declare { - ext_qos: declare::ext::QoSType::DECLARE, - ext_tstamp: None, - ext_nodeid: declare::ext::NodeIdType::DEFAULT, - body: DeclareBody::DeclareQueryable(DeclareQueryable { - id: 0, // @TODO use proper QueryableId (#703) - wire_expr: qable_state.key_expr.clone(), - ext_info: qabl_info, - }), - }); - } - } - } - } else { - // There are no more Queryables on the same KeyExpr. 
- drop(state); - primitives.send_declare(Declare { - ext_qos: declare::ext::QoSType::DECLARE, - ext_tstamp: None, - ext_nodeid: declare::ext::NodeIdType::DEFAULT, - body: DeclareBody::UndeclareQueryable(UndeclareQueryable { - id: 0, // @TODO use proper QueryableId (#703) - ext_wire_expr: WireExprType { - wire_expr: qable_state.key_expr.clone(), - }, - }), - }); - } + drop(state); + primitives.send_declare(Declare { + ext_qos: declare::ext::QoSType::DECLARE, + ext_tstamp: None, + ext_nodeid: declare::ext::NodeIdType::DEFAULT, + body: DeclareBody::UndeclareQueryable(UndeclareQueryable { + id: qable_state.id, + ext_wire_expr: WireExprType { + wire_expr: qable_state.key_expr.clone(), + }, + }), + }); } Ok(()) } else { @@ -1359,7 +1227,7 @@ impl Session { ) -> ZResult> { let mut state = zwrite!(self.state); log::trace!("declare_liveliness({:?})", key_expr); - let id = state.decl_id_counter.fetch_add(1, Ordering::SeqCst); + let id = self.runtime.next_id(); let key_expr = KeyExpr::from(*crate::liveliness::KE_PREFIX_LIVELINESS / key_expr); let tok_state = Arc::new(LivelinessTokenState { id, @@ -1374,7 +1242,7 @@ impl Session { ext_tstamp: None, ext_nodeid: declare::ext::NodeIdType::DEFAULT, body: DeclareBody::DeclareSubscriber(DeclareSubscriber { - id: id as u32, + id, wire_expr: key_expr.to_wire(self).to_owned(), ext_info: SubscriberInfo::DEFAULT, }), @@ -1383,7 +1251,7 @@ impl Session { } #[zenoh_macros::unstable] - pub(crate) fn undeclare_liveliness(&self, tid: usize) -> ZResult<()> { + pub(crate) fn undeclare_liveliness(&self, tid: Id) -> ZResult<()> { let mut state = zwrite!(self.state); if let Some(tok_state) = state.tokens.remove(&tid) { trace!("undeclare_liveliness({:?})", tok_state); @@ -1398,10 +1266,8 @@ impl Session { ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, body: DeclareBody::UndeclareSubscriber(UndeclareSubscriber { - id: 0, // @TODO use proper SubscriberId (#703) - ext_wire_expr: WireExprType { - wire_expr: key_expr.to_wire(self).to_owned(), - 
}, + id: tok_state.id, + ext_wire_expr: WireExprType::null(), }), }); } @@ -1418,8 +1284,7 @@ impl Session { callback: Callback<'static, MatchingStatus>, ) -> ZResult> { let mut state = zwrite!(self.state); - - let id = state.decl_id_counter.fetch_add(1, Ordering::SeqCst); + let id = self.runtime.next_id(); log::trace!("matches_listener({:?}) => {id}", publisher.key_expr); let listener_state = Arc::new(MatchingListenerState { id, @@ -1554,7 +1419,7 @@ impl Session { } #[zenoh_macros::unstable] - pub(crate) fn undeclare_matches_listener_inner(&self, sid: usize) -> ZResult<()> { + pub(crate) fn undeclare_matches_listener_inner(&self, sid: Id) -> ZResult<()> { let mut state = zwrite!(self.state); if let Some(state) = state.matching_listeners.remove(&sid) { trace!("undeclare_matches_listener_inner({:?})", state); @@ -1856,15 +1721,15 @@ impl Session { body: Option, #[cfg(feature = "unstable")] attachment: Option, ) { - let (primitives, key_expr, callbacks) = { + let (primitives, key_expr, queryables) = { let state = zread!(self.state); match state.wireexpr_to_keyexpr(key_expr, local) { Ok(key_expr) => { - let callbacks = state + let queryables = state .queryables - .values() + .iter() .filter( - |queryable| + |(_, queryable)| (queryable.origin == Locality::Any || (local == (queryable.origin == Locality::SessionLocal))) && @@ -1881,12 +1746,12 @@ impl Session { } } ) - .map(|qable| qable.callback.clone()) - .collect::>>(); + .map(|(id, qable)| (*id, qable.callback.clone())) + .collect::)>>(); ( state.primitives.as_ref().unwrap().clone(), key_expr.into_owned(), - callbacks, + queryables, ) } Err(err) => { @@ -1898,29 +1763,30 @@ impl Session { let parameters = parameters.to_owned(); - let zid = self.runtime.zid(); // @TODO build/use prebuilt specific zid + let zid = self.runtime.zid(); - let query = Query { - inner: Arc::new(QueryInner { - key_expr, - parameters, - value: body.map(|b| Value { - payload: b.payload.into(), - encoding: b.encoding.into(), - }), - qid, - zid, 
- primitives: if local { - Arc::new(self.clone()) - } else { - primitives - }, - #[cfg(feature = "unstable")] - attachment, + let query_inner = Arc::new(QueryInner { + key_expr, + parameters, + value: body.map(|b| Value { + payload: b.payload.into(), + encoding: b.encoding.into(), }), - }; - for callback in callbacks.iter() { - callback(query.clone()); + qid, + zid, + primitives: if local { + Arc::new(self.clone()) + } else { + primitives + }, + #[cfg(feature = "unstable")] + attachment, + }); + for (eid, callback) in queryables { + callback(Query { + inner: query_inner.clone(), + eid, + }); } } } @@ -2111,9 +1977,13 @@ impl Primitives for Session { trace!("recv DeclareSubscriber {} {:?}", m.id, m.wire_expr); #[cfg(feature = "unstable")] { - let state = zread!(self.state); - match state.wireexpr_to_keyexpr(&m.wire_expr, false) { + let mut state = zwrite!(self.state); + match state + .wireexpr_to_keyexpr(&m.wire_expr, false) + .map(|e| e.into_owned()) + { Ok(expr) => { + state.remote_subscribers.insert(m.id, expr.clone()); self.update_status_up(&state, &expr); if expr @@ -2141,33 +2011,30 @@ impl Primitives for Session { trace!("recv UndeclareSubscriber {:?}", m.id); #[cfg(feature = "unstable")] { - let state = zread!(self.state); - match state.wireexpr_to_keyexpr(&m.ext_wire_expr.wire_expr, false) { - Ok(expr) => { - self.update_status_down(&state, &expr); + let mut state = zwrite!(self.state); + if let Some(expr) = state.remote_subscribers.remove(&m.id) { + self.update_status_down(&state, &expr); - if expr - .as_str() - .starts_with(crate::liveliness::PREFIX_LIVELINESS) - { - drop(state); - let data_info = DataInfo { - kind: SampleKind::Delete, - ..Default::default() - }; - self.handle_data( - false, - &m.ext_wire_expr.wire_expr, - Some(data_info), - ZBuf::default(), - #[cfg(feature = "unstable")] - None, - ); - } - } - Err(err) => { - log::error!("Received Forget Subscriber for unkown key_expr: {}", err) + if expr + .as_str() + 
.starts_with(crate::liveliness::PREFIX_LIVELINESS) + { + drop(state); + let data_info = DataInfo { + kind: SampleKind::Delete, + ..Default::default() + }; + self.handle_data( + false, + &m.ext_wire_expr.wire_expr, + Some(data_info), + ZBuf::default(), + #[cfg(feature = "unstable")] + None, + ); } + } else { + log::error!("Received Undeclare Subscriber for unkown id: {}", m.id); } } } @@ -2194,7 +2061,7 @@ impl Primitives for Session { encoding: Some(m.encoding.into()), timestamp: m.timestamp, qos: QoS::from(msg.ext_qos), - source_id: m.ext_sinfo.as_ref().map(|i| i.zid), + source_id: m.ext_sinfo.as_ref().map(|i| i.id.clone()), source_sn: m.ext_sinfo.as_ref().map(|i| i.sn as u64), }; self.handle_data( @@ -2212,7 +2079,7 @@ impl Primitives for Session { encoding: None, timestamp: m.timestamp, qos: QoS::from(msg.ext_qos), - source_id: m.ext_sinfo.as_ref().map(|i| i.zid), + source_id: m.ext_sinfo.as_ref().map(|i| i.id.clone()), source_sn: m.ext_sinfo.as_ref().map(|i| i.sn as u64), }; self.handle_data( @@ -2272,7 +2139,7 @@ impl Primitives for Session { }, }; let replier_id = match e.ext_sinfo { - Some(info) => info.zid, + Some(info) => info.id.zid, None => ZenohId::rand(), }; let new_reply = Reply { @@ -2366,7 +2233,7 @@ impl Primitives for Session { encoding: Some(encoding.into()), timestamp, qos: QoS::from(msg.ext_qos), - source_id: ext_sinfo.as_ref().map(|i| i.zid), + source_id: ext_sinfo.as_ref().map(|i| i.id.clone()), source_sn: ext_sinfo.as_ref().map(|i| i.sn as u64), }, #[cfg(feature = "unstable")] @@ -2384,7 +2251,7 @@ impl Primitives for Session { encoding: None, timestamp, qos: QoS::from(msg.ext_qos), - source_id: ext_sinfo.as_ref().map(|i| i.zid), + source_id: ext_sinfo.as_ref().map(|i| i.id.clone()), source_sn: ext_sinfo.as_ref().map(|i| i.sn as u64), }, #[cfg(feature = "unstable")] diff --git a/zenoh/src/subscriber.rs b/zenoh/src/subscriber.rs index c707218017..e276d0c6d0 100644 --- a/zenoh/src/subscriber.rs +++ b/zenoh/src/subscriber.rs @@ -25,6 +25,8 @@ 
use std::future::Ready; use std::ops::{Deref, DerefMut}; use std::sync::Arc; use zenoh_core::{AsyncResolve, Resolvable, Resolve, SyncResolve}; +#[cfg(feature = "unstable")] +use zenoh_protocol::core::EntityGlobalId; use zenoh_protocol::network::declare::{subscriber::ext::SubscriberInfo, Mode}; /// The kind of reliability. @@ -32,6 +34,7 @@ pub use zenoh_protocol::core::Reliability; pub(crate) struct SubscriberState { pub(crate) id: Id, + pub(crate) remote_id: Id, pub(crate) key_expr: KeyExpr<'static>, pub(crate) scope: Option>, pub(crate) origin: Locality, @@ -741,6 +744,29 @@ impl<'a, Receiver> PullSubscriber<'a, Receiver> { } impl<'a, Receiver> Subscriber<'a, Receiver> { + /// Returns the [`EntityGlobalId`] of this Subscriber. + /// + /// # Examples + /// ``` + /// # async_std::task::block_on(async { + /// use zenoh::prelude::r#async::*; + /// + /// let session = zenoh::open(config::peer()).res().await.unwrap(); + /// let subscriber = session.declare_subscriber("key/expression") + /// .res() + /// .await + /// .unwrap(); + /// let subscriber_id = subscriber.id(); + /// # }) + /// ``` + #[zenoh_macros::unstable] + pub fn id(&self) -> EntityGlobalId { + EntityGlobalId { + zid: self.subscriber.session.zid(), + eid: self.subscriber.state.id, + } + } + /// Returns the [`KeyExpr`] this Subscriber subscribes to. pub fn key_expr(&self) -> &KeyExpr<'static> { &self.subscriber.state.key_expr From f12f3382fa38af82cdd8bf75bcbb2bad4eec7f68 Mon Sep 17 00:00:00 2001 From: DenisBiryukov91 <155981813+DenisBiryukov91@users.noreply.github.com> Date: Tue, 12 Mar 2024 19:00:48 +0100 Subject: [PATCH 010/124] refactor Query.reply() (#796) * refactor Query.reply() into seprate methods:reply, reply_del and reply_err * explain #[allow(unused_mut)]; replace unwrap on KeyxExpr.try_from with ? 
as it was originally for Sample in zenoh/tests/routing.rs * mark Query.reply_sample as unstable * format fix --- examples/examples/z_queryable.rs | 19 +- examples/examples/z_storage.rs | 2 +- plugins/zenoh-backend-traits/Cargo.toml | 2 +- plugins/zenoh-backend-traits/src/lib.rs | 2 +- plugins/zenoh-plugin-example/src/lib.rs | 2 +- .../zenoh-plugin-rest/examples/z_serve_sse.rs | 6 +- .../src/replica/align_queryable.rs | 53 +-- .../src/replica/storage.rs | 6 +- zenoh-ext/src/group.rs | 6 +- zenoh-ext/src/publication_cache.rs | 6 +- zenoh/src/admin.rs | 6 +- zenoh/src/net/runtime/adminspace.rs | 30 +- zenoh/src/queryable.rs | 355 ++++++++++++------ zenoh/src/sample.rs | 6 + zenoh/src/session.rs | 12 +- zenoh/tests/attachments.rs | 4 +- zenoh/tests/routing.rs | 4 +- zenoh/tests/session.rs | 19 +- zenoh/tests/unicity.rs | 16 +- 19 files changed, 340 insertions(+), 216 deletions(-) diff --git a/examples/examples/z_queryable.rs b/examples/examples/z_queryable.rs index d7376835b7..12c1fc3f20 100644 --- a/examples/examples/z_queryable.rs +++ b/examples/examples/z_queryable.rs @@ -54,25 +54,28 @@ async fn main() { println!(">> [Queryable ] Received Query '{}' with value '{}'", query.selector(), payload); }, } - let reply = if send_errors.swap(false, Relaxed) { + if send_errors.swap(false, Relaxed) { println!( ">> [Queryable ] Replying (ERROR: '{}')", value, ); - Err(value.clone().into()) + query + .reply_err(value.clone()) + .res() + .await + .unwrap_or_else(|e| println!(">> [Queryable ] Error sending reply: {e}")); } else { println!( ">> [Queryable ] Responding ('{}': '{}')", key_expr.as_str(), value, ); - Ok(Sample::new(key_expr.clone(), value.clone())) + query + .reply(key_expr.clone(), value.clone()) + .res() + .await + .unwrap_or_else(|e| println!(">> [Queryable ] Error sending reply: {e}")); }; - query - .reply(reply) - .res() - .await - .unwrap_or_else(|e| println!(">> [Queryable ] Error sending reply: {e}")); }, _ = stdin.read_exact(&mut input).fuse() => { diff --git 
a/examples/examples/z_storage.rs b/examples/examples/z_storage.rs index 5e0eaabd44..857181751b 100644 --- a/examples/examples/z_storage.rs +++ b/examples/examples/z_storage.rs @@ -67,7 +67,7 @@ async fn main() { println!(">> [Queryable ] Received Query '{}'", query.selector()); for (stored_name, sample) in stored.iter() { if query.selector().key_expr.intersects(unsafe {keyexpr::from_str_unchecked(stored_name)}) { - query.reply(Ok(sample.clone())).res().await.unwrap(); + query.reply(sample.key_expr.clone(), sample.payload.clone()).res().await.unwrap(); } } }, diff --git a/plugins/zenoh-backend-traits/Cargo.toml b/plugins/zenoh-backend-traits/Cargo.toml index f2b8a4a1eb..b3926ab955 100644 --- a/plugins/zenoh-backend-traits/Cargo.toml +++ b/plugins/zenoh-backend-traits/Cargo.toml @@ -31,7 +31,7 @@ async-std = { workspace = true, features = ["default"] } async-trait = { workspace = true } derive_more = { workspace = true } serde_json = { workspace = true } -zenoh = { workspace = true } +zenoh = { workspace = true, features = ["unstable"] } zenoh-result = { workspace = true } zenoh-util = { workspace = true } schemars = { workspace = true } diff --git a/plugins/zenoh-backend-traits/src/lib.rs b/plugins/zenoh-backend-traits/src/lib.rs index 8b9fa359e0..d17e6dfd77 100644 --- a/plugins/zenoh-backend-traits/src/lib.rs +++ b/plugins/zenoh-backend-traits/src/lib.rs @@ -325,6 +325,6 @@ impl Query { sample }; // Send reply - self.q.reply(Ok(sample)) + self.q.reply_sample(sample) } } diff --git a/plugins/zenoh-plugin-example/src/lib.rs b/plugins/zenoh-plugin-example/src/lib.rs index 592a08ca9b..12cc6ffa84 100644 --- a/plugins/zenoh-plugin-example/src/lib.rs +++ b/plugins/zenoh-plugin-example/src/lib.rs @@ -174,7 +174,7 @@ async fn run(runtime: Runtime, selector: KeyExpr<'_>, flag: Arc) { info!("Handling query '{}'", query.selector()); for (key_expr, sample) in stored.iter() { if query.selector().key_expr.intersects(unsafe{keyexpr::from_str_unchecked(key_expr)}) { - 
query.reply(Ok(sample.clone())).res().await.unwrap(); + query.reply_sample(sample.clone()).res().await.unwrap(); } } } diff --git a/plugins/zenoh-plugin-rest/examples/z_serve_sse.rs b/plugins/zenoh-plugin-rest/examples/z_serve_sse.rs index c5bdcc4c73..bb76005d6e 100644 --- a/plugins/zenoh-plugin-rest/examples/z_serve_sse.rs +++ b/plugins/zenoh-plugin-rest/examples/z_serve_sse.rs @@ -49,11 +49,7 @@ async fn main() { let receiver = queryable.receiver.clone(); async move { while let Ok(request) = receiver.recv_async().await { - request - .reply(Ok(Sample::new(key, HTML))) - .res() - .await - .unwrap(); + request.reply(key, HTML).res().await.unwrap(); } } }); diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/align_queryable.rs b/plugins/zenoh-plugin-storage-manager/src/replica/align_queryable.rs index 359b8dd7e8..5fda8b576d 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/align_queryable.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/align_queryable.rs @@ -95,34 +95,43 @@ impl AlignQueryable { for value in values { match value { AlignData::Interval(i, c) => { - let sample = Sample::new( - query.key_expr().clone(), - serde_json::to_string(&(i, c)).unwrap(), - ); - query.reply(Ok(sample)).res().await.unwrap(); + query + .reply( + query.key_expr().clone(), + serde_json::to_string(&(i, c)).unwrap(), + ) + .res() + .await + .unwrap(); } AlignData::Subinterval(i, c) => { - let sample = Sample::new( - query.key_expr().clone(), - serde_json::to_string(&(i, c)).unwrap(), - ); - query.reply(Ok(sample)).res().await.unwrap(); + query + .reply( + query.key_expr().clone(), + serde_json::to_string(&(i, c)).unwrap(), + ) + .res() + .await + .unwrap(); } AlignData::Content(i, c) => { - let sample = Sample::new( - query.key_expr().clone(), - serde_json::to_string(&(i, c)).unwrap(), - ); - query.reply(Ok(sample)).res().await.unwrap(); + query + .reply( + query.key_expr().clone(), + serde_json::to_string(&(i, c)).unwrap(), + ) + .res() + .await + 
.unwrap(); } AlignData::Data(k, (v, ts)) => { - let Value { - payload, encoding, .. - } = v; - let sample = Sample::new(k, payload) - .with_encoding(encoding) - .with_timestamp(ts); - query.reply(Ok(sample)).res().await.unwrap(); + query + .reply(k, v.payload) + .with_encoding(v.encoding) + .with_timestamp(ts) + .res() + .await + .unwrap(); } } } diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs index 1ef7e65390..6b48895612 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs @@ -532,7 +532,7 @@ impl StorageService { } else { sample }; - if let Err(e) = q.reply(Ok(sample)).res().await { + if let Err(e) = q.reply_sample(sample).res().await { log::warn!( "Storage '{}' raised an error replying a query: {}", self.name, @@ -570,7 +570,7 @@ impl StorageService { } else { sample }; - if let Err(e) = q.reply(Ok(sample)).res().await { + if let Err(e) = q.reply_sample(sample).res().await { log::warn!( "Storage '{}' raised an error replying a query: {}", self.name, @@ -583,7 +583,7 @@ impl StorageService { let err_message = format!("Storage '{}' raised an error on query: {}", self.name, e); log::warn!("{}", err_message); - if let Err(e) = q.reply(Err(err_message.into())).res().await { + if let Err(e) = q.reply_err(err_message).res().await { log::warn!( "Storage '{}' raised an error replying a query: {}", self.name, diff --git a/zenoh-ext/src/group.rs b/zenoh-ext/src/group.rs index 9078e61741..75a435e8f4 100644 --- a/zenoh-ext/src/group.rs +++ b/zenoh-ext/src/group.rs @@ -237,11 +237,7 @@ async fn query_handler(z: Arc, state: Arc) { while let Ok(query) = queryable.recv_async().await { log::trace!("Serving query for: {}", &qres); - query - .reply(Ok(Sample::new(qres.clone(), buf.clone()))) - .res() - .await - .unwrap(); + query.reply(qres.clone(), buf.clone()).res().await.unwrap(); } } diff --git 
a/zenoh-ext/src/publication_cache.rs b/zenoh-ext/src/publication_cache.rs index cd5ed964ad..1c9a286800 100644 --- a/zenoh-ext/src/publication_cache.rs +++ b/zenoh-ext/src/publication_cache.rs @@ -201,7 +201,7 @@ impl<'a> PublicationCache<'a> { } }, - // on query, reply with cach content + // on query, reply with cache content query = quer_recv.recv_async() => { if let Ok(query) = query { if !query.selector().key_expr.as_str().contains('*') { @@ -212,7 +212,7 @@ impl<'a> PublicationCache<'a> { continue; } } - if let Err(e) = query.reply(Ok(sample.clone())).res_async().await { + if let Err(e) = query.reply_sample(sample.clone()).res_async().await { log::warn!("Error replying to query: {}", e); } } @@ -226,7 +226,7 @@ impl<'a> PublicationCache<'a> { continue; } } - if let Err(e) = query.reply(Ok(sample.clone())).res_async().await { + if let Err(e) = query.reply_sample(sample.clone()).res_async().await { log::warn!("Error replying to query: {}", e); } } diff --git a/zenoh/src/admin.rs b/zenoh/src/admin.rs index 5a242d51b7..268997d687 100644 --- a/zenoh/src/admin.rs +++ b/zenoh/src/admin.rs @@ -17,7 +17,7 @@ use crate::{ prelude::sync::{KeyExpr, Locality, SampleKind}, queryable::Query, sample::DataInfo, - Payload, Sample, Session, ZResult, + Payload, Session, ZResult, }; use async_std::task; use std::{ @@ -71,7 +71,7 @@ pub(crate) fn on_admin_query(session: &Session, query: Query) { if let Ok(value) = serde_json::value::to_value(peer.clone()) { match Payload::try_from(value) { Ok(zbuf) => { - let _ = query.reply(Ok(Sample::new(key_expr, zbuf))).res_sync(); + let _ = query.reply(key_expr, zbuf).res_sync(); } Err(e) => log::debug!("Admin query error: {}", e), } @@ -88,7 +88,7 @@ pub(crate) fn on_admin_query(session: &Session, query: Query) { if let Ok(value) = serde_json::value::to_value(link) { match Payload::try_from(value) { Ok(zbuf) => { - let _ = query.reply(Ok(Sample::new(key_expr, zbuf))).res_sync(); + let _ = query.reply(key_expr, zbuf).res_sync(); } Err(e) => 
log::debug!("Admin query error: {}", e), } diff --git a/zenoh/src/net/runtime/adminspace.rs b/zenoh/src/net/runtime/adminspace.rs index e76475f447..b67692e704 100644 --- a/zenoh/src/net/runtime/adminspace.rs +++ b/zenoh/src/net/runtime/adminspace.rs @@ -17,7 +17,7 @@ use crate::key_expr::KeyExpr; use crate::net::primitives::Primitives; use crate::payload::Payload; use crate::plugins::sealed::{self as plugins}; -use crate::prelude::sync::{Sample, SyncResolve}; +use crate::prelude::sync::SyncResolve; use crate::queryable::Query; use crate::queryable::QueryInner; use crate::value::Value; @@ -577,9 +577,8 @@ fn router_data(context: &AdminContext, query: Query) { } }; if let Err(e) = query - .reply(Ok( - Sample::new(reply_key, payload).with_encoding(Encoding::APPLICATION_JSON) - )) + .reply(reply_key, payload) + .with_encoding(Encoding::APPLICATION_JSON) .res_sync() { log::error!("Error sending AdminSpace reply: {:?}", e); @@ -609,7 +608,7 @@ zenoh_build{{version="{}"}} 1 .openmetrics_text(), ); - if let Err(e) = query.reply(Ok(Sample::new(reply_key, metrics))).res() { + if let Err(e) = query.reply(reply_key, metrics).res() { log::error!("Error sending AdminSpace reply: {:?}", e); } } @@ -622,10 +621,7 @@ fn routers_linkstate_data(context: &AdminContext, query: Query) { let tables = zread!(context.runtime.state.router.tables.tables); if let Err(e) = query - .reply(Ok(Sample::new( - reply_key, - tables.hat_code.info(&tables, WhatAmI::Router), - ))) + .reply(reply_key, tables.hat_code.info(&tables, WhatAmI::Router)) .res() { log::error!("Error sending AdminSpace reply: {:?}", e); @@ -640,10 +636,7 @@ fn peers_linkstate_data(context: &AdminContext, query: Query) { let tables = zread!(context.runtime.state.router.tables.tables); if let Err(e) = query - .reply(Ok(Sample::new( - reply_key, - tables.hat_code.info(&tables, WhatAmI::Peer), - ))) + .reply(reply_key, tables.hat_code.info(&tables, WhatAmI::Peer)) .res() { log::error!("Error sending AdminSpace reply: {:?}", e); @@ 
-660,7 +653,7 @@ fn subscribers_data(context: &AdminContext, query: Query) { )) .unwrap(); if query.key_expr().intersects(&key) { - if let Err(e) = query.reply(Ok(Sample::new(key, Payload::empty()))).res() { + if let Err(e) = query.reply(key, Payload::empty()).res() { log::error!("Error sending AdminSpace reply: {:?}", e); } } @@ -677,7 +670,7 @@ fn queryables_data(context: &AdminContext, query: Query) { )) .unwrap(); if query.key_expr().intersects(&key) { - if let Err(e) = query.reply(Ok(Sample::new(key, Payload::empty()))).res() { + if let Err(e) = query.reply(key, Payload::empty()).res() { log::error!("Error sending AdminSpace reply: {:?}", e); } } @@ -697,7 +690,7 @@ fn plugins_data(context: &AdminContext, query: Query) { let status = serde_json::to_value(status).unwrap(); match Payload::try_from(status) { Ok(zbuf) => { - if let Err(e) = query.reply(Ok(Sample::new(key, zbuf))).res_sync() { + if let Err(e) = query.reply(key, zbuf).res_sync() { log::error!("Error sending AdminSpace reply: {:?}", e); } } @@ -718,8 +711,7 @@ fn plugins_status(context: &AdminContext, query: Query) { with_extended_string(plugin_key, &["/__path__"], |plugin_path_key| { if let Ok(key_expr) = KeyExpr::try_from(plugin_path_key.clone()) { if query.key_expr().intersects(&key_expr) { - if let Err(e) = query.reply(Ok(Sample::new(key_expr, plugin.path()))).res() - { + if let Err(e) = query.reply(key_expr, plugin.path()).res() { log::error!("Error sending AdminSpace reply: {:?}", e); } } @@ -743,7 +735,7 @@ fn plugins_status(context: &AdminContext, query: Query) { if let Ok(key_expr) = KeyExpr::try_from(response.key) { match Payload::try_from(response.value) { Ok(zbuf) => { - if let Err(e) = query.reply(Ok(Sample::new(key_expr, zbuf))).res_sync() { + if let Err(e) = query.reply(key_expr, zbuf).res_sync() { log::error!("Error sending AdminSpace reply: {:?}", e); } }, diff --git a/zenoh/src/queryable.rs b/zenoh/src/queryable.rs index bd5ec81101..ed3bd63b6a 100644 --- a/zenoh/src/queryable.rs +++ 
b/zenoh/src/queryable.rs @@ -14,9 +14,12 @@ //! Queryable primitives. +use crate::encoding::Encoding; use crate::handlers::{locked, DefaultHandler}; use crate::net::primitives::Primitives; use crate::prelude::*; +use crate::sample::QoS; +use crate::sample::SourceInfo; use crate::Id; use crate::SessionRef; use crate::Undeclarable; @@ -26,6 +29,7 @@ use std::fmt; use std::future::Ready; use std::ops::Deref; use std::sync::Arc; +use uhlc::Timestamp; use zenoh_core::{AsyncResolve, Resolvable, SyncResolve}; use zenoh_protocol::core::{EntityId, WireExpr}; use zenoh_protocol::network::{response, Mapping, RequestId, Response, ResponseFinal}; @@ -96,6 +100,42 @@ impl Query { pub fn attachment(&self) -> Option<&Attachment> { self.inner.attachment.as_ref() } + /// Sends a reply in the form of [`Sample`] to this Query. + /// + /// By default, queries only accept replies whose key expression intersects with the query's. + /// Unless the query has enabled disjoint replies (you can check this through [`Query::accepts_replies`]), + /// replying on a disjoint key expression will result in an error when resolving the reply. + /// This api is for internal use only. + #[inline(always)] + #[cfg(feature = "unstable")] + #[doc(hidden)] + pub fn reply_sample(&self, sample: Sample) -> ReplyBuilder<'_> { + let Sample { + key_expr, + payload, + kind, + encoding, + timestamp, + qos, + #[cfg(feature = "unstable")] + source_info, + #[cfg(feature = "unstable")] + attachment, + } = sample; + ReplyBuilder { + query: self, + key_expr, + payload, + kind, + encoding, + timestamp, + qos, + #[cfg(feature = "unstable")] + source_info, + #[cfg(feature = "unstable")] + attachment, + } + } /// Sends a reply to this Query. /// @@ -103,10 +143,64 @@ impl Query { /// Unless the query has enabled disjoint replies (you can check this through [`Query::accepts_replies`]), /// replying on a disjoint key expression will result in an error when resolving the reply. 
#[inline(always)] - pub fn reply(&self, result: Result) -> ReplyBuilder<'_> { + pub fn reply( + &self, + key_expr: IntoKeyExpr, + payload: IntoPayload, + ) -> ReplyBuilder<'_> + where + IntoKeyExpr: Into>, + IntoPayload: Into, + { ReplyBuilder { query: self, - result, + key_expr: key_expr.into(), + payload: payload.into(), + kind: SampleKind::Put, + timestamp: None, + encoding: Encoding::default(), + qos: response::ext::QoSType::RESPONSE.into(), + #[cfg(feature = "unstable")] + source_info: SourceInfo::empty(), + #[cfg(feature = "unstable")] + attachment: None, + } + } + /// Sends an error reply to this Query. + /// + #[inline(always)] + pub fn reply_err(&self, value: IntoValue) -> ReplyErrBuilder<'_> + where + IntoValue: Into, + { + ReplyErrBuilder { + query: self, + value: value.into(), + } + } + + /// Sends a delete reply to this Query. + /// + /// By default, queries only accept replies whose key expression intersects with the query's. + /// Unless the query has enabled disjoint replies (you can check this through [`Query::accepts_replies`]), + /// replying on a disjoint key expression will result in an error when resolving the reply. + #[inline(always)] + pub fn reply_del(&self, key_expr: IntoKeyExpr) -> ReplyBuilder<'_> + where + IntoKeyExpr: Into>, + { + ReplyBuilder { + query: self, + key_expr: key_expr.into(), + payload: Payload::empty(), + kind: SampleKind::Delete, + timestamp: None, + encoding: Encoding::default(), + qos: response::ext::QoSType::RESPONSE.into(), + #[cfg(feature = "unstable")] + source_info: SourceInfo::empty(), + #[cfg(feature = "unstable")] + attachment: None, } } @@ -149,25 +243,50 @@ impl fmt::Display for Query { } } -/// A builder returned by [`Query::reply()`](Query::reply). +/// A builder returned by [`Query::reply()`](Query::reply) or [`Query::reply_del()`](Query::reply_del). 
#[must_use = "Resolvables do nothing unless you resolve them using the `res` method from either `SyncResolve` or `AsyncResolve`"] #[derive(Debug)] pub struct ReplyBuilder<'a> { query: &'a Query, - result: Result, + key_expr: KeyExpr<'static>, + payload: Payload, + kind: SampleKind, + encoding: Encoding, + timestamp: Option, + qos: QoS, + #[cfg(feature = "unstable")] + source_info: SourceInfo, + #[cfg(feature = "unstable")] + attachment: Option, +} + +/// A builder returned by [`Query::reply_err()`](Query::reply_err). +#[must_use = "Resolvables do nothing unless you resolve them using the `res` method from either `SyncResolve` or `AsyncResolve`"] +#[derive(Debug)] +pub struct ReplyErrBuilder<'a> { + query: &'a Query, + value: Value, } impl<'a> ReplyBuilder<'a> { - #[allow(clippy::result_large_err)] #[zenoh_macros::unstable] - pub fn with_attachment(mut self, attachment: Attachment) -> Result { - match &mut self.result { - Ok(sample) => { - sample.attachment = Some(attachment); - Ok(self) - } - Err(_) => Err((self, attachment)), - } + pub fn with_attachment(mut self, attachment: Attachment) -> Self { + self.attachment = Some(attachment); + self + } + #[zenoh_macros::unstable] + pub fn with_source_info(mut self, source_info: SourceInfo) -> Self { + self.source_info = source_info; + self + } + pub fn with_timestamp(mut self, timestamp: Timestamp) -> Self { + self.timestamp = Some(timestamp); + self + } + + pub fn with_encoding(mut self, encoding: Encoding) -> Self { + self.encoding = encoding; + self } } @@ -177,119 +296,65 @@ impl<'a> Resolvable for ReplyBuilder<'a> { impl SyncResolve for ReplyBuilder<'_> { fn res_sync(self) -> ::To { - match self.result { - Ok(sample) => { - if !self.query._accepts_any_replies().unwrap_or(false) - && !self.query.key_expr().intersects(&sample.key_expr) - { - bail!("Attempted to reply on `{}`, which does not intersect with query `{}`, despite query only allowing replies on matching key expressions", sample.key_expr, 
self.query.key_expr()) - } - let Sample { - key_expr, - payload, - kind, - encoding, - timestamp, - #[cfg(feature = "unstable")] - source_info, - #[cfg(feature = "unstable")] - attachment, - .. - } = sample; - // Use a macro for inferring the proper const extension ID between Put and Del cases - macro_rules! ext_attachment { - () => {{ - #[allow(unused_mut)] - let mut ext_attachment = None; + if !self.query._accepts_any_replies().unwrap_or(false) + && !self.query.key_expr().intersects(&self.key_expr) + { + bail!("Attempted to reply on `{}`, which does not intersect with query `{}`, despite query only allowing replies on matching key expressions", self.key_expr, self.query.key_expr()) + } + #[allow(unused_mut)] // will be unused if feature = "unstable" is not enabled + let mut ext_sinfo = None; + #[cfg(feature = "unstable")] + { + if self.source_info.source_id.is_some() || self.source_info.source_sn.is_some() { + ext_sinfo = Some(zenoh::put::ext::SourceInfoType { + id: self.source_info.source_id.unwrap_or_default(), + sn: self.source_info.source_sn.unwrap_or_default() as u32, + }) + } + } + self.query.inner.primitives.send_response(Response { + rid: self.query.inner.qid, + wire_expr: WireExpr { + scope: 0, + suffix: std::borrow::Cow::Owned(self.key_expr.into()), + mapping: Mapping::Sender, + }, + payload: ResponseBody::Reply(zenoh::Reply { + consolidation: zenoh::Consolidation::DEFAULT, + ext_unknown: vec![], + payload: match self.kind { + SampleKind::Put => ReplyBody::Put(Put { + timestamp: self.timestamp, + encoding: self.encoding.into(), + ext_sinfo, + #[cfg(feature = "shared-memory")] + ext_shm: None, #[cfg(feature = "unstable")] - { - if let Some(attachment) = attachment { - ext_attachment = Some(attachment.into()); - } - } - ext_attachment - }}; - } - #[allow(unused_mut)] - let mut ext_sinfo = None; - #[cfg(feature = "unstable")] - { - if source_info.source_id.is_some() || source_info.source_sn.is_some() { - ext_sinfo = Some(zenoh::put::ext::SourceInfoType { - 
id: source_info.source_id.unwrap_or_default(), - sn: source_info.source_sn.unwrap_or_default() as u32, - }) - } - } - self.query.inner.primitives.send_response(Response { - rid: self.query.inner.qid, - wire_expr: WireExpr { - scope: 0, - suffix: std::borrow::Cow::Owned(key_expr.into()), - mapping: Mapping::Sender, - }, - payload: ResponseBody::Reply(zenoh::Reply { - consolidation: zenoh::Consolidation::DEFAULT, + ext_attachment: self.attachment.map(|a| a.into()), + #[cfg(not(feature = "unstable"))] + ext_attachment: None, ext_unknown: vec![], - payload: match kind { - SampleKind::Put => ReplyBody::Put(Put { - timestamp, - encoding: encoding.into(), - ext_sinfo, - #[cfg(feature = "shared-memory")] - ext_shm: None, - ext_attachment: ext_attachment!(), - ext_unknown: vec![], - payload: payload.into(), - }), - SampleKind::Delete => ReplyBody::Del(Del { - timestamp, - ext_sinfo, - ext_attachment: ext_attachment!(), - ext_unknown: vec![], - }), - }, + payload: self.payload.into(), }), - ext_qos: response::ext::QoSType::RESPONSE, - ext_tstamp: None, - ext_respid: Some(response::ext::ResponderIdType { - zid: self.query.inner.zid, - eid: self.query.eid, - }), - }); - Ok(()) - } - Err(payload) => { - self.query.inner.primitives.send_response(Response { - rid: self.query.inner.qid, - wire_expr: WireExpr { - scope: 0, - suffix: std::borrow::Cow::Owned(self.query.key_expr().as_str().to_owned()), - mapping: Mapping::Sender, - }, - payload: ResponseBody::Err(zenoh::Err { - timestamp: None, - is_infrastructure: false, - ext_sinfo: None, + SampleKind::Delete => ReplyBody::Del(Del { + timestamp: self.timestamp, + ext_sinfo, + #[cfg(feature = "unstable")] + ext_attachment: self.attachment.map(|a| a.into()), + #[cfg(not(feature = "unstable"))] + ext_attachment: None, ext_unknown: vec![], - ext_body: Some(ValueType { - #[cfg(feature = "shared-memory")] - ext_shm: None, - payload: payload.payload.into(), - encoding: payload.encoding.into(), - }), - code: 0, // TODO }), - ext_qos: 
response::ext::QoSType::RESPONSE, - ext_tstamp: None, - ext_respid: Some(response::ext::ResponderIdType { - zid: self.query.inner.zid, - eid: self.query.eid, - }), - }); - Ok(()) - } - } + }, + }), + ext_qos: self.qos.into(), + ext_tstamp: None, + ext_respid: Some(response::ext::ResponderIdType { + zid: self.query.inner.zid, + eid: self.query.eid, + }), + }); + Ok(()) } } @@ -301,6 +366,50 @@ impl<'a> AsyncResolve for ReplyBuilder<'a> { } } +impl<'a> Resolvable for ReplyErrBuilder<'a> { + type To = ZResult<()>; +} + +impl SyncResolve for ReplyErrBuilder<'_> { + fn res_sync(self) -> ::To { + self.query.inner.primitives.send_response(Response { + rid: self.query.inner.qid, + wire_expr: WireExpr { + scope: 0, + suffix: std::borrow::Cow::Owned(self.query.key_expr().as_str().to_owned()), + mapping: Mapping::Sender, + }, + payload: ResponseBody::Err(zenoh::Err { + timestamp: None, + is_infrastructure: false, + ext_sinfo: None, + ext_unknown: vec![], + ext_body: Some(ValueType { + #[cfg(feature = "shared-memory")] + ext_shm: None, + payload: self.value.payload.into(), + encoding: self.value.encoding.into(), + }), + code: 0, // TODO + }), + ext_qos: response::ext::QoSType::RESPONSE, + ext_tstamp: None, + ext_respid: Some(response::ext::ResponderIdType { + zid: self.query.inner.zid, + eid: self.query.eid, + }), + }); + Ok(()) + } +} +impl<'a> AsyncResolve for ReplyErrBuilder<'a> { + type Future = Ready; + + fn res_async(self) -> Self::Future { + std::future::ready(self.res_sync()) + } +} + pub(crate) struct QueryableState { pub(crate) id: Id, pub(crate) key_expr: WireExpr<'static>, @@ -338,7 +447,7 @@ impl fmt::Debug for QueryableState { /// let queryable = session.declare_queryable("key/expression").res().await.unwrap(); /// while let Ok(query) = queryable.recv_async().await { /// println!(">> Handling query '{}'", query.selector()); -/// query.reply(Ok(Sample::try_from("key/expression", "value").unwrap())) +/// query.reply(KeyExpr::try_from("key/expression").unwrap(), 
"value") /// .res() /// .await /// .unwrap(); @@ -576,7 +685,7 @@ impl<'a, 'b, Handler> QueryableBuilder<'a, 'b, Handler> { /// .unwrap(); /// while let Ok(query) = queryable.recv_async().await { /// println!(">> Handling query '{}'", query.selector()); -/// query.reply(Ok(Sample::try_from("key/expression", "value").unwrap())) +/// query.reply(KeyExpr::try_from("key/expression").unwrap(), "value") /// .res() /// .await /// .unwrap(); diff --git a/zenoh/src/sample.rs b/zenoh/src/sample.rs index af4a58956d..9c68b460d9 100644 --- a/zenoh/src/sample.rs +++ b/zenoh/src/sample.rs @@ -585,3 +585,9 @@ impl From for QoS { QoS { inner: qos } } } + +impl From for QoSType { + fn from(qos: QoS) -> Self { + qos.inner + } +} diff --git a/zenoh/src/session.rs b/zenoh/src/session.rs index 861acf71de..ba67e173bd 100644 --- a/zenoh/src/session.rs +++ b/zenoh/src/session.rs @@ -1852,10 +1852,10 @@ impl<'s> SessionDeclarations<'s, 'static> for Arc { /// .unwrap(); /// async_std::task::spawn(async move { /// while let Ok(query) = queryable.recv_async().await { - /// query.reply(Ok(Sample::try_from( - /// "key/expression", + /// query.reply( + /// KeyExpr::try_from("key/expression").unwrap(), /// "value", - /// ).unwrap())).res().await.unwrap(); + /// ).res().await.unwrap(); /// } /// }).await; /// # }) @@ -2481,10 +2481,10 @@ pub trait SessionDeclarations<'s, 'a> { /// .unwrap(); /// async_std::task::spawn(async move { /// while let Ok(query) = queryable.recv_async().await { - /// query.reply(Ok(Sample::try_from( - /// "key/expression", + /// query.reply( + /// KeyExpr::try_from("key/expression").unwrap(), /// "value", - /// ).unwrap())).res().await.unwrap(); + /// ).res().await.unwrap(); /// } /// }).await; /// # }) diff --git a/zenoh/tests/attachments.rs b/zenoh/tests/attachments.rs index 89dd3e231f..0e7c1c0de7 100644 --- a/zenoh/tests/attachments.rs +++ b/zenoh/tests/attachments.rs @@ -73,11 +73,11 @@ fn queries() { attachment.insert(&k, &k); } query - .reply(Ok(Sample::new( + 
.reply( query.key_expr().clone(), query.value().unwrap().payload.clone(), ) - .with_attachment(attachment))) + .with_attachment(attachment) .res() .unwrap(); }) diff --git a/zenoh/tests/routing.rs b/zenoh/tests/routing.rs index 5c96f080f8..82053b4f1d 100644 --- a/zenoh/tests/routing.rs +++ b/zenoh/tests/routing.rs @@ -115,12 +115,12 @@ impl Task { // The Queryable task keeps replying to requested messages until all checkpoints are finished. Self::Queryable(ke, payload_size) => { let queryable = session.declare_queryable(ke).res_async().await?; - let sample = Sample::try_from(ke.clone(), vec![0u8; *payload_size])?; + let payload = vec![0u8; *payload_size]; loop { futures::select! { query = queryable.recv_async() => { - query?.reply(Ok(sample.clone())).res_async().await?; + query?.reply(KeyExpr::try_from(ke.to_owned())?, payload.clone()).res_async().await?; }, _ = async_std::task::sleep(Duration::from_millis(100)).fuse() => { diff --git a/zenoh/tests/session.rs b/zenoh/tests/session.rs index 8a3f4381d2..077c58298d 100644 --- a/zenoh/tests/session.rs +++ b/zenoh/tests/session.rs @@ -157,23 +157,28 @@ async fn test_session_qryrep(peer01: &Session, peer02: &Session, reliability: Re c_msgs.fetch_add(1, Ordering::Relaxed); match query.parameters() { "ok_put" => { - let mut rep = Sample::try_from(key_expr, vec![0u8; size]).unwrap(); - rep.kind = SampleKind::Put; task::block_on(async { - ztimeout!(query.reply(Ok(rep)).res_async()).unwrap() + ztimeout!(query + .reply( + KeyExpr::try_from(key_expr).unwrap(), + vec![0u8; size].to_vec() + ) + .res_async()) + .unwrap() }); } "ok_del" => { - let mut rep = Sample::try_from(key_expr, vec![0u8; size]).unwrap(); - rep.kind = SampleKind::Delete; task::block_on(async { - ztimeout!(query.reply(Ok(rep)).res_async()).unwrap() + ztimeout!(query + .reply_del(KeyExpr::try_from(key_expr).unwrap()) + .res_async()) + .unwrap() }); } "err" => { let rep = Value::from(vec![0u8; size]); task::block_on(async { - 
ztimeout!(query.reply(Err(rep)).res_async()).unwrap() + ztimeout!(query.reply_err(rep).res_async()).unwrap() }); } _ => panic!("Unknown query parameter"), diff --git a/zenoh/tests/unicity.rs b/zenoh/tests/unicity.rs index 76910ee5de..def0dffe33 100644 --- a/zenoh/tests/unicity.rs +++ b/zenoh/tests/unicity.rs @@ -196,8 +196,12 @@ async fn test_unicity_qryrep(s01: &Session, s02: &Session, s03: &Session) { .declare_queryable(key_expr) .callback(move |sample| { c_msgs1.fetch_add(1, Ordering::Relaxed); - let rep = Sample::try_from(key_expr, vec![0u8; size]).unwrap(); - task::block_on(async { ztimeout!(sample.reply(Ok(rep)).res_async()).unwrap() }); + task::block_on(async { + ztimeout!(sample + .reply(KeyExpr::try_from(key_expr).unwrap(), vec![0u8; size]) + .res_async()) + .unwrap() + }); }) .res_async()) .unwrap(); @@ -209,8 +213,12 @@ async fn test_unicity_qryrep(s01: &Session, s02: &Session, s03: &Session) { .declare_queryable(key_expr) .callback(move |sample| { c_msgs2.fetch_add(1, Ordering::Relaxed); - let rep = Sample::try_from(key_expr, vec![0u8; size]).unwrap(); - task::block_on(async { ztimeout!(sample.reply(Ok(rep)).res_async()).unwrap() }); + task::block_on(async { + ztimeout!(sample + .reply(KeyExpr::try_from(key_expr).unwrap(), vec![0u8; size]) + .res_async()) + .unwrap() + }); }) .res_async()) .unwrap(); From e06b46d4e39b723fb17f9cf6015e07c58b2ec710 Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Wed, 13 Mar 2024 10:03:45 +0100 Subject: [PATCH 011/124] Simplify Error message (#813) --- commons/zenoh-codec/src/zenoh/err.rs | 57 +++++++++------------ commons/zenoh-protocol/src/zenoh/err.rs | 52 +++++++------------ io/zenoh-transport/src/shm.rs | 28 ++-------- zenoh/src/net/routing/dispatcher/queries.rs | 2 +- zenoh/src/queryable.rs | 19 +++---- zenoh/src/session.rs | 12 ++--- 6 files changed, 56 insertions(+), 114 deletions(-) diff --git a/commons/zenoh-codec/src/zenoh/err.rs b/commons/zenoh-codec/src/zenoh/err.rs index 5cef1a6389..b459f67b3f 100644 --- 
a/commons/zenoh-codec/src/zenoh/err.rs +++ b/commons/zenoh-codec/src/zenoh/err.rs @@ -11,14 +11,16 @@ // Contributors: // ZettaScale Zenoh Team, // -use crate::{common::extension, RCodec, WCodec, Zenoh080, Zenoh080Header}; +use crate::{common::extension, RCodec, WCodec, Zenoh080, Zenoh080Bounded, Zenoh080Header}; use alloc::vec::Vec; use zenoh_buffers::{ reader::{DidntRead, Reader}, writer::{DidntWrite, Writer}, + ZBuf, }; use zenoh_protocol::{ common::{iext, imsg}, + core::Encoding, zenoh::{ err::{ext, flag, Err}, id, @@ -33,33 +35,26 @@ where fn write(self, writer: &mut W, x: &Err) -> Self::Output { let Err { - code, - is_infrastructure, - timestamp, + encoding, ext_sinfo, - ext_body, ext_unknown, + payload, } = x; // Header let mut header = id::ERR; - if timestamp.is_some() { - header |= flag::T; + if encoding != &Encoding::empty() { + header |= flag::E; } - if *is_infrastructure { - header |= flag::I; - } - let mut n_exts = - (ext_sinfo.is_some() as u8) + (ext_body.is_some() as u8) + (ext_unknown.len() as u8); + let mut n_exts = (ext_sinfo.is_some() as u8) + (ext_unknown.len() as u8); if n_exts != 0 { header |= flag::Z; } self.write(&mut *writer, header)?; // Body - self.write(&mut *writer, code)?; - if let Some(ts) = timestamp.as_ref() { - self.write(&mut *writer, ts)?; + if encoding != &Encoding::empty() { + self.write(&mut *writer, encoding)?; } // Extensions @@ -67,15 +62,15 @@ where n_exts -= 1; self.write(&mut *writer, (sinfo, n_exts != 0))?; } - if let Some(body) = ext_body.as_ref() { - n_exts -= 1; - self.write(&mut *writer, (body, n_exts != 0))?; - } for u in ext_unknown.iter() { n_exts -= 1; self.write(&mut *writer, (u, n_exts != 0))?; } + // Payload + let bodec = Zenoh080Bounded::::new(); + bodec.write(&mut *writer, payload)?; + Ok(()) } } @@ -105,16 +100,13 @@ where } // Body - let code: u16 = self.codec.read(&mut *reader)?; - let is_infrastructure = imsg::has_flag(self.header, flag::I); - let mut timestamp: Option = None; - if 
imsg::has_flag(self.header, flag::T) { - timestamp = Some(self.codec.read(&mut *reader)?); + let mut encoding = Encoding::empty(); + if imsg::has_flag(self.header, flag::E) { + encoding = self.codec.read(&mut *reader)?; } // Extensions let mut ext_sinfo: Option = None; - let mut ext_body: Option = None; let mut ext_unknown = Vec::new(); let mut has_ext = imsg::has_flag(self.header, flag::Z); @@ -127,11 +119,6 @@ where ext_sinfo = Some(s); has_ext = ext; } - ext::ErrBodyType::VID | ext::ErrBodyType::SID => { - let (s, ext): (ext::ErrBodyType, bool) = eodec.read(&mut *reader)?; - ext_body = Some(s); - has_ext = ext; - } _ => { let (u, ext) = extension::read(reader, "Err", ext)?; ext_unknown.push(u); @@ -140,13 +127,15 @@ where } } + // Payload + let bodec = Zenoh080Bounded::::new(); + let payload: ZBuf = bodec.read(&mut *reader)?; + Ok(Err { - code, - is_infrastructure, - timestamp, + encoding, ext_sinfo, - ext_body, ext_unknown, + payload, }) } } diff --git a/commons/zenoh-protocol/src/zenoh/err.rs b/commons/zenoh-protocol/src/zenoh/err.rs index 648efff441..eacbb26596 100644 --- a/commons/zenoh-protocol/src/zenoh/err.rs +++ b/commons/zenoh-protocol/src/zenoh/err.rs @@ -11,43 +11,41 @@ // Contributors: // ZettaScale Zenoh Team, // -use crate::common::ZExtUnknown; +use crate::{common::ZExtUnknown, core::Encoding}; use alloc::vec::Vec; -use uhlc::Timestamp; +use zenoh_buffers::ZBuf; /// # Err message /// /// ```text /// Flags: -/// - T: Timestamp If T==1 then the timestamp if present -/// - I: Infrastructure If I==1 then the error is related to the infrastructure else to the user +/// - X: Reserved +/// - E: Encoding If E==1 then the encoding is present /// - Z: Extension If Z==1 then at least one extension is present /// /// 7 6 5 4 3 2 1 0 /// +-+-+-+-+-+-+-+-+ -/// |Z|I|T| ERR | +/// |Z|E|X| ERR | /// +-+-+-+---------+ -/// % code:z16 % -/// +---------------+ -/// ~ ts: ~ if T==1 +/// ~ encoding ~ if E==1 /// +---------------+ /// ~ [err_exts] ~ if Z==1 /// 
+---------------+ +/// ~ pl: ~ -- Payload +/// +---------------+ /// ``` pub mod flag { - pub const T: u8 = 1 << 5; // 0x20 Timestamp if T==0 then the timestamp if present - pub const I: u8 = 1 << 6; // 0x40 Infrastructure if I==1 then the error is related to the infrastructure else to the user + // pub const X: u8 = 1 << 5; // 0x20 Reserved + pub const E: u8 = 1 << 6; // 0x40 Encoding if E==1 then the encoding is present pub const Z: u8 = 1 << 7; // 0x80 Extensions if Z==1 then an extension will follow } #[derive(Debug, Clone, PartialEq, Eq)] pub struct Err { - pub code: u16, - pub is_infrastructure: bool, - pub timestamp: Option, + pub encoding: Encoding, pub ext_sinfo: Option, - pub ext_body: Option, pub ext_unknown: Vec, + pub payload: ZBuf, } pub mod ext { @@ -57,45 +55,31 @@ pub mod ext { /// Used to carry additional information about the source of data pub type SourceInfo = zextzbuf!(0x1, false); pub type SourceInfoType = crate::zenoh::ext::SourceInfoType<{ SourceInfo::ID }>; - - /// # ErrBody extension - /// Used to carry a body attached to the query - /// Shared Memory extension is automatically defined by ValueType extension if - /// #[cfg(feature = "shared-memory")] is defined. 
- pub type ErrBodyType = crate::zenoh::ext::ValueType<{ ZExtZBuf::<0x02>::id(false) }, 0x03>; } impl Err { #[cfg(feature = "test")] pub fn rand() -> Self { - use crate::{common::iext, core::ZenohId}; + use crate::common::iext; use rand::Rng; let mut rng = rand::thread_rng(); - let code: u16 = rng.gen(); - let is_infrastructure = rng.gen_bool(0.5); - let timestamp = rng.gen_bool(0.5).then_some({ - let time = uhlc::NTP64(rng.gen()); - let id = uhlc::ID::try_from(ZenohId::rand().to_le_bytes()).unwrap(); - Timestamp::new(time, id) - }); + let encoding = Encoding::rand(); let ext_sinfo = rng.gen_bool(0.5).then_some(ext::SourceInfoType::rand()); - let ext_body = rng.gen_bool(0.5).then_some(ext::ErrBodyType::rand()); let mut ext_unknown = Vec::new(); for _ in 0..rng.gen_range(0..4) { ext_unknown.push(ZExtUnknown::rand2( - iext::mid(ext::ErrBodyType::SID) + 1, + iext::mid(ext::SourceInfo::ID) + 1, false, )); } + let payload = ZBuf::rand(rng.gen_range(0..=64)); Self { - code, - is_infrastructure, - timestamp, + encoding, ext_sinfo, - ext_body, ext_unknown, + payload, } } } diff --git a/io/zenoh-transport/src/shm.rs b/io/zenoh-transport/src/shm.rs index 6f98cafc14..31910f51ae 100644 --- a/io/zenoh-transport/src/shm.rs +++ b/io/zenoh-transport/src/shm.rs @@ -18,7 +18,7 @@ use zenoh_core::{zasyncread, zasyncwrite, zerror}; use zenoh_protocol::{ network::{NetworkBody, NetworkMessage, Push, Request, Response}, zenoh::{ - err::{ext::ErrBodyType, Err}, + err::Err, ext::ShmType, query::{ext::QueryBodyType, Query}, reply::ReplyBody, @@ -123,31 +123,11 @@ impl MapShm for Reply { // Impl - Err impl MapShm for Err { fn map_to_shminfo(&mut self) -> ZResult { - if let Self { - ext_body: Some(ErrBodyType { - payload, ext_shm, .. - }), - .. - } = self - { - map_to_shminfo!(payload, ext_shm) - } else { - Ok(false) - } + Ok(false) } - fn map_to_shmbuf(&mut self, shmr: &RwLock) -> ZResult { - if let Self { - ext_body: Some(ErrBodyType { - payload, ext_shm, .. - }), - .. 
- } = self - { - map_to_shmbuf!(payload, ext_shm, shmr) - } else { - Ok(false) - } + fn map_to_shmbuf(&mut self, _shmr: &RwLock) -> ZResult { + Ok(false) } } diff --git a/zenoh/src/net/routing/dispatcher/queries.rs b/zenoh/src/net/routing/dispatcher/queries.rs index 287621151a..721a98b8c2 100644 --- a/zenoh/src/net/routing/dispatcher/queries.rs +++ b/zenoh/src/net/routing/dispatcher/queries.rs @@ -521,7 +521,7 @@ macro_rules! inc_res_stats { ResponseBody::Err(e) => { stats.[<$txrx _z_reply_msgs>].[](1); stats.[<$txrx _z_reply_pl_bytes>].[]( - e.ext_body.as_ref().map(|b| b.payload.len()).unwrap_or(0), + e.payload.len() ); } } diff --git a/zenoh/src/queryable.rs b/zenoh/src/queryable.rs index ed3bd63b6a..d98df046b7 100644 --- a/zenoh/src/queryable.rs +++ b/zenoh/src/queryable.rs @@ -31,9 +31,11 @@ use std::ops::Deref; use std::sync::Arc; use uhlc::Timestamp; use zenoh_core::{AsyncResolve, Resolvable, SyncResolve}; -use zenoh_protocol::core::{EntityId, WireExpr}; -use zenoh_protocol::network::{response, Mapping, RequestId, Response, ResponseFinal}; -use zenoh_protocol::zenoh::{self, ext::ValueType, reply::ReplyBody, Del, Put, ResponseBody}; +use zenoh_protocol::{ + core::{EntityId, WireExpr}, + network::{response, Mapping, RequestId, Response, ResponseFinal}, + zenoh::{self, reply::ReplyBody, Del, Put, ResponseBody}, +}; use zenoh_result::ZResult; pub(crate) struct QueryInner { @@ -380,17 +382,10 @@ impl SyncResolve for ReplyErrBuilder<'_> { mapping: Mapping::Sender, }, payload: ResponseBody::Err(zenoh::Err { - timestamp: None, - is_infrastructure: false, + encoding: self.value.encoding.into(), ext_sinfo: None, ext_unknown: vec![], - ext_body: Some(ValueType { - #[cfg(feature = "shared-memory")] - ext_shm: None, - payload: self.value.payload.into(), - encoding: self.value.encoding.into(), - }), - code: 0, // TODO + payload: self.value.payload.into(), }), ext_qos: response::ext::QoSType::RESPONSE, ext_tstamp: None, diff --git a/zenoh/src/session.rs 
b/zenoh/src/session.rs index ba67e173bd..4c303ae974 100644 --- a/zenoh/src/session.rs +++ b/zenoh/src/session.rs @@ -2128,15 +2128,9 @@ impl Primitives for Session { Some(query) => { let callback = query.callback.clone(); std::mem::drop(state); - let value = match e.ext_body { - Some(body) => Value { - payload: body.payload.into(), - encoding: body.encoding.into(), - }, - None => Value { - payload: Payload::empty(), - encoding: Encoding::default(), - }, + let value = Value { + payload: e.payload.into(), + encoding: e.encoding.into(), }; let replier_id = match e.ext_sinfo { Some(info) => info.id.zid, From 55119a5001f0080bfd78a91733760a45a959674c Mon Sep 17 00:00:00 2001 From: Denis Biryukov Date: Wed, 13 Mar 2024 15:07:59 +0100 Subject: [PATCH 012/124] make Sample ields pub(crate) provide accessors for external users --- examples/examples/z_get.rs | 4 +- examples/examples/z_pong.rs | 2 +- examples/examples/z_pull.rs | 6 +- examples/examples/z_storage.rs | 15 ++- examples/examples/z_sub.rs | 4 +- plugins/zenoh-plugin-example/src/lib.rs | 6 +- plugins/zenoh-plugin-rest/src/lib.rs | 22 ++--- .../src/replica/align_queryable.rs | 10 +- .../src/replica/aligner.rs | 16 ++-- .../src/replica/mod.rs | 11 ++- .../src/replica/storage.rs | 84 +++++++++-------- .../tests/operations.rs | 8 +- .../tests/wildcard.rs | 20 ++-- zenoh-ext/examples/z_query_sub.rs | 4 +- zenoh-ext/src/group.rs | 4 +- zenoh-ext/src/publication_cache.rs | 8 +- zenoh-ext/src/querying_subscriber.rs | 4 +- zenoh/src/liveliness.rs | 14 +-- zenoh/src/payload.rs | 4 +- zenoh/src/sample.rs | 93 +++++++++++++------ zenoh/src/subscriber.rs | 12 +-- zenoh/tests/attachments.rs | 4 +- zenoh/tests/events.rs | 20 ++-- zenoh/tests/interceptors.rs | 6 +- zenoh/tests/liveliness.rs | 8 +- zenoh/tests/qos.rs | 4 +- zenoh/tests/routing.rs | 4 +- zenoh/tests/session.rs | 10 +- zenoh/tests/unicity.rs | 6 +- 29 files changed, 224 insertions(+), 189 deletions(-) diff --git a/examples/examples/z_get.rs b/examples/examples/z_get.rs 
index 0fff95c250..dce74d367b 100644 --- a/examples/examples/z_get.rs +++ b/examples/examples/z_get.rs @@ -41,12 +41,12 @@ async fn main() { match reply.sample { Ok(sample) => { let payload = sample - .payload + .payload() .deserialize::() .unwrap_or_else(|e| format!("{}", e)); println!( ">> Received ('{}': '{}')", - sample.key_expr.as_str(), + sample.key_expr().as_str(), payload, ); } diff --git a/examples/examples/z_pong.rs b/examples/examples/z_pong.rs index 1f06c7abb9..6c333cbbeb 100644 --- a/examples/examples/z_pong.rs +++ b/examples/examples/z_pong.rs @@ -41,7 +41,7 @@ fn main() { let _sub = session .declare_subscriber(key_expr_ping) - .callback(move |sample| publisher.put(sample.payload).res().unwrap()) + .callback(move |sample| publisher.put(sample.payload().clone()).res().unwrap()) .res() .unwrap(); for _ in stdin().bytes().take_while(|b| !matches!(b, Ok(b'q'))) {} diff --git a/examples/examples/z_pull.rs b/examples/examples/z_pull.rs index ed2a90f1a6..5ba4f413bd 100644 --- a/examples/examples/z_pull.rs +++ b/examples/examples/z_pull.rs @@ -45,13 +45,13 @@ async fn main() { let subs = async { while let Ok(sample) = subscriber.recv_async().await { let payload = sample - .payload + .payload() .deserialize::() .unwrap_or_else(|e| format!("{}", e)); println!( ">> [Subscriber] Received {} ('{}': '{}')", - sample.kind, - sample.key_expr.as_str(), + sample.kind(), + sample.key_expr().as_str(), payload, ); } diff --git a/examples/examples/z_storage.rs b/examples/examples/z_storage.rs index 857181751b..ab62785f18 100644 --- a/examples/examples/z_storage.rs +++ b/examples/examples/z_storage.rs @@ -53,13 +53,12 @@ async fn main() { select!( sample = subscriber.recv_async() => { let sample = sample.unwrap(); - let payload = sample.payload.deserialize::().unwrap_or_else(|e| format!("{}", e)); - println!(">> [Subscriber] Received {} ('{}': '{}')", sample.kind, sample.key_expr.as_str(),payload); - if sample.kind == SampleKind::Delete { - 
stored.remove(&sample.key_expr.to_string()); - } else { - stored.insert(sample.key_expr.to_string(), sample); - } + let payload = sample.payload().deserialize::().unwrap_or_else(|e| format!("{}", e)); + println!(">> [Subscriber] Received {} ('{}': '{}')", sample.kind(), sample.key_expr().as_str(),payload); + match sample.kind() { + SampleKind::Delete => stored.remove(&sample.key_expr().to_string()), + SampleKind::Put => stored.insert(sample.key_expr().to_string(), sample), + }; }, query = queryable.recv_async() => { @@ -67,7 +66,7 @@ async fn main() { println!(">> [Queryable ] Received Query '{}'", query.selector()); for (stored_name, sample) in stored.iter() { if query.selector().key_expr.intersects(unsafe {keyexpr::from_str_unchecked(stored_name)}) { - query.reply(sample.key_expr.clone(), sample.payload.clone()).res().await.unwrap(); + query.reply(sample.key_expr().clone(), sample.payload().clone()).res().await.unwrap(); } } }, diff --git a/examples/examples/z_sub.rs b/examples/examples/z_sub.rs index 195e2f7640..f2d337a7cf 100644 --- a/examples/examples/z_sub.rs +++ b/examples/examples/z_sub.rs @@ -46,8 +46,8 @@ async fn main() { select!( sample = subscriber.recv_async() => { let sample = sample.unwrap(); - let payload = sample.payload.deserialize::().unwrap_or_else(|e| format!("{}", e)); - println!(">> [Subscriber] Received {} ('{}': '{}')", sample.kind, sample.key_expr.as_str(), payload); + let payload = sample.payload().deserialize::().unwrap_or_else(|e| format!("{}", e)); + println!(">> [Subscriber] Received {} ('{}': '{}')", sample.kind(), sample.key_expr().as_str(), payload); }, _ = stdin.read_exact(&mut input).fuse() => { match input[0] { diff --git a/plugins/zenoh-plugin-example/src/lib.rs b/plugins/zenoh-plugin-example/src/lib.rs index 12cc6ffa84..04f49b4739 100644 --- a/plugins/zenoh-plugin-example/src/lib.rs +++ b/plugins/zenoh-plugin-example/src/lib.rs @@ -164,9 +164,9 @@ async fn run(runtime: Runtime, selector: KeyExpr<'_>, flag: Arc) { // on sample 
received by the Subscriber sample = sub.recv_async() => { let sample = sample.unwrap(); - let payload = sample.payload.deserialize::().unwrap_or_else(|e| format!("{}", e)); - info!("Received data ('{}': '{}')", sample.key_expr, payload); - stored.insert(sample.key_expr.to_string(), sample); + let payload = sample.payload().deserialize::().unwrap_or_else(|e| format!("{}", e)); + info!("Received data ('{}': '{}')", sample.key_expr(), payload); + stored.insert(sample.key_expr().to_string(), sample); }, // on query received by the Queryable query = queryable.recv_async() => { diff --git a/plugins/zenoh-plugin-rest/src/lib.rs b/plugins/zenoh-plugin-rest/src/lib.rs index 1a99d7b5a4..c689bc7d7d 100644 --- a/plugins/zenoh-plugin-rest/src/lib.rs +++ b/plugins/zenoh-plugin-rest/src/lib.rs @@ -46,7 +46,7 @@ lazy_static::lazy_static! { } const RAW_KEY: &str = "_raw"; -fn payload_to_json(payload: Payload) -> String { +fn payload_to_json(payload: &Payload) -> String { payload .deserialize::() .unwrap_or_else(|_| format!(r#""{}""#, b64_std_engine.encode(payload.contiguous()))) @@ -55,10 +55,10 @@ fn payload_to_json(payload: Payload) -> String { fn sample_to_json(sample: Sample) -> String { format!( r#"{{ "key": "{}", "value": {}, "encoding": "{}", "time": "{}" }}"#, - sample.key_expr.as_str(), - payload_to_json(sample.payload), - sample.encoding, - if let Some(ts) = sample.timestamp { + sample.key_expr().as_str(), + payload_to_json(sample.payload()), + sample.encoding(), + if let Some(ts) = sample.timestamp() { ts.to_string() } else { "None".to_string() @@ -72,7 +72,7 @@ fn result_to_json(sample: Result) -> String { Err(err) => { format!( r#"{{ "key": "ERROR", "value": {}, "encoding": "{}"}}"#, - payload_to_json(err.payload), + payload_to_json(&err.payload), err.encoding, ) } @@ -100,8 +100,8 @@ async fn to_json_response(results: flume::Receiver) -> Response { fn sample_to_html(sample: Sample) -> String { format!( "
{}
\n
{}
\n", - sample.key_expr.as_str(), - String::from_utf8_lossy(&sample.payload.contiguous()) + sample.key_expr().as_str(), + String::from_utf8_lossy(&sample.payload().contiguous()) ) } @@ -136,8 +136,8 @@ async fn to_raw_response(results: flume::Receiver) -> Response { Ok(reply) => match reply.sample { Ok(sample) => response( StatusCode::Ok, - Cow::from(&sample.encoding).as_ref(), - String::from_utf8_lossy(&sample.payload.contiguous()).as_ref(), + Cow::from(sample.encoding()).as_ref(), + String::from_utf8_lossy(&sample.payload().contiguous()).as_ref(), ), Err(value) => response( StatusCode::Ok, @@ -322,7 +322,7 @@ async fn query(mut req: Request<(Arc, String)>) -> tide::Result { log::trace!( "[ALIGN QUERYABLE] Received ('{}': '{}')", - sample.key_expr.as_str(), - StringOrBase64::from(sample.payload.clone()) + sample.key_expr().as_str(), + StringOrBase64::from(sample.payload()) ); - if let Some(timestamp) = sample.timestamp { + if let Some(timestamp) = sample.timestamp() { match timestamp.cmp(&logentry.timestamp) { Ordering::Greater => return None, Ordering::Less => { diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs b/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs index 03c6fa949a..b11a94e4f2 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs @@ -141,10 +141,10 @@ impl Aligner { for sample in replies { result.insert( - sample.key_expr.into(), + sample.key_expr().clone().into(), ( - sample.timestamp.unwrap(), - Value::new(sample.payload).with_encoding(sample.encoding), + sample.timestamp().unwrap().clone(), + Value::from(sample), ), ); } @@ -213,7 +213,7 @@ impl Aligner { let mut other_intervals: HashMap = HashMap::new(); // expecting sample.payload to be a vec of intervals with their checksum for each in reply_content { - match serde_json::from_str(&StringOrBase64::from(each.payload)) { + match 
serde_json::from_str(&StringOrBase64::from(each.payload())) { Ok((i, c)) => { other_intervals.insert(i, c); } @@ -259,7 +259,7 @@ impl Aligner { let (reply_content, mut no_err) = self.perform_query(other_rep, properties).await; let mut other_subintervals: HashMap = HashMap::new(); for each in reply_content { - match serde_json::from_str(&StringOrBase64::from(each.payload)) { + match serde_json::from_str(&StringOrBase64::from(each.payload())) { Ok((i, c)) => { other_subintervals.insert(i, c); } @@ -300,7 +300,7 @@ impl Aligner { let (reply_content, mut no_err) = self.perform_query(other_rep, properties).await; let mut other_content: HashMap> = HashMap::new(); for each in reply_content { - match serde_json::from_str(&StringOrBase64::from(each.payload)) { + match serde_json::from_str(&StringOrBase64::from(each.payload())) { Ok((i, c)) => { other_content.insert(i, c); } @@ -340,8 +340,8 @@ impl Aligner { Ok(sample) => { log::trace!( "[ALIGNER] Received ('{}': '{}')", - sample.key_expr.as_str(), - StringOrBase64::from(sample.payload.clone()) + sample.key_expr().as_str(), + StringOrBase64::from(sample.payload()) ); return_val.push(sample); } diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/mod.rs b/plugins/zenoh-plugin-storage-manager/src/replica/mod.rs index 78254213f7..5dda032029 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/mod.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/mod.rs @@ -220,16 +220,17 @@ impl Replica { continue; } }; - let from = &sample.key_expr.as_str() + let from = &sample.key_expr().as_str() [Replica::get_digest_key(&self.key_expr, ALIGN_PREFIX).len() + 1..]; log::trace!( "[DIGEST_SUB] From {} Received {} ('{}': '{}')", from, - sample.kind, - sample.key_expr.as_str(), - StringOrBase64::from(sample.payload.clone()) + sample.kind(), + sample.key_expr().as_str(), + StringOrBase64::from(sample.payload()) ); - let digest: Digest = match serde_json::from_str(&StringOrBase64::from(sample.payload)) { + let digest: 
Digest = match serde_json::from_str(&StringOrBase64::from(sample.payload())) + { Ok(digest) => digest, Err(e) => { log::error!("[DIGEST_SUB] Error in decoding the digest: {}", e); diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs index 6b48895612..895f2e1914 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs @@ -179,7 +179,7 @@ impl StorageService { }; // log error if the sample is not timestamped // This is to reduce down the line inconsistencies of having duplicate samples stored - if sample.get_timestamp().is_none() { + if sample.timestamp().is_none() { log::error!("Sample {:?} is not timestamped. Please timestamp samples meant for replicated storage.", sample); } else { @@ -271,28 +271,28 @@ impl StorageService { }; // if wildcard, update wildcard_updates - if sample.key_expr.is_wild() { + if sample.key_expr().is_wild() { self.register_wildcard_update(sample.clone()).await; } - let matching_keys = if sample.key_expr.is_wild() { - self.get_matching_keys(&sample.key_expr).await + let matching_keys = if sample.key_expr().is_wild() { + self.get_matching_keys(&sample.key_expr()).await } else { - vec![sample.key_expr.clone().into()] + vec![sample.key_expr().clone().into()] }; log::trace!( "The list of keys matching `{}` is : {:?}", - sample.key_expr, + sample.key_expr(), matching_keys ); for k in matching_keys { if !self - .is_deleted(&k.clone(), sample.get_timestamp().unwrap()) + .is_deleted(&k.clone(), sample.timestamp().unwrap()) .await && (self.capability.history.eq(&History::All) || (self.capability.history.eq(&History::Latest) - && self.is_latest(&k, sample.get_timestamp().unwrap()).await)) + && self.is_latest(&k, sample.timestamp().unwrap()).await)) { log::trace!( "Sample `{:?}` identified as neded processing for key {}", @@ -302,30 +302,30 @@ impl StorageService { // there might be the 
case that the actual update was outdated due to a wild card update, but not stored yet in the storage. // get the relevant wild card entry and use that value and timestamp to update the storage let sample_to_store = match self - .ovderriding_wild_update(&k, sample.get_timestamp().unwrap()) + .ovderriding_wild_update(&k, sample.timestamp().unwrap()) .await { Some(overriding_update) => { let Value { payload, encoding, .. } = overriding_update.data.value; - let mut sample_to_store = Sample::new(KeyExpr::from(k.clone()), payload) + let sample_to_store = Sample::new(KeyExpr::from(k.clone()), payload) .with_encoding(encoding) - .with_timestamp(overriding_update.data.timestamp); - sample_to_store.kind = overriding_update.kind; + .with_timestamp(overriding_update.data.timestamp) + .with_kind(overriding_update.kind); sample_to_store } None => { - let mut sample_to_store = - Sample::new(KeyExpr::from(k.clone()), sample.payload.clone()) - .with_encoding(sample.encoding.clone()) - .with_timestamp(sample.timestamp.unwrap()); - sample_to_store.kind = sample.kind; + let sample_to_store = + Sample::new(KeyExpr::from(k.clone()), sample.payload().clone()) + .with_encoding(sample.encoding().clone()) + .with_timestamp(sample.timestamp().unwrap().clone()) + .with_kind(sample.kind()); sample_to_store } }; - let stripped_key = match self.strip_prefix(&sample_to_store.key_expr) { + let stripped_key = match self.strip_prefix(sample_to_store.key_expr()) { Ok(stripped) => stripped, Err(e) => { log::error!("{}", e); @@ -333,24 +333,25 @@ impl StorageService { } }; let mut storage = self.storage.lock().await; - let result = if sample.kind == SampleKind::Put { - storage - .put( - stripped_key, - Value::new(sample_to_store.payload.clone()) - .with_encoding(sample_to_store.encoding.clone()), - sample_to_store.timestamp.unwrap(), - ) - .await - } else if sample.kind == SampleKind::Delete { - // register a tombstone - self.mark_tombstone(&k, sample_to_store.timestamp.unwrap()) - .await; - storage - 
.delete(stripped_key, sample_to_store.timestamp.unwrap()) - .await - } else { - Err("sample kind not implemented".into()) + let result = match sample.kind() { + SampleKind::Put => { + storage + .put( + stripped_key, + Value::new(sample_to_store.payload().clone()) + .with_encoding(sample_to_store.encoding().clone()), + sample_to_store.timestamp().unwrap().clone(), + ) + .await + } + SampleKind::Delete => { + // register a tombstone + self.mark_tombstone(&k, sample_to_store.timestamp().unwrap().clone()) + .await; + storage + .delete(stripped_key, sample_to_store.timestamp().unwrap().clone()) + .await + } }; drop(storage); if self.replication.is_some() @@ -362,7 +363,7 @@ impl StorageService { .as_ref() .unwrap() .log_propagation - .send((k.clone(), *sample_to_store.get_timestamp().unwrap())); + .send((k.clone(), sample_to_store.timestamp().unwrap().clone())); match sending { Ok(_) => (), Err(e) => { @@ -395,15 +396,16 @@ impl StorageService { async fn register_wildcard_update(&self, sample: Sample) { // @TODO: change into a better store that does incremental writes - let key = sample.clone().key_expr; + let key = sample.key_expr().clone(); let mut wildcards = self.wildcard_updates.write().await; + let timestamp = sample.timestamp().unwrap().clone(); wildcards.insert( &key, Update { - kind: sample.kind, + kind: sample.kind(), data: StoredData { - value: Value::new(sample.payload).with_encoding(sample.encoding), - timestamp: sample.timestamp.unwrap(), + value: Value::from(sample), + timestamp, }, }, ); diff --git a/plugins/zenoh-plugin-storage-manager/tests/operations.rs b/plugins/zenoh-plugin-storage-manager/tests/operations.rs index 81029e2fa7..36162f01c2 100644 --- a/plugins/zenoh-plugin-storage-manager/tests/operations.rs +++ b/plugins/zenoh-plugin-storage-manager/tests/operations.rs @@ -101,7 +101,7 @@ async fn test_updates_in_order() { // expects exactly one sample let data = get_data(&session, "operation/test/a").await; assert_eq!(data.len(), 1); - 
assert_eq!(StringOrBase64::from(data[0].payload.clone()).as_str(), "1"); + assert_eq!(StringOrBase64::from(data[0].payload()).as_str(), "1"); put_data( &session, @@ -117,7 +117,7 @@ async fn test_updates_in_order() { // expects exactly one sample let data = get_data(&session, "operation/test/b").await; assert_eq!(data.len(), 1); - assert_eq!(StringOrBase64::from(data[0].payload.clone()).as_str(), "2"); + assert_eq!(StringOrBase64::from(data[0].payload()).as_str(), "2"); delete_data( &session, @@ -136,8 +136,8 @@ async fn test_updates_in_order() { // expects exactly one sample let data = get_data(&session, "operation/test/b").await; assert_eq!(data.len(), 1); - assert_eq!(StringOrBase64::from(data[0].payload.clone()).as_str(), "2"); - assert_eq!(data[0].key_expr.as_str(), "operation/test/b"); + assert_eq!(StringOrBase64::from(data[0].payload()).as_str(), "2"); + assert_eq!(data[0].key_expr().as_str(), "operation/test/b"); drop(storage); } diff --git a/plugins/zenoh-plugin-storage-manager/tests/wildcard.rs b/plugins/zenoh-plugin-storage-manager/tests/wildcard.rs index 4808ec246f..5a71dc23f0 100644 --- a/plugins/zenoh-plugin-storage-manager/tests/wildcard.rs +++ b/plugins/zenoh-plugin-storage-manager/tests/wildcard.rs @@ -117,8 +117,8 @@ async fn test_wild_card_in_order() { // expected single entry let data = get_data(&session, "wild/test/*").await; assert_eq!(data.len(), 1); - assert_eq!(data[0].key_expr.as_str(), "wild/test/a"); - assert_eq!(StringOrBase64::from(data[0].payload.clone()).as_str(), "2"); + assert_eq!(data[0].key_expr().as_str(), "wild/test/a"); + assert_eq!(StringOrBase64::from(data[0].payload()).as_str(), "2"); put_data( &session, @@ -134,10 +134,10 @@ async fn test_wild_card_in_order() { // expected two entries let data = get_data(&session, "wild/test/*").await; assert_eq!(data.len(), 2); - assert!(["wild/test/a", "wild/test/b"].contains(&data[0].key_expr.as_str())); - assert!(["wild/test/a", "wild/test/b"].contains(&data[1].key_expr.as_str())); - 
assert!(["2", "3"].contains(&StringOrBase64::from(data[0].payload.clone()).as_str())); - assert!(["2", "3"].contains(&StringOrBase64::from(data[1].payload.clone()).as_str())); + assert!(["wild/test/a", "wild/test/b"].contains(&data[0].key_expr().as_str())); + assert!(["wild/test/a", "wild/test/b"].contains(&data[1].key_expr().as_str())); + assert!(["2", "3"].contains(&StringOrBase64::from(data[0].payload()).as_str())); + assert!(["2", "3"].contains(&StringOrBase64::from(data[1].payload()).as_str())); put_data( &session, @@ -153,10 +153,10 @@ async fn test_wild_card_in_order() { // expected two entries let data = get_data(&session, "wild/test/*").await; assert_eq!(data.len(), 2); - assert!(["wild/test/a", "wild/test/b"].contains(&data[0].key_expr.as_str())); - assert!(["wild/test/a", "wild/test/b"].contains(&data[1].key_expr.as_str())); - assert_eq!(StringOrBase64::from(data[0].payload.clone()).as_str(), "4"); - assert_eq!(StringOrBase64::from(data[1].payload.clone()).as_str(), "4"); + assert!(["wild/test/a", "wild/test/b"].contains(&data[0].key_expr().as_str())); + assert!(["wild/test/a", "wild/test/b"].contains(&data[1].key_expr().as_str())); + assert_eq!(StringOrBase64::from(data[0].payload()).as_str(), "4"); + assert_eq!(StringOrBase64::from(data[1].payload()).as_str(), "4"); delete_data( &session, diff --git a/zenoh-ext/examples/z_query_sub.rs b/zenoh-ext/examples/z_query_sub.rs index 80efc0854f..8c1307d712 100644 --- a/zenoh-ext/examples/z_query_sub.rs +++ b/zenoh-ext/examples/z_query_sub.rs @@ -60,8 +60,8 @@ async fn main() { select!( sample = subscriber.recv_async() => { let sample = sample.unwrap(); - let payload = sample.payload.deserialize::().unwrap_or_else(|e| format!("{}", e)); - println!(">> [Subscriber] Received {} ('{}': '{}')", sample.kind, sample.key_expr.as_str(), payload); + let payload = sample.payload().deserialize::().unwrap_or_else(|e| format!("{}", e)); + println!(">> [Subscriber] Received {} ('{}': '{}')", sample.kind(), 
sample.key_expr().as_str(), payload); }, _ = stdin.read_exact(&mut input).fuse() => { diff --git a/zenoh-ext/src/group.rs b/zenoh-ext/src/group.rs index 75a435e8f4..41007d8b87 100644 --- a/zenoh-ext/src/group.rs +++ b/zenoh-ext/src/group.rs @@ -248,7 +248,7 @@ async fn net_event_handler(z: Arc, state: Arc) { .await .unwrap(); while let Ok(s) = sub.recv_async().await { - match bincode::deserialize::(&(s.payload.contiguous())) { + match bincode::deserialize::(&(s.payload().contiguous())) { Ok(evt) => match evt { GroupNetEvent::Join(je) => { log::debug!("Member join: {:?}", &je.member); @@ -308,7 +308,7 @@ async fn net_event_handler(z: Arc, state: Arc) { match reply.sample { Ok(sample) => { match bincode::deserialize::( - &sample.payload.contiguous(), + &sample.payload().contiguous(), ) { Ok(m) => { let mut expiry = Instant::now(); diff --git a/zenoh-ext/src/publication_cache.rs b/zenoh-ext/src/publication_cache.rs index 1c9a286800..85cb96cce2 100644 --- a/zenoh-ext/src/publication_cache.rs +++ b/zenoh-ext/src/publication_cache.rs @@ -180,9 +180,9 @@ impl<'a> PublicationCache<'a> { sample = sub_recv.recv_async() => { if let Ok(sample) = sample { let queryable_key_expr: KeyExpr<'_> = if let Some(prefix) = &queryable_prefix { - prefix.join(&sample.key_expr).unwrap().into() + prefix.join(sample.key_expr()).unwrap().into() } else { - sample.key_expr.clone() + sample.key_expr().clone() }; if let Some(queue) = cache.get_mut(queryable_key_expr.as_keyexpr()) { @@ -207,7 +207,7 @@ impl<'a> PublicationCache<'a> { if !query.selector().key_expr.as_str().contains('*') { if let Some(queue) = cache.get(query.selector().key_expr.as_keyexpr()) { for sample in queue { - if let (Ok(Some(time_range)), Some(timestamp)) = (query.selector().time_range(), sample.timestamp) { + if let (Ok(Some(time_range)), Some(timestamp)) = (query.selector().time_range(), sample.timestamp()) { if !time_range.contains(timestamp.get_time().to_system_time()){ continue; } @@ -221,7 +221,7 @@ impl<'a> 
PublicationCache<'a> { for (key_expr, queue) in cache.iter() { if query.selector().key_expr.intersects(unsafe{ keyexpr::from_str_unchecked(key_expr) }) { for sample in queue { - if let (Ok(Some(time_range)), Some(timestamp)) = (query.selector().time_range(), sample.timestamp) { + if let (Ok(Some(time_range)), Some(timestamp)) = (query.selector().time_range(), sample.timestamp()) { if !time_range.contains(timestamp.get_time().to_system_time()){ continue; } diff --git a/zenoh-ext/src/querying_subscriber.rs b/zenoh-ext/src/querying_subscriber.rs index 2c89ec82ae..470f795f2b 100644 --- a/zenoh-ext/src/querying_subscriber.rs +++ b/zenoh-ext/src/querying_subscriber.rs @@ -304,8 +304,8 @@ impl MergeQueue { } fn push(&mut self, sample: Sample) { - if let Some(ts) = sample.timestamp { - self.timstamped.entry(ts).or_insert(sample); + if let Some(ts) = sample.timestamp() { + self.timstamped.entry(ts.clone()).or_insert(sample); } else { self.untimestamped.push_back(sample); } diff --git a/zenoh/src/liveliness.rs b/zenoh/src/liveliness.rs index 9f14866363..d4229db4cc 100644 --- a/zenoh/src/liveliness.rs +++ b/zenoh/src/liveliness.rs @@ -131,9 +131,9 @@ impl<'a> Liveliness<'a> { /// let session = zenoh::open(config::peer()).res().await.unwrap(); /// let subscriber = session.liveliness().declare_subscriber("key/expression").res().await.unwrap(); /// while let Ok(sample) = subscriber.recv_async().await { - /// match sample.kind { - /// SampleKind::Put => println!("New liveliness: {}", sample.key_expr), - /// SampleKind::Delete => println!("Lost liveliness: {}", sample.key_expr), + /// match sample.kind() { + /// SampleKind::Put => println!("New liveliness: {}", sample.key_expr()), + /// SampleKind::Delete => println!("Lost liveliness: {}", sample.key_expr()), /// } /// } /// # }) @@ -169,7 +169,7 @@ impl<'a> Liveliness<'a> { /// let replies = session.liveliness().get("key/expression").res().await.unwrap(); /// while let Ok(reply) = replies.recv_async().await { /// if let 
Ok(sample) = reply.sample { - /// println!(">> Liveliness token {}", sample.key_expr); + /// println!(">> Liveliness token {}", sample.key_expr()); /// } /// } /// # }) @@ -425,7 +425,7 @@ impl<'a, 'b> LivelinessSubscriberBuilder<'a, 'b, DefaultHandler> { /// let session = zenoh::open(config::peer()).res().await.unwrap(); /// let subscriber = session /// .declare_subscriber("key/expression") - /// .callback(|sample| { println!("Received: {} {:?}", sample.key_expr, sample.payload); }) + /// .callback(|sample| { println!("Received: {} {:?}", sample.key_expr(), sample.payload()); }) /// .res() /// .await /// .unwrap(); @@ -499,7 +499,7 @@ impl<'a, 'b> LivelinessSubscriberBuilder<'a, 'b, DefaultHandler> { /// .await /// .unwrap(); /// while let Ok(sample) = subscriber.recv_async().await { - /// println!("Received: {} {:?}", sample.key_expr, sample.payload); + /// println!("Received: {} {:?}", sample.key_expr(), sample.payload()); /// } /// # }) /// ``` @@ -593,7 +593,7 @@ where /// .unwrap(); /// while let Ok(token) = tokens.recv_async().await { /// match token.sample { -/// Ok(sample) => println!("Alive token ('{}')", sample.key_expr.as_str()), +/// Ok(sample) => println!("Alive token ('{}')", sample.key_expr().as_str()), /// Err(err) => println!("Received (ERROR: '{:?}')", err.payload), /// } /// } diff --git a/zenoh/src/payload.rs b/zenoh/src/payload.rs index f499db50da..62f40f9294 100644 --- a/zenoh/src/payload.rs +++ b/zenoh/src/payload.rs @@ -579,8 +579,8 @@ impl std::fmt::Display for StringOrBase64 { } } -impl From for StringOrBase64 { - fn from(v: Payload) -> Self { +impl From<&Payload> for StringOrBase64 { + fn from(v: &Payload) -> Self { use base64::{engine::general_purpose::STANDARD as b64_std_engine, Engine}; match v.deserialize::() { Ok(s) => StringOrBase64::String(s), diff --git a/zenoh/src/sample.rs b/zenoh/src/sample.rs index 9c68b460d9..1ac04313ab 100644 --- a/zenoh/src/sample.rs +++ b/zenoh/src/sample.rs @@ -363,38 +363,18 @@ pub use 
attachment::{Attachment, AttachmentBuilder, AttachmentIterator}; #[non_exhaustive] #[derive(Clone, Debug)] pub struct Sample { - /// The key expression on which this Sample was published. - pub key_expr: KeyExpr<'static>, - /// The payload of this Sample. - pub payload: Payload, - /// The kind of this Sample. - pub kind: SampleKind, - /// The encoding of this sample - pub encoding: Encoding, - /// The [`Timestamp`] of this Sample. - pub timestamp: Option, - /// Quality of service settings this sample was sent with. - pub qos: QoS, + pub(crate) key_expr: KeyExpr<'static>, + pub(crate) payload: Payload, + pub(crate) kind: SampleKind, + pub(crate) encoding: Encoding, + pub(crate) timestamp: Option, + pub(crate) qos: QoS, #[cfg(feature = "unstable")] - ///
- /// 🔬 - /// This API has been marked as unstable: it works as advertised, but we may change it in a future release. - /// To use it, you must enable zenoh's unstable feature flag. - ///
- /// - /// Infos on the source of this Sample. - pub source_info: SourceInfo, + pub(crate) source_info: SourceInfo, #[cfg(feature = "unstable")] - ///
- /// 🔬 - /// This API has been marked as unstable: it works as advertised, but we may change it in a future release. - /// To use it, you must enable zenoh's unstable feature flag. - ///
- /// - /// A map of key-value pairs, where each key and value are byte-slices. - pub attachment: Option, + pub(crate) attachment: Option, } impl Sample { @@ -471,19 +451,67 @@ impl Sample { self } + /// Gets the key expression on which this Sample was published. + #[inline] + pub fn key_expr(&self) -> &KeyExpr<'static> { + &self.key_expr + } + + /// Gets the payload of this Sample. + #[inline] + pub fn payload(&self) -> &Payload { + &self.payload + } + + /// Gets the kind of this Sample. + #[inline] + pub fn kind(&self) -> SampleKind { + self.kind + } + + /// Sets the kind of this Sample. + #[inline] + #[doc(hidden)] + #[zenoh_macros::unstable] + pub fn with_kind(mut self, kind: SampleKind) -> Self { + self.kind = kind; + self + } + + /// Gets the encoding of this sample + #[inline] + pub fn encoding(&self) -> &Encoding { + &self.encoding + } + /// Gets the timestamp of this Sample. #[inline] - pub fn get_timestamp(&self) -> Option<&Timestamp> { + pub fn timestamp(&self) -> Option<&Timestamp> { self.timestamp.as_ref() } /// Sets the timestamp of this Sample. #[inline] + #[doc(hidden)] + #[zenoh_macros::unstable] pub fn with_timestamp(mut self, timestamp: Timestamp) -> Self { self.timestamp = Some(timestamp); self } + /// Gets the quality of service settings this Sample was sent with. + #[inline] + pub fn qos(&self) -> &QoS { + &self.qos + } + + /// Gets infos on the source of this Sample. + #[zenoh_macros::unstable] + #[inline] + pub fn source_info(&self) -> &SourceInfo { + &self.source_info + } + /// Sets the source info of this Sample. #[zenoh_macros::unstable] #[inline] @@ -506,17 +534,22 @@ impl Sample { } } + /// Gets the sample attachment: a map of key-value pairs, where each key and value are byte-slices. #[zenoh_macros::unstable] + #[inline] pub fn attachment(&self) -> Option<&Attachment> { self.attachment.as_ref() } + /// Gets the mutable sample attachment: a map of key-value pairs, where each key and value are byte-slices. 
#[zenoh_macros::unstable] + #[inline] pub fn attachment_mut(&mut self) -> &mut Option { &mut self.attachment } - #[allow(clippy::result_large_err)] + #[inline] + #[doc(hidden)] #[zenoh_macros::unstable] pub fn with_attachment(mut self, attachment: Attachment) -> Self { self.attachment = Some(attachment); diff --git a/zenoh/src/subscriber.rs b/zenoh/src/subscriber.rs index e276d0c6d0..d4c3257472 100644 --- a/zenoh/src/subscriber.rs +++ b/zenoh/src/subscriber.rs @@ -67,7 +67,7 @@ impl fmt::Debug for SubscriberState { /// let session = zenoh::open(config::peer()).res().await.unwrap(); /// let subscriber = session /// .declare_subscriber("key/expression") -/// .callback(|sample| { println!("Received: {} {:?}", sample.key_expr, sample.payload) }) +/// .callback(|sample| { println!("Received: {} {:?}", sample.key_expr(), sample.payload()) }) /// .res() /// .await /// .unwrap(); @@ -100,7 +100,7 @@ pub(crate) struct SubscriberInner<'a> { /// let session = zenoh::open(config::peer()).res().await.unwrap(); /// let subscriber = session /// .declare_subscriber("key/expression") -/// .callback(|sample| { println!("Received: {} {:?}", sample.key_expr, sample.payload); }) +/// .callback(|sample| { println!("Received: {} {:?}", sample.key_expr(), sample.payload()); }) /// .pull_mode() /// .res() /// .await @@ -123,7 +123,7 @@ impl<'a> PullSubscriberInner<'a> { /// let session = zenoh::open(config::peer()).res().await.unwrap(); /// let subscriber = session /// .declare_subscriber("key/expression") - /// .callback(|sample| { println!("Received: {} {:?}", sample.key_expr, sample.payload); }) + /// .callback(|sample| { println!("Received: {} {:?}", sample.key_expr(), sample.payload()); }) /// .pull_mode() /// .res() /// .await @@ -332,7 +332,7 @@ impl<'a, 'b, Mode> SubscriberBuilder<'a, 'b, Mode, DefaultHandler> { /// let session = zenoh::open(config::peer()).res().await.unwrap(); /// let subscriber = session /// .declare_subscriber("key/expression") - /// .callback(|sample| { 
println!("Received: {} {:?}", sample.key_expr, sample.payload); }) + /// .callback(|sample| { println!("Received: {} {:?}", sample.key_expr(), sample.payload()); }) /// .res() /// .await /// .unwrap(); @@ -407,7 +407,7 @@ impl<'a, 'b, Mode> SubscriberBuilder<'a, 'b, Mode, DefaultHandler> { /// .await /// .unwrap(); /// while let Ok(sample) = subscriber.recv_async().await { - /// println!("Received: {} {:?}", sample.key_expr, sample.payload); + /// println!("Received: {} {:?}", sample.key_expr(), sample.payload()); /// } /// # }) /// ``` @@ -636,7 +636,7 @@ where /// .await /// .unwrap(); /// while let Ok(sample) = subscriber.recv_async().await { -/// println!("Received: {} {:?}", sample.key_expr, sample.payload); +/// println!("Received: {} {:?}", sample.key_expr(), sample.payload()); /// } /// # }) /// ``` diff --git a/zenoh/tests/attachments.rs b/zenoh/tests/attachments.rs index 0e7c1c0de7..38d03b0a84 100644 --- a/zenoh/tests/attachments.rs +++ b/zenoh/tests/attachments.rs @@ -9,9 +9,9 @@ fn pubsub() { .callback(|sample| { println!( "{}", - std::str::from_utf8(&sample.payload.contiguous()).unwrap() + std::str::from_utf8(&sample.payload().contiguous()).unwrap() ); - for (k, v) in &sample.attachment.unwrap() { + for (k, v) in sample.attachment().unwrap() { assert!(k.iter().rev().zip(v.as_slice()).all(|(k, v)| k == v)) } }) diff --git a/zenoh/tests/events.rs b/zenoh/tests/events.rs index 0ea775784a..5823b16150 100644 --- a/zenoh/tests/events.rs +++ b/zenoh/tests/events.rs @@ -69,15 +69,15 @@ fn zenoh_events() { let sample = ztimeout!(sub1.recv_async()); assert!(sample.is_ok()); - let key_expr = sample.as_ref().unwrap().key_expr.as_str(); + let key_expr = sample.as_ref().unwrap().key_expr().as_str(); assert!(key_expr.eq(&format!("@/session/{zid}/transport/unicast/{zid2}"))); - assert!(sample.as_ref().unwrap().kind == SampleKind::Put); + assert!(sample.as_ref().unwrap().kind() == SampleKind::Put); let sample = ztimeout!(sub2.recv_async()); assert!(sample.is_ok()); - 
let key_expr = sample.as_ref().unwrap().key_expr.as_str(); + let key_expr = sample.as_ref().unwrap().key_expr().as_str(); assert!(key_expr.starts_with(&format!("@/session/{zid}/transport/unicast/{zid2}/link/"))); - assert!(sample.as_ref().unwrap().kind == SampleKind::Put); + assert!(sample.as_ref().unwrap().kind() == SampleKind::Put); let replies: Vec = ztimeout!(session .get(format!("@/session/{zid}/transport/unicast/*")) @@ -87,7 +87,7 @@ fn zenoh_events() { .collect(); assert!(replies.len() == 1); assert!(replies[0].sample.is_ok()); - let key_expr = replies[0].sample.as_ref().unwrap().key_expr.as_str(); + let key_expr = replies[0].sample.as_ref().unwrap().key_expr().as_str(); assert!(key_expr.eq(&format!("@/session/{zid}/transport/unicast/{zid2}"))); let replies: Vec = ztimeout!(session @@ -98,22 +98,22 @@ fn zenoh_events() { .collect(); assert!(replies.len() == 1); assert!(replies[0].sample.is_ok()); - let key_expr = replies[0].sample.as_ref().unwrap().key_expr.as_str(); + let key_expr = replies[0].sample.as_ref().unwrap().key_expr().as_str(); assert!(key_expr.starts_with(&format!("@/session/{zid}/transport/unicast/{zid2}/link/"))); close_session(session2).await; let sample = ztimeout!(sub1.recv_async()); assert!(sample.is_ok()); - let key_expr = sample.as_ref().unwrap().key_expr.as_str(); + let key_expr = sample.as_ref().unwrap().key_expr().as_str(); assert!(key_expr.eq(&format!("@/session/{zid}/transport/unicast/{zid2}"))); - assert!(sample.as_ref().unwrap().kind == SampleKind::Delete); + assert!(sample.as_ref().unwrap().kind() == SampleKind::Delete); let sample = ztimeout!(sub2.recv_async()); assert!(sample.is_ok()); - let key_expr = sample.as_ref().unwrap().key_expr.as_str(); + let key_expr = sample.as_ref().unwrap().key_expr().as_str(); assert!(key_expr.starts_with(&format!("@/session/{zid}/transport/unicast/{zid2}/link/"))); - assert!(sample.as_ref().unwrap().kind == SampleKind::Delete); + assert!(sample.as_ref().unwrap().kind() == SampleKind::Delete); 
sub2.undeclare().res().await.unwrap(); sub1.undeclare().res().await.unwrap(); diff --git a/zenoh/tests/interceptors.rs b/zenoh/tests/interceptors.rs index 2a5c30e7b8..1f502138e4 100644 --- a/zenoh/tests/interceptors.rs +++ b/zenoh/tests/interceptors.rs @@ -83,9 +83,9 @@ fn downsampling_by_keyexpr_impl(egress: bool) { .callback(move |sample| { let mut count = zlock!(total_count_clone); *count += 1; - if sample.key_expr.as_str() == "test/downsamples_by_keyexp/r100" { + if sample.key_expr().as_str() == "test/downsamples_by_keyexp/r100" { zlock!(counter_r100).tick(); - } else if sample.key_expr.as_str() == "test/downsamples_by_keyexp/r50" { + } else if sample.key_expr().as_str() == "test/downsamples_by_keyexp/r50" { zlock!(counter_r50).tick(); } }) @@ -191,7 +191,7 @@ fn downsampling_by_interface_impl(egress: bool) { .callback(move |sample| { let mut count = zlock!(total_count_clone); *count += 1; - if sample.key_expr.as_str() == "test/downsamples_by_interface/r100" { + if sample.key_expr().as_str() == "test/downsamples_by_interface/r100" { zlock!(counter_r100).tick(); } }) diff --git a/zenoh/tests/liveliness.rs b/zenoh/tests/liveliness.rs index 96cca533df..c55eed4bc4 100644 --- a/zenoh/tests/liveliness.rs +++ b/zenoh/tests/liveliness.rs @@ -72,14 +72,14 @@ fn zenoh_liveliness() { .res_async()) .unwrap(); let sample = ztimeout!(replies.recv_async()).unwrap().sample.unwrap(); - assert!(sample.kind == SampleKind::Put); - assert!(sample.key_expr.as_str() == "zenoh_liveliness_test"); + assert!(sample.kind() == SampleKind::Put); + assert!(sample.key_expr().as_str() == "zenoh_liveliness_test"); assert!(ztimeout!(replies.recv_async()).is_err()); let sample = ztimeout!(sub.recv_async()).unwrap(); - assert!(sample.kind == SampleKind::Put); - assert!(sample.key_expr.as_str() == "zenoh_liveliness_test"); + assert!(sample.kind() == SampleKind::Put); + assert!(sample.key_expr().as_str() == "zenoh_liveliness_test"); drop(token); diff --git a/zenoh/tests/qos.rs b/zenoh/tests/qos.rs 
index 475d8d7a1b..24119e7b1e 100644 --- a/zenoh/tests/qos.rs +++ b/zenoh/tests/qos.rs @@ -52,13 +52,13 @@ fn pubsub() { task::sleep(SLEEP).await; ztimeout!(publisher1.put("qos").res_async()).unwrap(); - let qos = ztimeout!(subscriber.recv_async()).unwrap().qos; + let qos = ztimeout!(subscriber.recv_async()).unwrap().qos().clone(); assert_eq!(qos.priority(), Priority::DataHigh); assert_eq!(qos.congestion_control(), CongestionControl::Drop); ztimeout!(publisher2.put("qos").res_async()).unwrap(); - let qos = ztimeout!(subscriber.recv_async()).unwrap().qos; + let qos = ztimeout!(subscriber.recv_async()).unwrap().qos().clone(); assert_eq!(qos.priority(), Priority::DataLow); assert_eq!(qos.congestion_control(), CongestionControl::Block); diff --git a/zenoh/tests/routing.rs b/zenoh/tests/routing.rs index 82053b4f1d..06a8f5da45 100644 --- a/zenoh/tests/routing.rs +++ b/zenoh/tests/routing.rs @@ -58,7 +58,7 @@ impl Task { let sub = ztimeout!(session.declare_subscriber(ke).res_async())?; let mut counter = 0; while let Ok(sample) = sub.recv_async().await { - let recv_size = sample.payload.len(); + let recv_size = sample.payload().len(); if recv_size != *expected_size { bail!("Received payload size {recv_size} mismatches the expected {expected_size}"); } @@ -91,7 +91,7 @@ impl Task { while let Ok(reply) = replies.recv_async().await { match reply.sample { Ok(sample) => { - let recv_size = sample.payload.len(); + let recv_size = sample.payload().len(); if recv_size != *expected_size { bail!("Received payload size {recv_size} mismatches the expected {expected_size}"); } diff --git a/zenoh/tests/session.rs b/zenoh/tests/session.rs index 077c58298d..e3f5e2df63 100644 --- a/zenoh/tests/session.rs +++ b/zenoh/tests/session.rs @@ -95,7 +95,7 @@ async fn test_session_pubsub(peer01: &Session, peer02: &Session, reliability: Re let sub = ztimeout!(peer01 .declare_subscriber(key_expr) .callback(move |sample| { - assert_eq!(sample.payload.len(), size); + assert_eq!(sample.payload().len(), 
size); c_msgs.fetch_add(1, Ordering::Relaxed); }) .res_async()) @@ -198,8 +198,8 @@ async fn test_session_qryrep(peer01: &Session, peer02: &Session, reliability: Re let rs = ztimeout!(peer02.get(selector).res_async()).unwrap(); while let Ok(s) = ztimeout!(rs.recv_async()) { let s = s.sample.unwrap(); - assert_eq!(s.kind, SampleKind::Put); - assert_eq!(s.payload.len(), size); + assert_eq!(s.kind(), SampleKind::Put); + assert_eq!(s.payload().len(), size); cnt += 1; } } @@ -216,8 +216,8 @@ async fn test_session_qryrep(peer01: &Session, peer02: &Session, reliability: Re let rs = ztimeout!(peer02.get(selector).res_async()).unwrap(); while let Ok(s) = ztimeout!(rs.recv_async()) { let s = s.sample.unwrap(); - assert_eq!(s.kind, SampleKind::Delete); - assert_eq!(s.payload.len(), 0); + assert_eq!(s.kind(), SampleKind::Delete); + assert_eq!(s.payload().len(), 0); cnt += 1; } } diff --git a/zenoh/tests/unicity.rs b/zenoh/tests/unicity.rs index def0dffe33..8eb007b0c0 100644 --- a/zenoh/tests/unicity.rs +++ b/zenoh/tests/unicity.rs @@ -114,7 +114,7 @@ async fn test_unicity_pubsub(s01: &Session, s02: &Session, s03: &Session) { let sub1 = ztimeout!(s01 .declare_subscriber(key_expr) .callback(move |sample| { - assert_eq!(sample.payload.len(), size); + assert_eq!(sample.payload().len(), size); c_msgs1.fetch_add(1, Ordering::Relaxed); }) .res_async()) @@ -126,7 +126,7 @@ async fn test_unicity_pubsub(s01: &Session, s02: &Session, s03: &Session) { let sub2 = ztimeout!(s02 .declare_subscriber(key_expr) .callback(move |sample| { - assert_eq!(sample.payload.len(), size); + assert_eq!(sample.payload().len(), size); c_msgs2.fetch_add(1, Ordering::Relaxed); }) .res_async()) @@ -232,7 +232,7 @@ async fn test_unicity_qryrep(s01: &Session, s02: &Session, s03: &Session) { for _ in 0..msg_count { let rs = ztimeout!(s03.get(key_expr).res_async()).unwrap(); while let Ok(s) = ztimeout!(rs.recv_async()) { - assert_eq!(s.sample.unwrap().payload.len(), size); + 
assert_eq!(s.sample.unwrap().payload().len(), size); cnt += 1; } } From cc68ffb8f0f3d8b429ffcdab6230d1a5cbb79a8a Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Wed, 13 Mar 2024 15:10:45 +0100 Subject: [PATCH 013/124] remove Publisher::write (#819) * build plugins with default zenoh features * update documentation to the new api for keformat's generated Parsed (#783) * fix: Relax dependency requirements (#758) - async-io - unix-named-pipe - filepath - advisory-lock * feat: Improve release workflow (#756) * wip: Improve Release workflow * feat: Add DockerHub & GHCR releases * feat: Refactor checks and tests into pre-release workflow * chore: Remove crates_check.sh and crates_publish.sh * fix: Remove Dockerfile * restore SN in case of frame drops caused by congestion control (#815) * remove Publisher::write * test fix * remove unrelated changes added by rebasing --------- Co-authored-by: Pierre Avital Co-authored-by: Mahmoud Mazouz Co-authored-by: Dmitrii Bannov <104833606+yellowhatter@users.noreply.github.com> --- zenoh/src/publication.rs | 29 ++++++++--------------------- 1 file changed, 8 insertions(+), 21 deletions(-) diff --git a/zenoh/src/publication.rs b/zenoh/src/publication.rs index 2a1a58ebd9..f12842d081 100644 --- a/zenoh/src/publication.rs +++ b/zenoh/src/publication.rs @@ -350,25 +350,6 @@ impl<'a> Publisher<'a> { } } - /// Send data with [`kind`](SampleKind) (Put or Delete). - /// - /// # Examples - /// ``` - /// # async_std::task::block_on(async { - /// use zenoh::prelude::r#async::*; - /// - /// let session = zenoh::open(config::peer()).res().await.unwrap().into_arc(); - /// let publisher = session.declare_publisher("key/expression").res().await.unwrap(); - /// publisher.write(SampleKind::Put, "payload").res().await.unwrap(); - /// # }) - /// ``` - pub fn write(&self, kind: SampleKind, value: IntoPayload) -> Publication - where - IntoPayload: Into, - { - self._write(kind, value.into()) - } - /// Put data. 
/// /// # Examples @@ -1451,11 +1432,17 @@ mod tests { let session = open(Config::default()).res().unwrap(); let sub = session.declare_subscriber(KEY_EXPR).res().unwrap(); let pub_ = session.declare_publisher(KEY_EXPR).res().unwrap(); - pub_.write(kind, VALUE).res().unwrap(); + + match kind { + SampleKind::Put => pub_.put(VALUE).res().unwrap(), + SampleKind::Delete => pub_.delete().res().unwrap(), + } let sample = sub.recv().unwrap(); assert_eq!(sample.kind, kind); - assert_eq!(sample.payload.deserialize::().unwrap(), VALUE); + if let SampleKind::Put = kind { + assert_eq!(sample.payload.deserialize::().unwrap(), VALUE); + } } sample_kind_integrity_in_publication_with(SampleKind::Put); From f3af52ac0f1787d3eff29ef82c5f00e695c249e2 Mon Sep 17 00:00:00 2001 From: Denis Biryukov Date: Wed, 13 Mar 2024 15:17:37 +0100 Subject: [PATCH 014/124] format and clippy --- examples/examples/z_get_liveliness.rs | 2 +- examples/examples/z_sub_liveliness.rs | 6 ++-- .../src/replica/align_queryable.rs | 5 +--- .../src/replica/aligner.rs | 5 +--- .../src/replica/storage.rs | 29 ++++++++----------- zenoh-ext/src/querying_subscriber.rs | 2 +- zenoh/tests/qos.rs | 4 +-- 7 files changed, 21 insertions(+), 32 deletions(-) diff --git a/examples/examples/z_get_liveliness.rs b/examples/examples/z_get_liveliness.rs index 036dc0ab98..66de570356 100644 --- a/examples/examples/z_get_liveliness.rs +++ b/examples/examples/z_get_liveliness.rs @@ -37,7 +37,7 @@ async fn main() { .unwrap(); while let Ok(reply) = replies.recv_async().await { match reply.sample { - Ok(sample) => println!(">> Alive token ('{}')", sample.key_expr.as_str(),), + Ok(sample) => println!(">> Alive token ('{}')", sample.key_expr().as_str(),), Err(err) => { let payload = err .payload diff --git a/examples/examples/z_sub_liveliness.rs b/examples/examples/z_sub_liveliness.rs index 52ba53875c..02e2e71ba4 100644 --- a/examples/examples/z_sub_liveliness.rs +++ b/examples/examples/z_sub_liveliness.rs @@ -46,13 +46,13 @@ async fn 
main() { select!( sample = subscriber.recv_async() => { let sample = sample.unwrap(); - match sample.kind { + match sample.kind() { SampleKind::Put => println!( ">> [LivelinessSubscriber] New alive token ('{}')", - sample.key_expr.as_str()), + sample.key_expr().as_str()), SampleKind::Delete => println!( ">> [LivelinessSubscriber] Dropped token ('{}')", - sample.key_expr.as_str()), + sample.key_expr().as_str()), } }, diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/align_queryable.rs b/plugins/zenoh-plugin-storage-manager/src/replica/align_queryable.rs index fc361d77f2..32be4a5534 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/align_queryable.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/align_queryable.rs @@ -180,10 +180,7 @@ impl AlignQueryable { let entry = entry.unwrap(); result.push(AlignData::Data( OwnedKeyExpr::from(entry.key_expr().clone()), - ( - Value::from(entry), - each.timestamp, - ), + (Value::from(entry), each.timestamp), )); } } diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs b/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs index b11a94e4f2..fb46b78082 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs @@ -142,10 +142,7 @@ impl Aligner { for sample in replies { result.insert( sample.key_expr().clone().into(), - ( - sample.timestamp().unwrap().clone(), - Value::from(sample), - ), + (*sample.timestamp().unwrap(), Value::from(sample)), ); } (result, no_err) diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs index 895f2e1914..0708dcabd9 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs @@ -276,7 +276,7 @@ impl StorageService { } let matching_keys = if sample.key_expr().is_wild() { - 
self.get_matching_keys(&sample.key_expr()).await + self.get_matching_keys(sample.key_expr()).await } else { vec![sample.key_expr().clone().into()] }; @@ -309,20 +309,15 @@ impl StorageService { let Value { payload, encoding, .. } = overriding_update.data.value; - let sample_to_store = Sample::new(KeyExpr::from(k.clone()), payload) + Sample::new(KeyExpr::from(k.clone()), payload) .with_encoding(encoding) .with_timestamp(overriding_update.data.timestamp) - .with_kind(overriding_update.kind); - sample_to_store - } - None => { - let sample_to_store = - Sample::new(KeyExpr::from(k.clone()), sample.payload().clone()) - .with_encoding(sample.encoding().clone()) - .with_timestamp(sample.timestamp().unwrap().clone()) - .with_kind(sample.kind()); - sample_to_store + .with_kind(overriding_update.kind) } + None => Sample::new(KeyExpr::from(k.clone()), sample.payload().clone()) + .with_encoding(sample.encoding().clone()) + .with_timestamp(*sample.timestamp().unwrap()) + .with_kind(sample.kind()), }; let stripped_key = match self.strip_prefix(sample_to_store.key_expr()) { @@ -340,16 +335,16 @@ impl StorageService { stripped_key, Value::new(sample_to_store.payload().clone()) .with_encoding(sample_to_store.encoding().clone()), - sample_to_store.timestamp().unwrap().clone(), + *sample_to_store.timestamp().unwrap(), ) .await } SampleKind::Delete => { // register a tombstone - self.mark_tombstone(&k, sample_to_store.timestamp().unwrap().clone()) + self.mark_tombstone(&k, *sample_to_store.timestamp().unwrap()) .await; storage - .delete(stripped_key, sample_to_store.timestamp().unwrap().clone()) + .delete(stripped_key, *sample_to_store.timestamp().unwrap()) .await } }; @@ -363,7 +358,7 @@ impl StorageService { .as_ref() .unwrap() .log_propagation - .send((k.clone(), sample_to_store.timestamp().unwrap().clone())); + .send((k.clone(), *sample_to_store.timestamp().unwrap())); match sending { Ok(_) => (), Err(e) => { @@ -398,7 +393,7 @@ impl StorageService { // @TODO: change into a better 
store that does incremental writes let key = sample.key_expr().clone(); let mut wildcards = self.wildcard_updates.write().await; - let timestamp = sample.timestamp().unwrap().clone(); + let timestamp = *sample.timestamp().unwrap(); wildcards.insert( &key, Update { diff --git a/zenoh-ext/src/querying_subscriber.rs b/zenoh-ext/src/querying_subscriber.rs index 470f795f2b..480e490fdd 100644 --- a/zenoh-ext/src/querying_subscriber.rs +++ b/zenoh-ext/src/querying_subscriber.rs @@ -305,7 +305,7 @@ impl MergeQueue { fn push(&mut self, sample: Sample) { if let Some(ts) = sample.timestamp() { - self.timstamped.entry(ts.clone()).or_insert(sample); + self.timstamped.entry(*ts).or_insert(sample); } else { self.untimestamped.push_back(sample); } diff --git a/zenoh/tests/qos.rs b/zenoh/tests/qos.rs index 24119e7b1e..1a9df306b2 100644 --- a/zenoh/tests/qos.rs +++ b/zenoh/tests/qos.rs @@ -52,13 +52,13 @@ fn pubsub() { task::sleep(SLEEP).await; ztimeout!(publisher1.put("qos").res_async()).unwrap(); - let qos = ztimeout!(subscriber.recv_async()).unwrap().qos().clone(); + let qos = *ztimeout!(subscriber.recv_async()).unwrap().qos(); assert_eq!(qos.priority(), Priority::DataHigh); assert_eq!(qos.congestion_control(), CongestionControl::Drop); ztimeout!(publisher2.put("qos").res_async()).unwrap(); - let qos = ztimeout!(subscriber.recv_async()).unwrap().qos().clone(); + let qos = *ztimeout!(subscriber.recv_async()).unwrap().qos(); assert_eq!(qos.priority(), Priority::DataLow); assert_eq!(qos.congestion_control(), CongestionControl::Block); From 0ca41e817044e80a6c422122f46aa3e60821ce64 Mon Sep 17 00:00:00 2001 From: Denis Biryukov Date: Wed, 13 Mar 2024 15:26:19 +0100 Subject: [PATCH 015/124] mark remaining sample-mutating methods as unstable and hidden --- zenoh/src/sample.rs | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/zenoh/src/sample.rs b/zenoh/src/sample.rs index 1ac04313ab..9b9c55822e 100644 --- a/zenoh/src/sample.rs +++ b/zenoh/src/sample.rs @@ -520,10 
+520,12 @@ impl Sample { self } - #[inline] /// Ensure that an associated Timestamp is present in this Sample. /// If not, a new one is created with the current system time and 0x00 as id. /// Get the timestamp of this sample (either existing one or newly created) + #[inline] + #[doc(hidden)] + #[zenoh_macros::unstable] pub fn ensure_timestamp(&mut self) -> &Timestamp { if let Some(ref timestamp) = self.timestamp { timestamp @@ -542,8 +544,9 @@ impl Sample { } /// Gets the mutable sample attachment: a map of key-value pairs, where each key and value are byte-slices. - #[zenoh_macros::unstable] #[inline] + #[doc(hidden)] + #[zenoh_macros::unstable] pub fn attachment_mut(&mut self) -> &mut Option { &mut self.attachment } From ea7179f789dba510c0e2070188a374768850c76e Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Thu, 14 Mar 2024 12:21:03 +0100 Subject: [PATCH 016/124] Add express support in the pipeline --- commons/zenoh-protocol/src/network/mod.rs | 16 +++++++++++++-- io/zenoh-transport/src/common/pipeline.rs | 24 ++++++++++++++--------- 2 files changed, 29 insertions(+), 11 deletions(-) diff --git a/commons/zenoh-protocol/src/network/mod.rs b/commons/zenoh-protocol/src/network/mod.rs index 6af7fef243..0e198ddf0f 100644 --- a/commons/zenoh-protocol/src/network/mod.rs +++ b/commons/zenoh-protocol/src/network/mod.rs @@ -110,6 +110,18 @@ impl NetworkMessage { true } + #[inline] + pub fn is_express(&self) -> bool { + match &self.body { + NetworkBody::Push(msg) => msg.ext_qos.is_express(), + NetworkBody::Request(msg) => msg.ext_qos.is_express(), + NetworkBody::Response(msg) => msg.ext_qos.is_express(), + NetworkBody::ResponseFinal(msg) => msg.ext_qos.is_express(), + NetworkBody::Declare(msg) => msg.ext_qos.is_express(), + NetworkBody::OAM(msg) => msg.ext_qos.is_express(), + } + } + #[inline] pub fn is_droppable(&self) -> bool { if !self.is_reliable() { @@ -117,11 +129,11 @@ impl NetworkMessage { } let cc = match &self.body { - NetworkBody::Declare(msg) => 
msg.ext_qos.get_congestion_control(), NetworkBody::Push(msg) => msg.ext_qos.get_congestion_control(), NetworkBody::Request(msg) => msg.ext_qos.get_congestion_control(), NetworkBody::Response(msg) => msg.ext_qos.get_congestion_control(), NetworkBody::ResponseFinal(msg) => msg.ext_qos.get_congestion_control(), + NetworkBody::Declare(msg) => msg.ext_qos.get_congestion_control(), NetworkBody::OAM(msg) => msg.ext_qos.get_congestion_control(), }; @@ -131,11 +143,11 @@ impl NetworkMessage { #[inline] pub fn priority(&self) -> Priority { match &self.body { - NetworkBody::Declare(msg) => msg.ext_qos.get_priority(), NetworkBody::Push(msg) => msg.ext_qos.get_priority(), NetworkBody::Request(msg) => msg.ext_qos.get_priority(), NetworkBody::Response(msg) => msg.ext_qos.get_priority(), NetworkBody::ResponseFinal(msg) => msg.ext_qos.get_priority(), + NetworkBody::Declare(msg) => msg.ext_qos.get_priority(), NetworkBody::OAM(msg) => msg.ext_qos.get_priority(), } } diff --git a/io/zenoh-transport/src/common/pipeline.rs b/io/zenoh-transport/src/common/pipeline.rs index 3968eabdf5..516834fa41 100644 --- a/io/zenoh-transport/src/common/pipeline.rs +++ b/io/zenoh-transport/src/common/pipeline.rs @@ -161,12 +161,18 @@ impl StageIn { } macro_rules! 
zretok { - ($batch:expr) => {{ - let bytes = $batch.len(); - *c_guard = Some($batch); - drop(c_guard); - self.s_out.notify(bytes); - return true; + ($batch:expr, $msg:expr) => {{ + if $msg.is_express() { + // Move out existing batch + self.s_out.move_batch($batch); + return true; + } else { + let bytes = $batch.len(); + *c_guard = Some($batch); + drop(c_guard); + self.s_out.notify(bytes); + return true; + } }}; } @@ -174,7 +180,7 @@ impl StageIn { let mut batch = zgetbatch_rets!(false); // Attempt the serialization on the current batch let e = match batch.encode(&*msg) { - Ok(_) => zretok!(batch), + Ok(_) => zretok!(batch, msg), Err(e) => e, }; @@ -194,7 +200,7 @@ impl StageIn { if let BatchError::NewFrame = e { // Attempt a serialization with a new frame if batch.encode((&*msg, &frame)).is_ok() { - zretok!(batch); + zretok!(batch, msg); } } @@ -206,7 +212,7 @@ impl StageIn { // Attempt a second serialization on fully empty batch if batch.encode((&*msg, &frame)).is_ok() { - zretok!(batch); + zretok!(batch, msg); } // The second serialization attempt has failed. This means that the message is From 62bf7d3c12d1e4bf56375a6af7a6bd9ebdf8e81a Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Thu, 14 Mar 2024 12:34:42 +0100 Subject: [PATCH 017/124] Add express support to publisher and put --- zenoh/src/publication.rs | 24 ++++++++++++++++++++++-- zenoh/src/session.rs | 2 ++ 2 files changed, 24 insertions(+), 2 deletions(-) diff --git a/zenoh/src/publication.rs b/zenoh/src/publication.rs index f12842d081..75d4ddc2b7 100644 --- a/zenoh/src/publication.rs +++ b/zenoh/src/publication.rs @@ -102,6 +102,13 @@ impl PutBuilder<'_, '_> { self } + /// Change the `congestion_control` to apply when routing the data. 
+ #[inline] + pub fn express(mut self, is_express: bool) -> Self { + self.publisher = self.publisher.express(is_express); + self + } + /// Restrict the matching subscribers that will receive the published data /// to the ones that have the given [`Locality`](crate::prelude::Locality). #[zenoh_macros::unstable] @@ -141,6 +148,7 @@ impl SyncResolve for PutBuilder<'_, '_> { key_expr, congestion_control, priority, + is_express, destination, } = self.publisher; @@ -151,6 +159,7 @@ impl SyncResolve for PutBuilder<'_, '_> { key_expr: key_expr?, congestion_control, priority, + is_express, destination, }; @@ -248,6 +257,7 @@ pub struct Publisher<'a> { pub(crate) key_expr: KeyExpr<'a>, pub(crate) congestion_control: CongestionControl, pub(crate) priority: Priority, + pub(crate) is_express: bool, pub(crate) destination: Locality, } @@ -738,6 +748,7 @@ pub struct PublisherBuilder<'a, 'b: 'a> { pub(crate) key_expr: ZResult>, pub(crate) congestion_control: CongestionControl, pub(crate) priority: Priority, + pub(crate) is_express: bool, pub(crate) destination: Locality, } @@ -751,6 +762,7 @@ impl<'a, 'b> Clone for PublisherBuilder<'a, 'b> { }, congestion_control: self.congestion_control, priority: self.priority, + is_express: self.is_express, destination: self.destination, } } @@ -771,6 +783,13 @@ impl<'a, 'b> PublisherBuilder<'a, 'b> { self } + /// Change the `congestion_control` to apply when routing the data. + #[inline] + pub fn express(mut self, is_express: bool) -> Self { + self.is_express = is_express; + self + } + /// Restrict the matching subscribers that will receive the published data /// to the ones that have the given [`Locality`](crate::prelude::Locality). 
#[zenoh_macros::unstable] @@ -830,6 +849,7 @@ impl<'a, 'b> SyncResolve for PublisherBuilder<'a, 'b> { key_expr, congestion_control: self.congestion_control, priority: self.priority, + is_express: self.is_express, destination: self.destination, }; log::trace!("publish({:?})", publisher.key_expr); @@ -867,7 +887,7 @@ fn resolve_put( ext_qos: ext::QoSType::new( publisher.priority.into(), publisher.congestion_control, - false, + publisher.is_express, ), ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -933,7 +953,7 @@ fn resolve_put( qos: QoS::from(ext::QoSType::new( publisher.priority.into(), publisher.congestion_control, - false, + publisher.is_express, )), }; diff --git a/zenoh/src/session.rs b/zenoh/src/session.rs index 4c303ae974..5e706a0da8 100644 --- a/zenoh/src/session.rs +++ b/zenoh/src/session.rs @@ -337,6 +337,7 @@ impl<'s, 'a> SessionDeclarations<'s, 'a> for SessionRef<'a> { key_expr: key_expr.try_into().map_err(Into::into), congestion_control: CongestionControl::DEFAULT, priority: Priority::DEFAULT, + is_express: false, destination: Locality::default(), } } @@ -1909,6 +1910,7 @@ impl<'s> SessionDeclarations<'s, 'static> for Arc { key_expr: key_expr.try_into().map_err(Into::into), congestion_control: CongestionControl::DEFAULT, priority: Priority::DEFAULT, + is_express: false, destination: Locality::default(), } } From 4d8ec6ca2d2f326c02af5aa71a92e68200dd2ba0 Mon Sep 17 00:00:00 2001 From: Gabriele Baldoni Date: Thu, 14 Mar 2024 18:34:55 +0000 Subject: [PATCH 018/124] fix(828): ensuring valid JSON response from REST API (#831) * fix(828): ensuring valid JSON response from REST API Signed-off-by: gabrik * fix(828): improved JSON format conversion Signed-off-by: gabrik * chore: addressing comments Signed-off-by: gabrik * fix(828): added 'into_string' for StringOrBase64 Signed-off-by: gabrik * chore: address comments Signed-off-by: gabrik --------- Signed-off-by: gabrik --- plugins/zenoh-plugin-rest/src/lib.rs | 85 ++++++++++++++++++---------- 
zenoh/src/payload.rs | 8 +++ 2 files changed, 64 insertions(+), 29 deletions(-) diff --git a/plugins/zenoh-plugin-rest/src/lib.rs b/plugins/zenoh-plugin-rest/src/lib.rs index 1a99d7b5a4..39225b5d25 100644 --- a/plugins/zenoh-plugin-rest/src/lib.rs +++ b/plugins/zenoh-plugin-rest/src/lib.rs @@ -18,9 +18,10 @@ //! //! [Click here for Zenoh's documentation](../zenoh/index.html) use async_std::prelude::FutureExt; -use base64::{engine::general_purpose::STANDARD as b64_std_engine, Engine}; +use base64::Engine; use futures::StreamExt; use http_types::Method; +use serde::{Deserialize, Serialize}; use std::borrow::Cow; use std::convert::TryFrom; use std::str::FromStr; @@ -28,6 +29,7 @@ use std::sync::Arc; use tide::http::Mime; use tide::sse::Sender; use tide::{Request, Response, Server, StatusCode}; +use zenoh::payload::StringOrBase64; use zenoh::plugins::{RunningPluginTrait, ZenohPlugin}; use zenoh::prelude::r#async::*; use zenoh::query::{QueryConsolidation, Reply}; @@ -46,36 +48,57 @@ lazy_static::lazy_static! 
{ } const RAW_KEY: &str = "_raw"; -fn payload_to_json(payload: Payload) -> String { - payload - .deserialize::() - .unwrap_or_else(|_| format!(r#""{}""#, b64_std_engine.encode(payload.contiguous()))) +#[derive(Serialize, Deserialize)] +struct JSONSample { + key: String, + value: serde_json::Value, + encoding: String, + time: Option, } -fn sample_to_json(sample: Sample) -> String { - format!( - r#"{{ "key": "{}", "value": {}, "encoding": "{}", "time": "{}" }}"#, - sample.key_expr.as_str(), - payload_to_json(sample.payload), - sample.encoding, - if let Some(ts) = sample.timestamp { - ts.to_string() - } else { - "None".to_string() +pub fn base64_encode(data: &[u8]) -> String { + use base64::engine::general_purpose; + general_purpose::STANDARD.encode(data) +} + +fn payload_to_json(payload: Payload, encoding: &Encoding) -> serde_json::Value { + match payload.is_empty() { + // If the value is empty return a JSON null + true => serde_json::Value::Null, + // if it is not check the encoding + false => { + match encoding { + // If it is a JSON try to deserialize as json, if it fails fallback to base64 + &Encoding::APPLICATION_JSON | &Encoding::TEXT_JSON | &Encoding::TEXT_JSON5 => { + serde_json::from_slice::(&payload.contiguous()).unwrap_or( + serde_json::Value::String(StringOrBase64::from(payload).into_string()), + ) + } + // otherwise convert to JSON string + _ => serde_json::Value::String(StringOrBase64::from(payload).into_string()), + } } - ) + } } -fn result_to_json(sample: Result) -> String { +fn sample_to_json(sample: Sample) -> JSONSample { + JSONSample { + key: sample.key_expr.as_str().to_string(), + value: payload_to_json(sample.payload, &sample.encoding), + encoding: sample.encoding.to_string(), + time: sample.timestamp.map(|ts| ts.to_string()), + } +} + +fn result_to_json(sample: Result) -> JSONSample { match sample { Ok(sample) => sample_to_json(sample), - Err(err) => { - format!( - r#"{{ "key": "ERROR", "value": {}, "encoding": "{}"}}"#, - 
payload_to_json(err.payload), - err.encoding, - ) - } + Err(err) => JSONSample { + key: "ERROR".into(), + value: payload_to_json(err.payload, &err.encoding), + encoding: err.encoding.to_string(), + time: None, + }, } } @@ -83,10 +106,10 @@ async fn to_json(results: flume::Receiver) -> String { let values = results .stream() .filter_map(move |reply| async move { Some(result_to_json(reply.sample)) }) - .collect::>() - .await - .join(",\n"); - format!("[\n{values}\n]\n") + .collect::>() + .await; + + serde_json::to_string(&values).unwrap_or("[]".into()) } async fn to_json_response(results: flume::Receiver) -> Response { @@ -321,8 +344,12 @@ async fn query(mut req: Request<(Arc, String)>) -> tide::Result String { + match self { + StringOrBase64::String(s) | StringOrBase64::Base64(s) => s, + } + } +} + impl Deref for StringOrBase64 { type Target = String; From 622b230286ca37899f768b24b865e18669c2b0c1 Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Fri, 15 Mar 2024 10:12:14 +0100 Subject: [PATCH 019/124] Add express support (#829) * Improve docs * Add express to examples * Fix doc in sample.rs --- examples/examples/z_ping.rs | 9 +++++++-- examples/examples/z_pong.rs | 10 +++++++--- examples/examples/z_pub_thr.rs | 4 ++++ zenoh/src/publication.rs | 8 ++++++-- zenoh/src/sample.rs | 2 +- 5 files changed, 25 insertions(+), 8 deletions(-) diff --git a/examples/examples/z_ping.rs b/examples/examples/z_ping.rs index cb6fecd81a..a57c937e48 100644 --- a/examples/examples/z_ping.rs +++ b/examples/examples/z_ping.rs @@ -22,7 +22,7 @@ fn main() { // initiate logging env_logger::init(); - let (config, warmup, size, n) = parse_args(); + let (config, warmup, size, n, express) = parse_args(); let session = zenoh::open(config).res().unwrap(); // The key expression to publish data on @@ -35,6 +35,7 @@ fn main() { let publisher = session .declare_publisher(key_expr_ping) .congestion_control(CongestionControl::Block) + .express(express) .res() .unwrap(); @@ -78,6 +79,9 @@ fn main() { 
#[derive(Parser)] struct Args { + /// express for sending data + #[arg(long, default_value = "false")] + no_express: bool, #[arg(short, long, default_value = "1")] /// The number of seconds to warm up (float) warmup: f64, @@ -90,12 +94,13 @@ struct Args { common: CommonArgs, } -fn parse_args() -> (Config, Duration, usize, usize) { +fn parse_args() -> (Config, Duration, usize, usize, bool) { let args = Args::parse(); ( args.common.into(), Duration::from_secs_f64(args.warmup), args.payload_size, args.samples, + !args.no_express, ) } diff --git a/examples/examples/z_pong.rs b/examples/examples/z_pong.rs index c2412b6d37..576ef232e5 100644 --- a/examples/examples/z_pong.rs +++ b/examples/examples/z_pong.rs @@ -21,7 +21,7 @@ fn main() { // initiate logging env_logger::init(); - let config = parse_args(); + let (config, express) = parse_args(); let session = zenoh::open(config).res().unwrap().into_arc(); @@ -34,6 +34,7 @@ fn main() { let publisher = session .declare_publisher(key_expr_pong) .congestion_control(CongestionControl::Block) + .express(express) .res() .unwrap(); @@ -47,11 +48,14 @@ fn main() { #[derive(clap::Parser, Clone, PartialEq, Eq, Hash, Debug)] struct Args { + /// express for sending data + #[arg(long, default_value = "false")] + no_express: bool, #[command(flatten)] common: CommonArgs, } -fn parse_args() -> Config { +fn parse_args() -> (Config, bool) { let args = Args::parse(); - args.common.into() + (args.common.into(), !args.no_express) } diff --git a/examples/examples/z_pub_thr.rs b/examples/examples/z_pub_thr.rs index c042b2e7a2..4354ad2e68 100644 --- a/examples/examples/z_pub_thr.rs +++ b/examples/examples/z_pub_thr.rs @@ -41,6 +41,7 @@ fn main() { .declare_publisher("test/thr") .congestion_control(CongestionControl::Block) .priority(prio) + .express(args.express) .res() .unwrap(); @@ -65,6 +66,9 @@ fn main() { #[derive(Parser, Clone, PartialEq, Eq, Hash, Debug)] struct Args { + /// express for sending data + #[arg(long, default_value = "false")] 
+ express: bool, /// Priority for sending data #[arg(short, long)] priority: Option, diff --git a/zenoh/src/publication.rs b/zenoh/src/publication.rs index 75d4ddc2b7..1531cab606 100644 --- a/zenoh/src/publication.rs +++ b/zenoh/src/publication.rs @@ -102,7 +102,9 @@ impl PutBuilder<'_, '_> { self } - /// Change the `congestion_control` to apply when routing the data. + /// Change the `express` policy to apply when routing the data. + /// When express is set to `true`, then the message will not be batched. + /// This usually has a positive impact on latency but negative impact on throughput. #[inline] pub fn express(mut self, is_express: bool) -> Self { self.publisher = self.publisher.express(is_express); @@ -783,7 +785,9 @@ impl<'a, 'b> PublisherBuilder<'a, 'b> { self } - /// Change the `congestion_control` to apply when routing the data. + /// Change the `express` policy to apply when routing the data. + /// When express is set to `true`, then the message will not be batched. + /// This usually has a positive impact on latency but negative impact on throughput. #[inline] pub fn express(mut self, is_express: bool) -> Self { self.is_express = is_express; diff --git a/zenoh/src/sample.rs b/zenoh/src/sample.rs index 9c68b460d9..36ebeeb129 100644 --- a/zenoh/src/sample.rs +++ b/zenoh/src/sample.rs @@ -556,7 +556,7 @@ impl QoS { self.inner.get_congestion_control() } - /// Gets express flag value. If true, the message is not batched during transmission, in order to reduce latency. + /// Gets express flag value. If `true`, the message is not batched during transmission, in order to reduce latency. 
pub fn express(&self) -> bool { self.inner.is_express() } From d73da7d70fef25d76bf94945792da0f0adffed0b Mon Sep 17 00:00:00 2001 From: Denis Biryukov Date: Fri, 15 Mar 2024 11:22:00 +0100 Subject: [PATCH 020/124] clippy --- plugins/zenoh-plugin-rest/src/lib.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/plugins/zenoh-plugin-rest/src/lib.rs b/plugins/zenoh-plugin-rest/src/lib.rs index e0df8f286b..e2718f6579 100644 --- a/plugins/zenoh-plugin-rest/src/lib.rs +++ b/plugins/zenoh-plugin-rest/src/lib.rs @@ -344,7 +344,6 @@ async fn query(mut req: Request<(Arc, String)>) -> tide::Result Date: Mon, 18 Mar 2024 13:20:01 +0100 Subject: [PATCH 021/124] Rename IntoCallbackReceiver trait to IntoHandler trait (#816) --- zenoh-ext/src/querying_subscriber.rs | 36 ++++++++--------- zenoh/src/handlers.rs | 60 ++++++++++++++++------------ zenoh/src/liveliness.rs | 40 +++++++++---------- zenoh/src/prelude.rs | 4 +- zenoh/src/publication.rs | 22 +++++----- zenoh/src/query.rs | 20 +++++----- zenoh/src/queryable.rs | 22 +++++----- zenoh/src/scouting.rs | 22 +++++----- zenoh/src/subscriber.rs | 42 +++++++++---------- 9 files changed, 138 insertions(+), 130 deletions(-) diff --git a/zenoh-ext/src/querying_subscriber.rs b/zenoh-ext/src/querying_subscriber.rs index 480e490fdd..8cb5480e58 100644 --- a/zenoh-ext/src/querying_subscriber.rs +++ b/zenoh-ext/src/querying_subscriber.rs @@ -105,7 +105,7 @@ impl<'a, 'b, KeySpace> QueryingSubscriberBuilder<'a, 'b, KeySpace, DefaultHandle handler: Handler, ) -> QueryingSubscriberBuilder<'a, 'b, KeySpace, Handler> where - Handler: zenoh::prelude::IntoCallbackReceiverPair<'static, Sample>, + Handler: zenoh::prelude::IntoHandler<'static, Sample>, { let QueryingSubscriberBuilder { session, @@ -214,17 +214,17 @@ impl<'a, 'b, KeySpace, Handler> QueryingSubscriberBuilder<'a, 'b, KeySpace, Hand impl<'a, KeySpace, Handler> Resolvable for QueryingSubscriberBuilder<'a, '_, KeySpace, Handler> where - Handler: IntoCallbackReceiverPair<'static, Sample>, - 
Handler::Receiver: Send, + Handler: IntoHandler<'static, Sample>, + Handler::Handler: Send, { - type To = ZResult>; + type To = ZResult>; } impl SyncResolve for QueryingSubscriberBuilder<'_, '_, KeySpace, Handler> where KeySpace: Into + Clone, - Handler: IntoCallbackReceiverPair<'static, Sample> + Send, - Handler::Receiver: Send, + Handler: IntoHandler<'static, Sample> + Send, + Handler::Handler: Send, { fn res_sync(self) -> ::To { let session = self.session.clone(); @@ -272,8 +272,8 @@ where impl<'a, KeySpace, Handler> AsyncResolve for QueryingSubscriberBuilder<'a, '_, KeySpace, Handler> where KeySpace: Into + Clone, - Handler: IntoCallbackReceiverPair<'static, Sample> + Send, - Handler::Receiver: Send, + Handler: IntoHandler<'static, Sample> + Send, + Handler::Handler: Send, { type Future = Ready; @@ -462,7 +462,7 @@ where handler: Handler, ) -> FetchingSubscriberBuilder<'a, 'b, KeySpace, Handler, Fetch, TryIntoSample> where - Handler: zenoh::prelude::IntoCallbackReceiverPair<'static, Sample>, + Handler: zenoh::prelude::IntoHandler<'static, Sample>, { let FetchingSubscriberBuilder { session, @@ -536,11 +536,11 @@ impl< TryIntoSample, > Resolvable for FetchingSubscriberBuilder<'a, '_, KeySpace, Handler, Fetch, TryIntoSample> where - Handler: IntoCallbackReceiverPair<'static, Sample>, - Handler::Receiver: Send, + Handler: IntoHandler<'static, Sample>, + Handler::Handler: Send, TryIntoSample: ExtractSample, { - type To = ZResult>; + type To = ZResult>; } impl< @@ -551,8 +551,8 @@ impl< > SyncResolve for FetchingSubscriberBuilder<'_, '_, KeySpace, Handler, Fetch, TryIntoSample> where KeySpace: Into, - Handler: IntoCallbackReceiverPair<'static, Sample> + Send, - Handler::Receiver: Send, + Handler: IntoHandler<'static, Sample> + Send, + Handler::Handler: Send, TryIntoSample: ExtractSample + Send + Sync, { fn res_sync(self) -> ::To { @@ -569,8 +569,8 @@ impl< > AsyncResolve for FetchingSubscriberBuilder<'a, '_, KeySpace, Handler, Fetch, TryIntoSample> where KeySpace: 
Into, - Handler: IntoCallbackReceiverPair<'static, Sample> + Send, - Handler::Receiver: Send, + Handler: IntoHandler<'static, Sample> + Send, + Handler::Handler: Send, TryIntoSample: ExtractSample + Send + Sync, { type Future = Ready; @@ -643,14 +643,14 @@ impl<'a, Receiver> FetchingSubscriber<'a, Receiver> { ) -> ZResult where KeySpace: Into, - Handler: IntoCallbackReceiverPair<'static, Sample, Receiver = Receiver> + Send, + Handler: IntoHandler<'static, Sample, Handler = Receiver> + Send, TryIntoSample: ExtractSample + Send + Sync, { let state = Arc::new(Mutex::new(InnerState { pending_fetches: 0, merge_queue: MergeQueue::new(), })); - let (callback, receiver) = conf.handler.into_cb_receiver_pair(); + let (callback, receiver) = conf.handler.into_handler(); let sub_callback = { let state = state.clone(); diff --git a/zenoh/src/handlers.rs b/zenoh/src/handlers.rs index 69828a5d7f..e5ec3bb0dc 100644 --- a/zenoh/src/handlers.rs +++ b/zenoh/src/handlers.rs @@ -17,34 +17,36 @@ use crate::API_DATA_RECEPTION_CHANNEL_SIZE; /// An alias for `Arc`. pub type Dyn = std::sync::Arc; + /// An immutable callback function. pub type Callback<'a, T> = Dyn; -/// A type that can be converted into a [`Callback`]-receiver pair. +/// A type that can be converted into a [`Callback`]-handler pair. /// /// When Zenoh functions accept types that implement these, it intends to use the [`Callback`] as just that, -/// while granting you access to the receiver through the returned value via [`std::ops::Deref`] and [`std::ops::DerefMut`]. +/// while granting you access to the handler through the returned value via [`std::ops::Deref`] and [`std::ops::DerefMut`]. /// /// Any closure that accepts `T` can be converted into a pair of itself and `()`. 
-pub trait IntoCallbackReceiverPair<'a, T> { - type Receiver; - fn into_cb_receiver_pair(self) -> (Callback<'a, T>, Self::Receiver); +pub trait IntoHandler<'a, T> { + type Handler; + + fn into_handler(self) -> (Callback<'a, T>, Self::Handler); } -impl<'a, T, F> IntoCallbackReceiverPair<'a, T> for F + +impl<'a, T, F> IntoHandler<'a, T> for F where F: Fn(T) + Send + Sync + 'a, { - type Receiver = (); - fn into_cb_receiver_pair(self) -> (Callback<'a, T>, Self::Receiver) { + type Handler = (); + fn into_handler(self) -> (Callback<'a, T>, Self::Handler) { (Dyn::from(self), ()) } } -impl IntoCallbackReceiverPair<'static, T> - for (flume::Sender, flume::Receiver) -{ - type Receiver = flume::Receiver; - fn into_cb_receiver_pair(self) -> (Callback<'static, T>, Self::Receiver) { +impl IntoHandler<'static, T> for (flume::Sender, flume::Receiver) { + type Handler = flume::Receiver; + + fn into_handler(self) -> (Callback<'static, T>, Self::Handler) { let (sender, receiver) = self; ( Dyn::new(move |t| { @@ -56,18 +58,24 @@ impl IntoCallbackReceiverPair<'static, T> ) } } + +/// The default handler in Zenoh is a FIFO queue. 
pub struct DefaultHandler; -impl IntoCallbackReceiverPair<'static, T> for DefaultHandler { - type Receiver = flume::Receiver; - fn into_cb_receiver_pair(self) -> (Callback<'static, T>, Self::Receiver) { - flume::bounded(*API_DATA_RECEPTION_CHANNEL_SIZE).into_cb_receiver_pair() + +impl IntoHandler<'static, T> for DefaultHandler { + type Handler = flume::Receiver; + + fn into_handler(self) -> (Callback<'static, T>, Self::Handler) { + flume::bounded(*API_DATA_RECEPTION_CHANNEL_SIZE).into_handler() } } -impl IntoCallbackReceiverPair<'static, T> + +impl IntoHandler<'static, T> for (std::sync::mpsc::SyncSender, std::sync::mpsc::Receiver) { - type Receiver = std::sync::mpsc::Receiver; - fn into_cb_receiver_pair(self) -> (Callback<'static, T>, Self::Receiver) { + type Handler = std::sync::mpsc::Receiver; + + fn into_handler(self) -> (Callback<'static, T>, Self::Handler) { let (sender, receiver) = self; ( Dyn::new(move |t| { @@ -96,7 +104,7 @@ pub fn locked(fnmut: impl FnMut(T)) -> impl Fn(T) { /// - `callback` will never be called once `drop` has started. /// - `drop` will only be called **once**, and **after every** `callback` has ended. /// - The two previous guarantees imply that `call` and `drop` are never called concurrently. 
-pub struct CallbackPair +pub struct CallbackDrop where DropFn: FnMut() + Send + Sync + 'static, { @@ -104,7 +112,7 @@ where pub drop: DropFn, } -impl Drop for CallbackPair +impl Drop for CallbackDrop where DropFn: FnMut() + Send + Sync + 'static, { @@ -113,14 +121,14 @@ where } } -impl<'a, OnEvent, Event, DropFn> IntoCallbackReceiverPair<'a, Event> - for CallbackPair +impl<'a, OnEvent, Event, DropFn> IntoHandler<'a, Event> for CallbackDrop where OnEvent: Fn(Event) + Send + Sync + 'a, DropFn: FnMut() + Send + Sync + 'static, { - type Receiver = (); - fn into_cb_receiver_pair(self) -> (Callback<'a, Event>, Self::Receiver) { + type Handler = (); + + fn into_handler(self) -> (Callback<'a, Event>, Self::Handler) { (Dyn::from(move |evt| (self.callback)(evt)), ()) } } diff --git a/zenoh/src/liveliness.rs b/zenoh/src/liveliness.rs index d4229db4cc..4103504f13 100644 --- a/zenoh/src/liveliness.rs +++ b/zenoh/src/liveliness.rs @@ -484,7 +484,7 @@ impl<'a, 'b> LivelinessSubscriberBuilder<'a, 'b, DefaultHandler> { self.callback(locked(callback)) } - /// Receive the samples for this subscription with a [`Handler`](crate::prelude::IntoCallbackReceiverPair). + /// Receive the samples for this subscription with a [`Handler`](crate::prelude::IntoHandler). 
/// /// # Examples /// ```no_run @@ -507,7 +507,7 @@ impl<'a, 'b> LivelinessSubscriberBuilder<'a, 'b, DefaultHandler> { #[zenoh_macros::unstable] pub fn with(self, handler: Handler) -> LivelinessSubscriberBuilder<'a, 'b, Handler> where - Handler: crate::handlers::IntoCallbackReceiverPair<'static, Sample>, + Handler: crate::handlers::IntoHandler<'static, Sample>, { let LivelinessSubscriberBuilder { session, @@ -525,23 +525,23 @@ impl<'a, 'b> LivelinessSubscriberBuilder<'a, 'b, DefaultHandler> { #[zenoh_macros::unstable] impl<'a, Handler> Resolvable for LivelinessSubscriberBuilder<'a, '_, Handler> where - Handler: IntoCallbackReceiverPair<'static, Sample> + Send, - Handler::Receiver: Send, + Handler: IntoHandler<'static, Sample> + Send, + Handler::Handler: Send, { - type To = ZResult>; + type To = ZResult>; } #[zenoh_macros::unstable] impl<'a, Handler> SyncResolve for LivelinessSubscriberBuilder<'a, '_, Handler> where - Handler: IntoCallbackReceiverPair<'static, Sample> + Send, - Handler::Receiver: Send, + Handler: IntoHandler<'static, Sample> + Send, + Handler::Handler: Send, { #[zenoh_macros::unstable] fn res_sync(self) -> ::To { let key_expr = self.key_expr?; let session = self.session; - let (callback, receiver) = self.handler.into_cb_receiver_pair(); + let (callback, receiver) = self.handler.into_handler(); session .declare_subscriber_inner( &key_expr, @@ -564,8 +564,8 @@ where #[zenoh_macros::unstable] impl<'a, Handler> AsyncResolve for LivelinessSubscriberBuilder<'a, '_, Handler> where - Handler: IntoCallbackReceiverPair<'static, Sample> + Send, - Handler::Receiver: Send, + Handler: IntoHandler<'static, Sample> + Send, + Handler::Handler: Send, { type Future = Ready; @@ -677,7 +677,7 @@ impl<'a, 'b> LivelinessGetBuilder<'a, 'b, DefaultHandler> { self.callback(locked(callback)) } - /// Receive the replies for this query with a [`Handler`](crate::prelude::IntoCallbackReceiverPair). 
+ /// Receive the replies for this query with a [`Handler`](crate::prelude::IntoHandler). /// /// # Examples /// ``` @@ -700,7 +700,7 @@ impl<'a, 'b> LivelinessGetBuilder<'a, 'b, DefaultHandler> { #[inline] pub fn with(self, handler: Handler) -> LivelinessGetBuilder<'a, 'b, Handler> where - Handler: IntoCallbackReceiverPair<'static, Reply>, + Handler: IntoHandler<'static, Reply>, { let LivelinessGetBuilder { session, @@ -728,19 +728,19 @@ impl<'a, 'b, Handler> LivelinessGetBuilder<'a, 'b, Handler> { impl Resolvable for LivelinessGetBuilder<'_, '_, Handler> where - Handler: IntoCallbackReceiverPair<'static, Reply> + Send, - Handler::Receiver: Send, + Handler: IntoHandler<'static, Reply> + Send, + Handler::Handler: Send, { - type To = ZResult; + type To = ZResult; } impl SyncResolve for LivelinessGetBuilder<'_, '_, Handler> where - Handler: IntoCallbackReceiverPair<'static, Reply> + Send, - Handler::Receiver: Send, + Handler: IntoHandler<'static, Reply> + Send, + Handler::Handler: Send, { fn res_sync(self) -> ::To { - let (callback, receiver) = self.handler.into_cb_receiver_pair(); + let (callback, receiver) = self.handler.into_handler(); self.session .query( @@ -761,8 +761,8 @@ where impl AsyncResolve for LivelinessGetBuilder<'_, '_, Handler> where - Handler: IntoCallbackReceiverPair<'static, Reply> + Send, - Handler::Receiver: Send, + Handler: IntoHandler<'static, Reply> + Send, + Handler::Handler: Send, { type Future = Ready; diff --git a/zenoh/src/prelude.rs b/zenoh/src/prelude.rs index 177906e9b1..26c93e1801 100644 --- a/zenoh/src/prelude.rs +++ b/zenoh/src/prelude.rs @@ -37,11 +37,11 @@ pub(crate) mod common { pub use zenoh_protocol::core::{EntityGlobalId, EntityId}; pub use crate::config::{self, Config, ValidatedMap}; - pub use crate::handlers::IntoCallbackReceiverPair; + pub use crate::handlers::IntoHandler; + pub use crate::selector::{Parameter, Parameters, Selector}; pub use crate::session::{Session, SessionDeclarations}; pub use 
crate::query::{ConsolidationMode, QueryConsolidation, QueryTarget}; - pub use crate::selector::{Parameter, Parameters, Selector}; pub use crate::encoding::Encoding; /// The encoding of a zenoh `Value`. diff --git a/zenoh/src/publication.rs b/zenoh/src/publication.rs index 1531cab606..392c0bf8c1 100644 --- a/zenoh/src/publication.rs +++ b/zenoh/src/publication.rs @@ -22,7 +22,7 @@ use crate::SessionRef; use crate::Undeclarable; #[cfg(feature = "unstable")] use crate::{ - handlers::{Callback, DefaultHandler, IntoCallbackReceiverPair}, + handlers::{Callback, DefaultHandler, IntoHandler}, Id, }; use std::future::Ready; @@ -1180,7 +1180,7 @@ impl<'a> MatchingListenerBuilder<'a, DefaultHandler> { self.callback(crate::handlers::locked(callback)) } - /// Receive the MatchingStatuses for this listener with a [`Handler`](crate::prelude::IntoCallbackReceiverPair). + /// Receive the MatchingStatuses for this listener with a [`Handler`](crate::prelude::IntoHandler). /// /// # Examples /// ```no_run @@ -1208,7 +1208,7 @@ impl<'a> MatchingListenerBuilder<'a, DefaultHandler> { #[zenoh_macros::unstable] pub fn with(self, handler: Handler) -> MatchingListenerBuilder<'a, Handler> where - Handler: crate::prelude::IntoCallbackReceiverPair<'static, MatchingStatus>, + Handler: crate::prelude::IntoHandler<'static, MatchingStatus>, { let MatchingListenerBuilder { publisher, @@ -1221,21 +1221,21 @@ impl<'a> MatchingListenerBuilder<'a, DefaultHandler> { #[zenoh_macros::unstable] impl<'a, Handler> Resolvable for MatchingListenerBuilder<'a, Handler> where - Handler: IntoCallbackReceiverPair<'static, MatchingStatus> + Send, - Handler::Receiver: Send, + Handler: IntoHandler<'static, MatchingStatus> + Send, + Handler::Handler: Send, { - type To = ZResult>; + type To = ZResult>; } #[zenoh_macros::unstable] impl<'a, Handler> SyncResolve for MatchingListenerBuilder<'a, Handler> where - Handler: IntoCallbackReceiverPair<'static, MatchingStatus> + Send, - Handler::Receiver: Send, + Handler: 
IntoHandler<'static, MatchingStatus> + Send, + Handler::Handler: Send, { #[zenoh_macros::unstable] fn res_sync(self) -> ::To { - let (callback, receiver) = self.handler.into_cb_receiver_pair(); + let (callback, receiver) = self.handler.into_handler(); self.publisher .session .declare_matches_listener_inner(&self.publisher, callback) @@ -1253,8 +1253,8 @@ where #[zenoh_macros::unstable] impl<'a, Handler> AsyncResolve for MatchingListenerBuilder<'a, Handler> where - Handler: IntoCallbackReceiverPair<'static, MatchingStatus> + Send, - Handler::Receiver: Send, + Handler: IntoHandler<'static, MatchingStatus> + Send, + Handler::Handler: Send, { type Future = Ready; diff --git a/zenoh/src/query.rs b/zenoh/src/query.rs index a848913c7a..fe48748ad4 100644 --- a/zenoh/src/query.rs +++ b/zenoh/src/query.rs @@ -209,7 +209,7 @@ impl<'a, 'b> GetBuilder<'a, 'b, DefaultHandler> { self.callback(locked(callback)) } - /// Receive the replies for this query with a [`Handler`](crate::prelude::IntoCallbackReceiverPair). + /// Receive the replies for this query with a [`Handler`](crate::prelude::IntoHandler). 
/// /// # Examples /// ``` @@ -231,7 +231,7 @@ impl<'a, 'b> GetBuilder<'a, 'b, DefaultHandler> { #[inline] pub fn with(self, handler: Handler) -> GetBuilder<'a, 'b, Handler> where - Handler: IntoCallbackReceiverPair<'static, Reply>, + Handler: IntoHandler<'static, Reply>, { let GetBuilder { session, @@ -362,19 +362,19 @@ impl Default for ReplyKeyExpr { impl Resolvable for GetBuilder<'_, '_, Handler> where - Handler: IntoCallbackReceiverPair<'static, Reply> + Send, - Handler::Receiver: Send, + Handler: IntoHandler<'static, Reply> + Send, + Handler::Handler: Send, { - type To = ZResult; + type To = ZResult; } impl SyncResolve for GetBuilder<'_, '_, Handler> where - Handler: IntoCallbackReceiverPair<'static, Reply> + Send, - Handler::Receiver: Send, + Handler: IntoHandler<'static, Reply> + Send, + Handler::Handler: Send, { fn res_sync(self) -> ::To { - let (callback, receiver) = self.handler.into_cb_receiver_pair(); + let (callback, receiver) = self.handler.into_handler(); self.session .query( @@ -395,8 +395,8 @@ where impl AsyncResolve for GetBuilder<'_, '_, Handler> where - Handler: IntoCallbackReceiverPair<'static, Reply> + Send, - Handler::Receiver: Send, + Handler: IntoHandler<'static, Reply> + Send, + Handler::Handler: Send, { type Future = Ready; diff --git a/zenoh/src/queryable.rs b/zenoh/src/queryable.rs index d98df046b7..d2eabcdc2a 100644 --- a/zenoh/src/queryable.rs +++ b/zenoh/src/queryable.rs @@ -599,7 +599,7 @@ impl<'a, 'b> QueryableBuilder<'a, 'b, DefaultHandler> { self.callback(locked(callback)) } - /// Receive the queries for this Queryable with a [`Handler`](crate::prelude::IntoCallbackReceiverPair). + /// Receive the queries for this Queryable with a [`Handler`](crate::prelude::IntoHandler). 
/// /// # Examples /// ```no_run @@ -621,7 +621,7 @@ impl<'a, 'b> QueryableBuilder<'a, 'b, DefaultHandler> { #[inline] pub fn with(self, handler: Handler) -> QueryableBuilder<'a, 'b, Handler> where - Handler: crate::prelude::IntoCallbackReceiverPair<'static, Query>, + Handler: crate::prelude::IntoHandler<'static, Query>, { let QueryableBuilder { session, @@ -657,7 +657,7 @@ impl<'a, 'b, Handler> QueryableBuilder<'a, 'b, Handler> { } } -/// A queryable that provides data through a [`Handler`](crate::prelude::IntoCallbackReceiverPair). +/// A queryable that provides data through a [`Handler`](crate::prelude::IntoHandler). /// /// Queryables can be created from a zenoh [`Session`] /// with the [`declare_queryable`](crate::Session::declare_queryable) function @@ -740,20 +740,20 @@ impl Deref for Queryable<'_, Receiver> { impl<'a, Handler> Resolvable for QueryableBuilder<'a, '_, Handler> where - Handler: IntoCallbackReceiverPair<'static, Query> + Send, - Handler::Receiver: Send, + Handler: IntoHandler<'static, Query> + Send, + Handler::Handler: Send, { - type To = ZResult>; + type To = ZResult>; } impl<'a, Handler> SyncResolve for QueryableBuilder<'a, '_, Handler> where - Handler: IntoCallbackReceiverPair<'static, Query> + Send, - Handler::Receiver: Send, + Handler: IntoHandler<'static, Query> + Send, + Handler::Handler: Send, { fn res_sync(self) -> ::To { let session = self.session; - let (callback, receiver) = self.handler.into_cb_receiver_pair(); + let (callback, receiver) = self.handler.into_handler(); session .declare_queryable_inner( &self.key_expr?.to_wire(&session), @@ -774,8 +774,8 @@ where impl<'a, Handler> AsyncResolve for QueryableBuilder<'a, '_, Handler> where - Handler: IntoCallbackReceiverPair<'static, Query> + Send, - Handler::Receiver: Send, + Handler: IntoHandler<'static, Query> + Send, + Handler::Handler: Send, { type Future = Ready; diff --git a/zenoh/src/scouting.rs b/zenoh/src/scouting.rs index ea09823ea1..470e2f1c61 100644 --- 
a/zenoh/src/scouting.rs +++ b/zenoh/src/scouting.rs @@ -115,7 +115,7 @@ impl ScoutBuilder { self.callback(locked(callback)) } - /// Receive the [`Hello`] messages from this scout with a [`Handler`](crate::prelude::IntoCallbackReceiverPair). + /// Receive the [`Hello`] messages from this scout with a [`Handler`](crate::prelude::IntoHandler). /// /// # Examples /// ```no_run @@ -136,7 +136,7 @@ impl ScoutBuilder { #[inline] pub fn with(self, handler: Handler) -> ScoutBuilder where - Handler: crate::prelude::IntoCallbackReceiverPair<'static, Hello>, + Handler: crate::prelude::IntoHandler<'static, Hello>, { let ScoutBuilder { what, @@ -153,27 +153,27 @@ impl ScoutBuilder { impl Resolvable for ScoutBuilder where - Handler: crate::prelude::IntoCallbackReceiverPair<'static, Hello> + Send, - Handler::Receiver: Send, + Handler: crate::prelude::IntoHandler<'static, Hello> + Send, + Handler::Handler: Send, { - type To = ZResult>; + type To = ZResult>; } impl SyncResolve for ScoutBuilder where - Handler: crate::prelude::IntoCallbackReceiverPair<'static, Hello> + Send, - Handler::Receiver: Send, + Handler: crate::prelude::IntoHandler<'static, Hello> + Send, + Handler::Handler: Send, { fn res_sync(self) -> ::To { - let (callback, receiver) = self.handler.into_cb_receiver_pair(); + let (callback, receiver) = self.handler.into_handler(); scout(self.what, self.config?, callback).map(|scout| Scout { scout, receiver }) } } impl AsyncResolve for ScoutBuilder where - Handler: crate::prelude::IntoCallbackReceiverPair<'static, Hello> + Send, - Handler::Receiver: Send, + Handler: crate::prelude::IntoHandler<'static, Hello> + Send, + Handler::Handler: Send, { type Future = Ready; @@ -231,7 +231,7 @@ impl fmt::Debug for ScoutInner { } } -/// A scout that returns [`Hello`] messages through a [`Handler`](crate::prelude::IntoCallbackReceiverPair). +/// A scout that returns [`Hello`] messages through a [`Handler`](crate::prelude::IntoHandler). 
/// /// # Examples /// ```no_run diff --git a/zenoh/src/subscriber.rs b/zenoh/src/subscriber.rs index d4c3257472..413c9201f2 100644 --- a/zenoh/src/subscriber.rs +++ b/zenoh/src/subscriber.rs @@ -13,7 +13,7 @@ // //! Subscribing primitives. -use crate::handlers::{locked, Callback, DefaultHandler, IntoCallbackReceiverPair}; +use crate::handlers::{locked, Callback, DefaultHandler, IntoHandler}; use crate::key_expr::KeyExpr; use crate::prelude::Locality; use crate::sample::Sample; @@ -392,7 +392,7 @@ impl<'a, 'b, Mode> SubscriberBuilder<'a, 'b, Mode, DefaultHandler> { self.callback(locked(callback)) } - /// Receive the samples for this subscription with a [`Handler`](crate::prelude::IntoCallbackReceiverPair). + /// Receive the samples for this subscription with a [`Handler`](crate::prelude::IntoHandler). /// /// # Examples /// ```no_run @@ -414,7 +414,7 @@ impl<'a, 'b, Mode> SubscriberBuilder<'a, 'b, Mode, DefaultHandler> { #[inline] pub fn with(self, handler: Handler) -> SubscriberBuilder<'a, 'b, Mode, Handler> where - Handler: crate::prelude::IntoCallbackReceiverPair<'static, Sample>, + Handler: crate::prelude::IntoHandler<'static, Sample>, { let SubscriberBuilder { session, @@ -511,21 +511,21 @@ impl<'a, 'b, Mode, Handler> SubscriberBuilder<'a, 'b, Mode, Handler> { // Push mode impl<'a, Handler> Resolvable for SubscriberBuilder<'a, '_, PushMode, Handler> where - Handler: IntoCallbackReceiverPair<'static, Sample> + Send, - Handler::Receiver: Send, + Handler: IntoHandler<'static, Sample> + Send, + Handler::Handler: Send, { - type To = ZResult>; + type To = ZResult>; } impl<'a, Handler> SyncResolve for SubscriberBuilder<'a, '_, PushMode, Handler> where - Handler: IntoCallbackReceiverPair<'static, Sample> + Send, - Handler::Receiver: Send, + Handler: IntoHandler<'static, Sample> + Send, + Handler::Handler: Send, { fn res_sync(self) -> ::To { let key_expr = self.key_expr?; let session = self.session; - let (callback, receiver) = self.handler.into_cb_receiver_pair(); + 
let (callback, receiver) = self.handler.into_handler(); session .declare_subscriber_inner( &key_expr, @@ -550,8 +550,8 @@ where impl<'a, Handler> AsyncResolve for SubscriberBuilder<'a, '_, PushMode, Handler> where - Handler: IntoCallbackReceiverPair<'static, Sample> + Send, - Handler::Receiver: Send, + Handler: IntoHandler<'static, Sample> + Send, + Handler::Handler: Send, { type Future = Ready; @@ -563,21 +563,21 @@ where // Pull mode impl<'a, Handler> Resolvable for SubscriberBuilder<'a, '_, PullMode, Handler> where - Handler: IntoCallbackReceiverPair<'static, Sample> + Send, - Handler::Receiver: Send, + Handler: IntoHandler<'static, Sample> + Send, + Handler::Handler: Send, { - type To = ZResult>; + type To = ZResult>; } impl<'a, Handler> SyncResolve for SubscriberBuilder<'a, '_, PullMode, Handler> where - Handler: IntoCallbackReceiverPair<'static, Sample> + Send, - Handler::Receiver: Send, + Handler: IntoHandler<'static, Sample> + Send, + Handler::Handler: Send, { fn res_sync(self) -> ::To { let key_expr = self.key_expr?; let session = self.session; - let (callback, receiver) = self.handler.into_cb_receiver_pair(); + let (callback, receiver) = self.handler.into_handler(); session .declare_subscriber_inner( &key_expr, @@ -604,8 +604,8 @@ where impl<'a, Handler> AsyncResolve for SubscriberBuilder<'a, '_, PullMode, Handler> where - Handler: IntoCallbackReceiverPair<'static, Sample> + Send, - Handler::Receiver: Send, + Handler: IntoHandler<'static, Sample> + Send, + Handler::Handler: Send, { type Future = Ready; @@ -614,7 +614,7 @@ where } } -/// A subscriber that provides data through a [`Handler`](crate::prelude::IntoCallbackReceiverPair). +/// A subscriber that provides data through a [`Handler`](crate::prelude::IntoHandler). 
/// /// Subscribers can be created from a zenoh [`Session`](crate::Session) /// with the [`declare_subscriber`](crate::SessionDeclarations::declare_subscriber) function @@ -647,7 +647,7 @@ pub struct Subscriber<'a, Receiver> { pub receiver: Receiver, } -/// A [`PullMode`] subscriber that provides data through a [`Handler`](crate::prelude::IntoCallbackReceiverPair). +/// A [`PullMode`] subscriber that provides data through a [`Handler`](crate::prelude::IntoHandler). /// /// PullSubscribers only provide data when explicitely pulled by the /// application with the [`pull`](PullSubscriber::pull) function. From 665c90f0b326b125658267b2e50c4a6b43b3a42a Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Mon, 18 Mar 2024 18:10:32 +0100 Subject: [PATCH 022/124] Optimize zint encode/decode (#838) * Rebase on protocol_changes * Fix rebase conflict --- commons/zenoh-codec/src/core/zint.rs | 52 ++++++++++++++++------------ commons/zenoh-codec/tests/codec.rs | 21 ++++++++++- 2 files changed, 50 insertions(+), 23 deletions(-) diff --git a/commons/zenoh-codec/src/core/zint.rs b/commons/zenoh-codec/src/core/zint.rs index 1c2f5a28e4..0daff7348b 100644 --- a/commons/zenoh-codec/src/core/zint.rs +++ b/commons/zenoh-codec/src/core/zint.rs @@ -17,7 +17,7 @@ use zenoh_buffers::{ writer::{DidntWrite, Writer}, }; -const VLE_LEN: usize = 10; +const VLE_LEN: usize = 9; impl LCodec for Zenoh080 { fn w_len(self, x: u64) -> usize { @@ -29,7 +29,6 @@ impl LCodec for Zenoh080 { const B6: u64 = u64::MAX << (7 * 6); const B7: u64 = u64::MAX << (7 * 7); const B8: u64 = u64::MAX << (7 * 8); - const B9: u64 = u64::MAX << (7 * 9); if (x & B1) == 0 { 1 @@ -47,10 +46,8 @@ impl LCodec for Zenoh080 { 7 } else if (x & B8) == 0 { 8 - } else if (x & B9) == 0 { - 9 } else { - 10 + 9 } } } @@ -112,15 +109,31 @@ where fn write(self, writer: &mut W, mut x: u64) -> Self::Output { writer.with_slot(VLE_LEN, move |buffer| { let mut len = 0; - let mut b = x as u8; - while x > 0x7f { - buffer[len] = b | 0x80; + while 
(x & !0x7f_u64) != 0 { + // SAFETY: buffer is guaranteed to be VLE_LEN long where VLE_LEN is + // the maximum number of bytes a VLE can take once encoded. + // I.e.: x is shifted 7 bits to the right every iteration, + // the loop is at most VLE_LEN iterations. + unsafe { + *buffer.get_unchecked_mut(len) = (x as u8) | 0x80_u8; + } len += 1; x >>= 7; - b = x as u8; } - buffer[len] = b; - len + 1 + // In case len == VLE_LEN then all the bits have already been written in the latest iteration. + // Else we haven't written all the necessary bytes yet. + if len != VLE_LEN { + // SAFETY: buffer is guaranteed to be VLE_LEN long where VLE_LEN is + // the maximum number of bytes a VLE can take once encoded. + // I.e.: x is shifted 7 bits to the right every iteration, + // the loop is at most VLE_LEN iterations. + unsafe { + *buffer.get_unchecked_mut(len) = x as u8; + } + len += 1; + } + // The number of written bytes + len })?; Ok(()) } @@ -137,19 +150,14 @@ where let mut v = 0; let mut i = 0; - let mut k = VLE_LEN; - while b > 0x7f && k > 0 { - v |= ((b & 0x7f) as u64) << i; - i += 7; + // 7 * VLE_LEN is beyond the maximum number of shift bits + while (b & 0x80_u8) != 0 && i != 7 * (VLE_LEN - 1) { + v |= ((b & 0x7f_u8) as u64) << i; b = reader.read_u8()?; - k -= 1; - } - if k > 0 { - v |= ((b & 0x7f) as u64) << i; - Ok(v) - } else { - Err(DidntRead) + i += 7; } + v |= (b as u64) << i; + Ok(v) } } diff --git a/commons/zenoh-codec/tests/codec.rs b/commons/zenoh-codec/tests/codec.rs index 7f23214b49..3bca8b7489 100644 --- a/commons/zenoh-codec/tests/codec.rs +++ b/commons/zenoh-codec/tests/codec.rs @@ -121,10 +121,28 @@ macro_rules! 
run { // Core #[test] fn codec_zint() { + run!(u8, { u8::MIN }); + run!(u8, { u8::MAX }); run!(u8, { thread_rng().gen::() }); + + run!(u16, { u16::MIN }); + run!(u16, { u16::MAX }); run!(u16, { thread_rng().gen::() }); + + run!(u32, { u32::MIN }); + run!(u32, { u32::MAX }); run!(u32, { thread_rng().gen::() }); + + run!(u64, { u64::MIN }); + run!(u64, { u64::MAX }); + let codec = Zenoh080::new(); + for i in 1..=codec.w_len(u64::MAX) { + run!(u64, { 1 << (7 * i) }); + } run!(u64, { thread_rng().gen::() }); + + run!(usize, { usize::MIN }); + run!(usize, { usize::MAX }); run!(usize, thread_rng().gen::()); } @@ -138,11 +156,12 @@ fn codec_zint_len() { codec.write(&mut writer, n).unwrap(); assert_eq!(codec.w_len(n), buff.len()); - for i in 1..=9 { + for i in 1..=codec.w_len(u64::MAX) { let mut buff = vec![]; let mut writer = buff.writer(); let n: u64 = 1 << (7 * i); codec.write(&mut writer, n).unwrap(); + println!("ZInt len: {} {:02x?}", n, buff); assert_eq!(codec.w_len(n), buff.len()); } From 7300f4c8fe1c1fd89f1109d5091a642c3c51c298 Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Tue, 19 Mar 2024 14:35:17 +0100 Subject: [PATCH 023/124] Remove pull API and protocol support (#821) * Remove Pull subscriber * Fix doctest. Remove unused code. 
* Remove routing code for pull subscriptions * Remove pull mode from DeclareSubscriber * Remove unsupported Put/Del in Request/Response (#839) * Address review comments --------- Co-authored-by: OlivierHecart --- Cargo.lock | 1 + commons/zenoh-codec/src/zenoh/mod.rs | 9 - commons/zenoh-codec/src/zenoh/pull.rs | 93 ----- commons/zenoh-codec/tests/codec.rs | 5 - commons/zenoh-collections/src/ring_buffer.rs | 9 + commons/zenoh-protocol/src/network/declare.rs | 48 +-- commons/zenoh-protocol/src/zenoh/mod.rs | 27 +- commons/zenoh-protocol/src/zenoh/pull.rs | 56 --- examples/Cargo.toml | 1 + examples/examples/z_pull.rs | 72 ++-- io/zenoh-transport/src/shm.rs | 8 +- zenoh-ext/src/subscriber_ext.rs | 6 +- zenoh/src/liveliness.rs | 1 - zenoh/src/net/routing/dispatcher/face.rs | 6 - zenoh/src/net/routing/dispatcher/pubsub.rs | 263 +++----------- zenoh/src/net/routing/dispatcher/queries.rs | 16 - zenoh/src/net/routing/dispatcher/resource.rs | 15 - zenoh/src/net/routing/dispatcher/tables.rs | 2 - zenoh/src/net/routing/hat/client/mod.rs | 15 +- zenoh/src/net/routing/hat/client/pubsub.rs | 34 +- zenoh/src/net/routing/hat/client/queries.rs | 3 +- .../src/net/routing/hat/linkstate_peer/mod.rs | 15 +- .../net/routing/hat/linkstate_peer/pubsub.rs | 35 +- .../net/routing/hat/linkstate_peer/queries.rs | 1 - zenoh/src/net/routing/hat/p2p_peer/mod.rs | 15 +- zenoh/src/net/routing/hat/p2p_peer/pubsub.rs | 34 +- zenoh/src/net/routing/hat/p2p_peer/queries.rs | 3 +- zenoh/src/net/routing/hat/router/mod.rs | 15 +- zenoh/src/net/routing/hat/router/pubsub.rs | 37 +- zenoh/src/net/routing/hat/router/queries.rs | 1 - zenoh/src/net/runtime/adminspace.rs | 96 ++--- zenoh/src/net/tests/tables.rs | 5 - zenoh/src/session.rs | 43 +-- zenoh/src/subscriber.rs | 331 +----------------- 34 files changed, 242 insertions(+), 1079 deletions(-) delete mode 100644 commons/zenoh-codec/src/zenoh/pull.rs delete mode 100644 commons/zenoh-protocol/src/zenoh/pull.rs diff --git a/Cargo.lock b/Cargo.lock index 
53f2600071..fa55ca4acd 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4534,6 +4534,7 @@ dependencies = [ "rand 0.8.5", "rustc_version 0.4.0", "zenoh", + "zenoh-collections", "zenoh-ext", ] diff --git a/commons/zenoh-codec/src/zenoh/mod.rs b/commons/zenoh-codec/src/zenoh/mod.rs index 0d7146dc90..dc38e5ee84 100644 --- a/commons/zenoh-codec/src/zenoh/mod.rs +++ b/commons/zenoh-codec/src/zenoh/mod.rs @@ -13,7 +13,6 @@ // pub mod del; pub mod err; -pub mod pull; pub mod put; pub mod query; pub mod reply; @@ -81,9 +80,6 @@ where fn write(self, writer: &mut W, x: &RequestBody) -> Self::Output { match x { RequestBody::Query(b) => self.write(&mut *writer, b), - RequestBody::Put(b) => self.write(&mut *writer, b), - RequestBody::Del(b) => self.write(&mut *writer, b), - RequestBody::Pull(b) => self.write(&mut *writer, b), } } } @@ -100,9 +96,6 @@ where let codec = Zenoh080Header::new(header); let body = match imsg::mid(codec.header) { id::QUERY => RequestBody::Query(codec.read(&mut *reader)?), - id::PUT => RequestBody::Put(codec.read(&mut *reader)?), - id::DEL => RequestBody::Del(codec.read(&mut *reader)?), - id::PULL => RequestBody::Pull(codec.read(&mut *reader)?), _ => return Err(DidntRead), }; @@ -121,7 +114,6 @@ where match x { ResponseBody::Reply(b) => self.write(&mut *writer, b), ResponseBody::Err(b) => self.write(&mut *writer, b), - ResponseBody::Put(b) => self.write(&mut *writer, b), } } } @@ -139,7 +131,6 @@ where let body = match imsg::mid(codec.header) { id::REPLY => ResponseBody::Reply(codec.read(&mut *reader)?), id::ERR => ResponseBody::Err(codec.read(&mut *reader)?), - id::PUT => ResponseBody::Put(codec.read(&mut *reader)?), _ => return Err(DidntRead), }; diff --git a/commons/zenoh-codec/src/zenoh/pull.rs b/commons/zenoh-codec/src/zenoh/pull.rs deleted file mode 100644 index dc71901d58..0000000000 --- a/commons/zenoh-codec/src/zenoh/pull.rs +++ /dev/null @@ -1,93 +0,0 @@ -// -// Copyright (c) 2022 ZettaScale Technology -// -// This program and the accompanying 
materials are made available under the -// terms of the Eclipse Public License 2.0 which is available at -// http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 -// which is available at https://www.apache.org/licenses/LICENSE-2.0. -// -// SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 -// -// Contributors: -// ZettaScale Zenoh Team, -// -use crate::{common::extension, RCodec, WCodec, Zenoh080, Zenoh080Header}; -use alloc::vec::Vec; -use zenoh_buffers::{ - reader::{DidntRead, Reader}, - writer::{DidntWrite, Writer}, -}; - -use zenoh_protocol::{ - common::imsg, - zenoh::{ - id, - pull::{flag, Pull}, - }, -}; - -impl WCodec<&Pull, &mut W> for Zenoh080 -where - W: Writer, -{ - type Output = Result<(), DidntWrite>; - - fn write(self, writer: &mut W, x: &Pull) -> Self::Output { - let Pull { ext_unknown } = x; - - // Header - let mut header = id::PULL; - let mut n_exts = ext_unknown.len() as u8; - if n_exts != 0 { - header |= flag::Z; - } - self.write(&mut *writer, header)?; - - // Extensions - for u in ext_unknown.iter() { - n_exts -= 1; - self.write(&mut *writer, (u, n_exts != 0))?; - } - - Ok(()) - } -} - -impl RCodec for Zenoh080 -where - R: Reader, -{ - type Error = DidntRead; - - fn read(self, reader: &mut R) -> Result { - let header: u8 = self.read(&mut *reader)?; - let codec = Zenoh080Header::new(header); - codec.read(reader) - } -} - -impl RCodec for Zenoh080Header -where - R: Reader, -{ - type Error = DidntRead; - - fn read(self, reader: &mut R) -> Result { - if imsg::mid(self.header) != id::PULL { - return Err(DidntRead); - } - - // Extensions - let mut ext_unknown = Vec::new(); - - let mut has_ext = imsg::has_flag(self.header, flag::Z); - while has_ext { - let ext: u8 = self.codec.read(&mut *reader)?; - let (u, ext) = extension::read(reader, "Pull", ext)?; - ext_unknown.push(u); - has_ext = ext; - } - - Ok(Pull { ext_unknown }) - } -} diff --git a/commons/zenoh-codec/tests/codec.rs b/commons/zenoh-codec/tests/codec.rs index 
3bca8b7489..2f0e870c4f 100644 --- a/commons/zenoh-codec/tests/codec.rs +++ b/commons/zenoh-codec/tests/codec.rs @@ -600,8 +600,3 @@ fn codec_reply() { fn codec_err() { run!(zenoh::Err, zenoh::Err::rand()); } - -#[test] -fn codec_pull() { - run!(zenoh::Pull, zenoh::Pull::rand()); -} diff --git a/commons/zenoh-collections/src/ring_buffer.rs b/commons/zenoh-collections/src/ring_buffer.rs index fd60030ebc..e9f7909d5f 100644 --- a/commons/zenoh-collections/src/ring_buffer.rs +++ b/commons/zenoh-collections/src/ring_buffer.rs @@ -40,6 +40,15 @@ impl RingBuffer { Some(elem) } + #[inline] + pub fn push_force(&mut self, elem: T) -> Option { + self.push(elem).and_then(|elem| { + let ret = self.buffer.pop_front(); + self.buffer.push_back(elem); + ret + }) + } + #[inline] pub fn pull(&mut self) -> Option { let x = self.buffer.pop_front(); diff --git a/commons/zenoh-protocol/src/network/declare.rs b/commons/zenoh-protocol/src/network/declare.rs index 2dd8de4ef8..187fa87662 100644 --- a/commons/zenoh-protocol/src/network/declare.rs +++ b/commons/zenoh-protocol/src/network/declare.rs @@ -146,31 +146,6 @@ impl Declare { } } -#[derive(Debug, Default, Copy, Clone, PartialEq, Eq)] -#[repr(u8)] -pub enum Mode { - #[default] - Push, - Pull, -} - -impl Mode { - pub const DEFAULT: Self = Self::Push; - - #[cfg(feature = "test")] - fn rand() -> Self { - use rand::Rng; - - let mut rng = rand::thread_rng(); - - if rng.gen_bool(0.5) { - Mode::Push - } else { - Mode::Pull - } - } -} - pub mod common { use super::*; @@ -320,9 +295,7 @@ pub mod subscriber { /// ~ [decl_exts] ~ if Z==1 /// +---------------+ /// - /// - if R==1 then the subscription is reliable, else it is best effort - /// - if P==1 then the subscription is pull, else it is push - /// + /// - if R==1 then the subscription is reliable, else it is best effort /// /// ``` #[derive(Debug, Clone, PartialEq, Eq)] pub struct DeclareSubscriber { @@ -343,34 +316,29 @@ pub mod subscriber { /// +-+-+-+-+-+-+-+-+ /// |Z|0_1| ID | /// 
+-+-+-+---------+ - /// % reserved |P|R% + /// % reserved |R% /// +---------------+ /// /// - if R==1 then the subscription is reliable, else it is best effort - /// - if P==1 then the subscription is pull, else it is push /// - rsv: Reserved /// ``` #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub struct SubscriberInfo { pub reliability: Reliability, - pub mode: Mode, } impl SubscriberInfo { pub const R: u64 = 1; - pub const P: u64 = 1 << 1; pub const DEFAULT: Self = Self { reliability: Reliability::DEFAULT, - mode: Mode::DEFAULT, }; #[cfg(feature = "test")] pub fn rand() -> Self { let reliability = Reliability::rand(); - let mode = Mode::rand(); - Self { reliability, mode } + Self { reliability } } } @@ -387,12 +355,7 @@ pub mod subscriber { } else { Reliability::BestEffort }; - let mode = if imsg::has_option(ext.value, SubscriberInfo::P) { - Mode::Pull - } else { - Mode::Push - }; - Self { reliability, mode } + Self { reliability } } } @@ -402,9 +365,6 @@ pub mod subscriber { if ext.reliability == Reliability::Reliable { v |= SubscriberInfo::R; } - if ext.mode == Mode::Pull { - v |= SubscriberInfo::P; - } Info::new(v) } } diff --git a/commons/zenoh-protocol/src/zenoh/mod.rs b/commons/zenoh-protocol/src/zenoh/mod.rs index 3e5d573c43..7bca48f3ba 100644 --- a/commons/zenoh-protocol/src/zenoh/mod.rs +++ b/commons/zenoh-protocol/src/zenoh/mod.rs @@ -13,7 +13,6 @@ // pub mod del; pub mod err; -pub mod pull; pub mod put; pub mod query; pub mod reply; @@ -21,7 +20,6 @@ pub mod reply; use crate::core::Encoding; pub use del::Del; pub use err::Err; -pub use pull::Pull; pub use put::Put; pub use query::{Consolidation, Query}; pub use reply::Reply; @@ -33,7 +31,6 @@ pub mod id { pub const QUERY: u8 = 0x03; pub const REPLY: u8 = 0x04; pub const ERR: u8 = 0x05; - pub const PULL: u8 = 0x06; } // DataInfo @@ -80,9 +77,6 @@ impl From for PushBody { #[derive(Debug, Clone, PartialEq, Eq)] pub enum RequestBody { Query(Query), - Put(Put), - Del(Del), - Pull(Pull), } impl RequestBody 
{ @@ -92,11 +86,8 @@ impl RequestBody { let mut rng = rand::thread_rng(); - match rng.gen_range(0..4) { + match rng.gen_range(0..1) { 0 => RequestBody::Query(Query::rand()), - 1 => RequestBody::Put(Put::rand()), - 2 => RequestBody::Del(Del::rand()), - 3 => RequestBody::Pull(Pull::rand()), _ => unreachable!(), } } @@ -108,24 +99,11 @@ impl From for RequestBody { } } -impl From for RequestBody { - fn from(p: Put) -> RequestBody { - RequestBody::Put(p) - } -} - -impl From for RequestBody { - fn from(d: Del) -> RequestBody { - RequestBody::Del(d) - } -} - // Response #[derive(Debug, Clone, PartialEq, Eq)] pub enum ResponseBody { Reply(Reply), Err(Err), - Put(Put), } impl ResponseBody { @@ -134,10 +112,9 @@ impl ResponseBody { use rand::Rng; let mut rng = rand::thread_rng(); - match rng.gen_range(0..3) { + match rng.gen_range(0..2) { 0 => ResponseBody::Reply(Reply::rand()), 1 => ResponseBody::Err(Err::rand()), - 2 => ResponseBody::Put(Put::rand()), _ => unreachable!(), } } diff --git a/commons/zenoh-protocol/src/zenoh/pull.rs b/commons/zenoh-protocol/src/zenoh/pull.rs deleted file mode 100644 index eb4f7eb55e..0000000000 --- a/commons/zenoh-protocol/src/zenoh/pull.rs +++ /dev/null @@ -1,56 +0,0 @@ -// -// Copyright (c) 2022 ZettaScale Technology -// -// This program and the accompanying materials are made available under the -// terms of the Eclipse Public License 2.0 which is available at -// http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 -// which is available at https://www.apache.org/licenses/LICENSE-2.0. 
-// -// SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 -// -// Contributors: -// ZettaScale Zenoh Team, -// -use crate::common::ZExtUnknown; -use alloc::vec::Vec; - -/// # Pull message -/// -/// ```text -/// Flags: -/// - X: Reserved -/// - X: Reserved -/// - Z: Extension If Z==1 then at least one extension is present -/// -/// 7 6 5 4 3 2 1 0 -/// +-+-+-+-+-+-+-+-+ -/// |Z|X|X| PULL | -/// +-+-+-+---------+ -/// ~ [pull_exts] ~ if Z==1 -/// +---------------+ -/// ``` -pub mod flag { - // pub const X: u8 = 1 << 5; // 0x20 Reserved - // pub const X: u8 = 1 << 6; // 0x40 Reserved - pub const Z: u8 = 1 << 7; // 0x80 Extensions if Z==1 then an extension will follow -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct Pull { - pub ext_unknown: Vec, -} - -impl Pull { - #[cfg(feature = "test")] - pub fn rand() -> Self { - use rand::Rng; - let mut rng = rand::thread_rng(); - - let mut ext_unknown = Vec::new(); - for _ in 0..rng.gen_range(0..4) { - ext_unknown.push(ZExtUnknown::rand2(1, false)); - } - - Self { ext_unknown } - } -} diff --git a/examples/Cargo.toml b/examples/Cargo.toml index 190894fb18..b827ed2e7f 100644 --- a/examples/Cargo.toml +++ b/examples/Cargo.toml @@ -50,6 +50,7 @@ git-version = { workspace = true } json5 = { workspace = true } log = { workspace = true } zenoh = { workspace = true } +zenoh-collections = { workspace = true } zenoh-ext = { workspace = true } [dev-dependencies] diff --git a/examples/examples/z_pull.rs b/examples/examples/z_pull.rs index 910d7614cf..d2c9a5380b 100644 --- a/examples/examples/z_pull.rs +++ b/examples/examples/z_pull.rs @@ -13,9 +13,12 @@ // use async_std::task::sleep; use clap::Parser; -use std::time::Duration; -use zenoh::config::Config; -use zenoh::prelude::r#async::*; +use std::{ + sync::{Arc, Mutex}, + time::Duration, +}; +use zenoh::{config::Config, prelude::r#async::*}; +use zenoh_collections::RingBuffer; use zenoh_examples::CommonArgs; #[async_std::main] @@ -23,50 +26,67 @@ async fn main() { // initiate logging 
env_logger::init(); - let (config, key_expr) = parse_args(); + let (config, key_expr, cache, interval) = parse_args(); println!("Opening session..."); let session = zenoh::open(config).res().await.unwrap(); - println!("Declaring Subscriber on '{key_expr}'..."); + println!("Creating a local queue keeping the last {cache} elements..."); + let arb = Arc::new(Mutex::new(RingBuffer::new(cache))); + let arb_c = arb.clone(); - let subscriber = session + println!("Declaring Subscriber on '{key_expr}'..."); + let _subscriber = session .declare_subscriber(&key_expr) - .pull_mode() - .callback(|sample| { - let payload = sample - .payload() - .deserialize::() - .unwrap_or_else(|e| format!("{}", e)); - println!( - ">> [Subscriber] Received {} ('{}': '{}')", - sample.kind(), - sample.key_expr().as_str(), - payload, - ); + .callback(move |sample| { + arb_c.lock().unwrap().push_force(sample); }) .res() .await .unwrap(); - println!("Press CTRL-C to quit..."); - for idx in 0..u32::MAX { - sleep(Duration::from_secs(1)).await; - println!("[{idx:4}] Pulling..."); - subscriber.pull().res().await.unwrap(); + println!("Pulling data every {:#?} seconds", interval); + loop { + let mut res = arb.lock().unwrap().pull(); + print!(">> [Subscriber] Pulling "); + match res.take() { + Some(sample) => { + let payload = sample + .payload() + .deserialize::() + .unwrap_or_else(|e| format!("{}", e)); + println!( + "{} ('{}': '{}')", + sample.kind(), + sample.key_expr().as_str(), + payload, + ); + } + None => { + println!("nothing... sleep for {:#?}", interval); + sleep(interval).await; + } + } } } -#[derive(clap::Parser, Clone, PartialEq, Eq, Hash, Debug)] +#[derive(clap::Parser, Clone, PartialEq, Debug)] struct SubArgs { #[arg(short, long, default_value = "demo/example/**")] /// The Key Expression to subscribe to. key: KeyExpr<'static>, + /// The size of the cache. + #[arg(long, default_value = "3")] + cache: usize, + /// The interval for pulling the cache. 
+ #[arg(long, default_value = "5.0")] + interval: f32, #[command(flatten)] common: CommonArgs, } -fn parse_args() -> (Config, KeyExpr<'static>) { +fn parse_args() -> (Config, KeyExpr<'static>, usize, Duration) { let args = SubArgs::parse(); - (args.common.into(), args.key) + let interval = Duration::from_secs_f32(args.interval); + (args.common.into(), args.key, args.cache, interval) } diff --git a/io/zenoh-transport/src/shm.rs b/io/zenoh-transport/src/shm.rs index 31910f51ae..0dd6662286 100644 --- a/io/zenoh-transport/src/shm.rs +++ b/io/zenoh-transport/src/shm.rs @@ -140,12 +140,9 @@ pub fn map_zmsg_to_shminfo(msg: &mut NetworkMessage) -> ZResult { }, NetworkBody::Request(Request { payload, .. }) => match payload { RequestBody::Query(b) => b.map_to_shminfo(), - RequestBody::Put(b) => b.map_to_shminfo(), - RequestBody::Del(_) | RequestBody::Pull(_) => Ok(false), }, NetworkBody::Response(Response { payload, .. }) => match payload { ResponseBody::Reply(b) => b.map_to_shminfo(), - ResponseBody::Put(b) => b.map_to_shminfo(), ResponseBody::Err(b) => b.map_to_shminfo(), }, NetworkBody::ResponseFinal(_) | NetworkBody::Declare(_) | NetworkBody::OAM(_) => Ok(false), @@ -194,13 +191,10 @@ pub fn map_zmsg_to_shmbuf( }, NetworkBody::Request(Request { payload, .. }) => match payload { RequestBody::Query(b) => b.map_to_shmbuf(shmr), - RequestBody::Put(b) => b.map_to_shmbuf(shmr), - RequestBody::Del(_) | RequestBody::Pull(_) => Ok(false), }, NetworkBody::Response(Response { payload, .. 
}) => match payload { - ResponseBody::Put(b) => b.map_to_shmbuf(shmr), - ResponseBody::Err(b) => b.map_to_shmbuf(shmr), ResponseBody::Reply(b) => b.map_to_shmbuf(shmr), + ResponseBody::Err(b) => b.map_to_shmbuf(shmr), }, NetworkBody::ResponseFinal(_) | NetworkBody::Declare(_) | NetworkBody::OAM(_) => Ok(false), } diff --git a/zenoh-ext/src/subscriber_ext.rs b/zenoh-ext/src/subscriber_ext.rs index 89d3b5f691..6ac796efb1 100644 --- a/zenoh-ext/src/subscriber_ext.rs +++ b/zenoh-ext/src/subscriber_ext.rs @@ -21,7 +21,7 @@ use zenoh::{ liveliness::LivelinessSubscriberBuilder, prelude::Sample, query::{QueryConsolidation, QueryTarget}, - subscriber::{PushMode, Reliability, Subscriber, SubscriberBuilder}, + subscriber::{Reliability, Subscriber, SubscriberBuilder}, }; use crate::ExtractSample; @@ -122,9 +122,7 @@ pub trait SubscriberBuilderExt<'a, 'b, Handler> { fn querying(self) -> QueryingSubscriberBuilder<'a, 'b, Self::KeySpace, Handler>; } -impl<'a, 'b, Handler> SubscriberBuilderExt<'a, 'b, Handler> - for SubscriberBuilder<'a, 'b, PushMode, Handler> -{ +impl<'a, 'b, Handler> SubscriberBuilderExt<'a, 'b, Handler> for SubscriberBuilder<'a, 'b, Handler> { type KeySpace = crate::UserSpace; /// Create a [`FetchingSubscriber`](super::FetchingSubscriber). 
diff --git a/zenoh/src/liveliness.rs b/zenoh/src/liveliness.rs index 4103504f13..425aa62592 100644 --- a/zenoh/src/liveliness.rs +++ b/zenoh/src/liveliness.rs @@ -398,7 +398,6 @@ impl Drop for LivelinessToken<'_> { /// let subscriber = session /// .declare_subscriber("key/expression") /// .best_effort() -/// .pull_mode() /// .res() /// .await /// .unwrap(); diff --git a/zenoh/src/net/routing/dispatcher/face.rs b/zenoh/src/net/routing/dispatcher/face.rs index 79c9da9127..cb565053c9 100644 --- a/zenoh/src/net/routing/dispatcher/face.rs +++ b/zenoh/src/net/routing/dispatcher/face.rs @@ -244,12 +244,6 @@ impl Primitives for Face { msg.ext_nodeid.node_id, ); } - RequestBody::Pull(_) => { - pull_data(&self.tables.tables, &self.state.clone(), msg.wire_expr); - } - _ => { - log::error!("{} Unsupported request!", self); - } } } diff --git a/zenoh/src/net/routing/dispatcher/pubsub.rs b/zenoh/src/net/routing/dispatcher/pubsub.rs index c0d1bb4a34..89c6c40206 100644 --- a/zenoh/src/net/routing/dispatcher/pubsub.rs +++ b/zenoh/src/net/routing/dispatcher/pubsub.rs @@ -12,17 +12,15 @@ // ZettaScale Zenoh Team, // use super::face::FaceState; -use super::resource::{DataRoutes, Direction, PullCaches, Resource}; +use super::resource::{DataRoutes, Direction, Resource}; use super::tables::{NodeId, Route, RoutingExpr, Tables, TablesLock}; use crate::net::routing::hat::HatTrait; -use std::borrow::Cow; use std::collections::HashMap; use std::sync::Arc; -use std::sync::RwLock; use zenoh_core::zread; -use zenoh_protocol::core::key_expr::{keyexpr, OwnedKeyExpr}; +use zenoh_protocol::core::key_expr::keyexpr; use zenoh_protocol::network::declare::subscriber::ext::SubscriberInfo; -use zenoh_protocol::network::declare::{Mode, SubscriberId}; +use zenoh_protocol::network::declare::SubscriberId; use zenoh_protocol::{ core::{WhatAmI, WireExpr}, network::{declare::ext, Push}, @@ -83,13 +81,10 @@ pub(crate) fn declare_subscription( drop(rtables); let wtables = zwrite!(tables.tables); - for (mut res, 
data_routes, matching_pulls) in matches_data_routes { + for (mut res, data_routes) in matches_data_routes { get_mut_unchecked(&mut res) .context_mut() .update_data_routes(data_routes); - get_mut_unchecked(&mut res) - .context_mut() - .update_matching_pulls(matching_pulls); } drop(wtables); } @@ -148,13 +143,10 @@ pub(crate) fn undeclare_subscription( drop(rtables); let wtables = zwrite!(tables.tables); - for (mut res, data_routes, matching_pulls) in matches_data_routes { + for (mut res, data_routes) in matches_data_routes { get_mut_unchecked(&mut res) .context_mut() .update_data_routes(data_routes); - get_mut_unchecked(&mut res) - .context_mut() - .update_matching_pulls(matching_pulls); } Resource::clean(&mut res); drop(wtables); @@ -223,7 +215,6 @@ pub(crate) fn update_data_routes(tables: &Tables, res: &mut Arc) { pub(crate) fn update_data_routes_from(tables: &mut Tables, res: &mut Arc) { update_data_routes(tables, res); - update_matching_pulls(tables, res); let res = get_mut_unchecked(res); for child in res.childs.values_mut() { update_data_routes_from(tables, child); @@ -233,22 +224,17 @@ pub(crate) fn update_data_routes_from(tables: &mut Tables, res: &mut Arc( tables: &'a Tables, res: &'a Arc, -) -> Vec<(Arc, DataRoutes, Arc)> { +) -> Vec<(Arc, DataRoutes)> { let mut routes = vec![]; if res.context.is_some() { let mut expr = RoutingExpr::new(res, ""); - routes.push(( - res.clone(), - compute_data_routes(tables, &mut expr), - compute_matching_pulls(tables, &mut expr), - )); + routes.push((res.clone(), compute_data_routes(tables, &mut expr))); for match_ in &res.context().matches { let match_ = match_.upgrade().unwrap(); if !Arc::ptr_eq(&match_, res) { let mut expr = RoutingExpr::new(&match_, ""); let match_routes = compute_data_routes(tables, &mut expr); - let matching_pulls = compute_matching_pulls(tables, &mut expr); - routes.push((match_, match_routes, matching_pulls)); + routes.push((match_, match_routes)); } } } @@ -258,12 +244,10 @@ pub(crate) fn 
compute_matches_data_routes<'a>( pub(crate) fn update_matches_data_routes<'a>(tables: &'a mut Tables, res: &'a mut Arc) { if res.context.is_some() { update_data_routes(tables, res); - update_matching_pulls(tables, res); for match_ in &res.context().matches { let mut match_ = match_.upgrade().unwrap(); if !Arc::ptr_eq(&match_, res) { update_data_routes(tables, &mut match_); - update_matching_pulls(tables, &mut match_); } } } @@ -278,9 +262,6 @@ pub(crate) fn disable_matches_data_routes(_tables: &mut Tables, res: &mut Arc Arc { - let mut pull_caches = PullCaches::default(); - compute_matching_pulls_(tables, &mut pull_caches, expr); - Arc::new(pull_caches) -} - -pub(crate) fn update_matching_pulls(tables: &Tables, res: &mut Arc) { - if res.context.is_some() { - let mut res_mut = res.clone(); - let res_mut = get_mut_unchecked(&mut res_mut); - if res_mut.context_mut().matching_pulls.is_none() { - res_mut.context_mut().matching_pulls = Some(Arc::new(PullCaches::default())); - } - compute_matching_pulls_( - tables, - get_mut_unchecked(res_mut.context_mut().matching_pulls.as_mut().unwrap()), - &mut RoutingExpr::new(res, ""), - ); - } -} - -#[inline] -fn get_matching_pulls( - tables: &Tables, - res: &Option>, - expr: &mut RoutingExpr, -) -> Arc { - res.as_ref() - .and_then(|res| res.context.as_ref()) - .and_then(|ctx| ctx.matching_pulls.clone()) - .unwrap_or_else(|| compute_matching_pulls(tables, expr)) -} - -macro_rules! cache_data { - ( - $matching_pulls:expr, - $expr:expr, - $payload:expr - ) => { - for context in $matching_pulls.iter() { - get_mut_unchecked(&mut context.clone()) - .last_values - .insert($expr.full_expr().to_string(), $payload.clone()); - } - }; -} - #[cfg(feature = "stats")] macro_rules! 
inc_stats { ( @@ -497,12 +406,10 @@ pub fn full_reentrant_route_data( let route = get_data_route(&tables, face, &res, &mut expr, routing_context); - let matching_pulls = get_matching_pulls(&tables, &res, &mut expr); - - if !(route.is_empty() && matching_pulls.is_empty()) { + if !route.is_empty() { treat_timestamp!(&tables.hlc, payload, tables.drop_future_timestamp); - if route.len() == 1 && matching_pulls.len() == 0 { + if route.len() == 1 { let (outface, key_expr, context) = route.values().next().unwrap(); if tables .hat_code @@ -524,26 +431,43 @@ pub fn full_reentrant_route_data( payload, }) } - } else { - if !matching_pulls.is_empty() { - let lock = zlock!(tables.pull_caches_lock); - cache_data!(matching_pulls, expr, payload); - drop(lock); - } + } else if tables.whatami == WhatAmI::Router { + let route = route + .values() + .filter(|(outface, _key_expr, _context)| { + tables + .hat_code + .egress_filter(&tables, face, outface, &mut expr) + }) + .cloned() + .collect::>(); - if tables.whatami == WhatAmI::Router { - let route = route - .values() - .filter(|(outface, _key_expr, _context)| { - tables - .hat_code - .egress_filter(&tables, face, outface, &mut expr) - }) - .cloned() - .collect::>(); + drop(tables); + for (outface, key_expr, context) in route { + #[cfg(feature = "stats")] + if !admin { + inc_stats!(face, tx, user, payload) + } else { + inc_stats!(face, tx, admin, payload) + } - drop(tables); - for (outface, key_expr, context) in route { + outface.primitives.send_push(Push { + wire_expr: key_expr, + ext_qos, + ext_tstamp: None, + ext_nodeid: ext::NodeIdType { node_id: context }, + payload: payload.clone(), + }) + } + } else { + drop(tables); + for (outface, key_expr, context) in route.values() { + if face.id != outface.id + && match (face.mcast_group.as_ref(), outface.mcast_group.as_ref()) { + (Some(l), Some(r)) => l != r, + _ => true, + } + { #[cfg(feature = "stats")] if !admin { inc_stats!(face, tx, user, payload) @@ -552,41 +476,13 @@ pub fn 
full_reentrant_route_data( } outface.primitives.send_push(Push { - wire_expr: key_expr, + wire_expr: key_expr.into(), ext_qos, ext_tstamp: None, - ext_nodeid: ext::NodeIdType { node_id: context }, + ext_nodeid: ext::NodeIdType { node_id: *context }, payload: payload.clone(), }) } - } else { - drop(tables); - for (outface, key_expr, context) in route.values() { - if face.id != outface.id - && match ( - face.mcast_group.as_ref(), - outface.mcast_group.as_ref(), - ) { - (Some(l), Some(r)) => l != r, - _ => true, - } - { - #[cfg(feature = "stats")] - if !admin { - inc_stats!(face, tx, user, payload) - } else { - inc_stats!(face, tx, admin, payload) - } - - outface.primitives.send_push(Push { - wire_expr: key_expr.into(), - ext_qos, - ext_tstamp: None, - ext_nodeid: ext::NodeIdType { node_id: *context }, - payload: payload.clone(), - }) - } - } } } } @@ -597,68 +493,3 @@ pub fn full_reentrant_route_data( } } } - -pub fn pull_data(tables_ref: &RwLock, face: &Arc, expr: WireExpr) { - let tables = zread!(tables_ref); - match tables.get_mapping(face, &expr.scope, expr.mapping) { - Some(prefix) => match Resource::get_resource(prefix, expr.suffix.as_ref()) { - Some(mut res) => { - let res = get_mut_unchecked(&mut res); - match res.session_ctxs.get_mut(&face.id) { - Some(ctx) => match &ctx.subs { - Some(_subinfo) => { - // let reliability = subinfo.reliability; - let lock = zlock!(tables.pull_caches_lock); - let route = get_mut_unchecked(ctx) - .last_values - .drain() - .map(|(name, sample)| { - ( - Resource::get_best_key(&tables.root_res, &name, face.id) - .to_owned(), - sample, - ) - }) - .collect::>(); - drop(lock); - drop(tables); - for (key_expr, payload) in route { - face.primitives.send_push(Push { - wire_expr: key_expr, - ext_qos: ext::QoSType::PUSH, - ext_tstamp: None, - ext_nodeid: ext::NodeIdType::DEFAULT, - payload, - }); - } - } - None => { - log::error!( - "{} Pull data for unknown subscriber {} (no info)!", - face, - prefix.expr() + expr.suffix.as_ref() - ); - } 
- }, - None => { - log::error!( - "{} Pull data for unknown subscriber {} (no context)!", - face, - prefix.expr() + expr.suffix.as_ref() - ); - } - } - } - None => { - log::error!( - "{} Pull data for unknown subscriber {} (no resource)!", - face, - prefix.expr() + expr.suffix.as_ref() - ); - } - }, - None => { - log::error!("{} Pull data with unknown scope {}!", face, expr.scope); - } - }; -} diff --git a/zenoh/src/net/routing/dispatcher/queries.rs b/zenoh/src/net/routing/dispatcher/queries.rs index 721a98b8c2..04262e555d 100644 --- a/zenoh/src/net/routing/dispatcher/queries.rs +++ b/zenoh/src/net/routing/dispatcher/queries.rs @@ -460,20 +460,12 @@ macro_rules! inc_req_stats { if let Some(stats) = $face.stats.as_ref() { use zenoh_buffers::buffer::Buffer; match &$body { - RequestBody::Put(p) => { - stats.[<$txrx _z_put_msgs>].[](1); - stats.[<$txrx _z_put_pl_bytes>].[](p.payload.len()); - } - RequestBody::Del(_) => { - stats.[<$txrx _z_del_msgs>].[](1); - } RequestBody::Query(q) => { stats.[<$txrx _z_query_msgs>].[](1); stats.[<$txrx _z_query_pl_bytes>].[]( q.ext_body.as_ref().map(|b| b.payload.len()).unwrap_or(0), ); } - RequestBody::Pull(_) => (), } } } @@ -492,14 +484,6 @@ macro_rules! 
inc_res_stats { if let Some(stats) = $face.stats.as_ref() { use zenoh_buffers::buffer::Buffer; match &$body { - ResponseBody::Put(p) => { - stats.[<$txrx _z_put_msgs>].[](1); - let mut n = p.payload.len(); - if let Some(a) = p.ext_attachment.as_ref() { - n += a.buffer.len(); - } - stats.[<$txrx _z_put_pl_bytes>].[](n); - } ResponseBody::Reply(r) => { stats.[<$txrx _z_reply_msgs>].[](1); let mut n = 0; diff --git a/zenoh/src/net/routing/dispatcher/resource.rs b/zenoh/src/net/routing/dispatcher/resource.rs index 9f43841025..3e35db14b6 100644 --- a/zenoh/src/net/routing/dispatcher/resource.rs +++ b/zenoh/src/net/routing/dispatcher/resource.rs @@ -24,7 +24,6 @@ use zenoh_config::WhatAmI; #[cfg(feature = "complete_n")] use zenoh_protocol::network::request::ext::TargetType; use zenoh_protocol::network::RequestId; -use zenoh_protocol::zenoh::PushBody; use zenoh_protocol::{ core::{key_expr::keyexpr, ExprId, WireExpr}, network::{ @@ -51,7 +50,6 @@ pub(crate) struct QueryTargetQabl { pub(crate) distance: f64, } pub(crate) type QueryTargetQablSet = Vec; -pub(crate) type PullCaches = Vec>; pub(crate) struct SessionContext { pub(crate) face: Arc, @@ -59,7 +57,6 @@ pub(crate) struct SessionContext { pub(crate) remote_expr_id: Option, pub(crate) subs: Option, pub(crate) qabl: Option, - pub(crate) last_values: HashMap, pub(crate) in_interceptor_cache: Option>, pub(crate) e_interceptor_cache: Option>, } @@ -121,7 +118,6 @@ impl QueryRoutes { pub(crate) struct ResourceContext { pub(crate) matches: Vec>, - pub(crate) matching_pulls: Option>, pub(crate) hat: Box, pub(crate) valid_data_routes: bool, pub(crate) data_routes: DataRoutes, @@ -133,7 +129,6 @@ impl ResourceContext { fn new(hat: Box) -> ResourceContext { ResourceContext { matches: Vec::new(), - matching_pulls: None, hat, valid_data_routes: false, data_routes: DataRoutes::default(), @@ -159,14 +154,6 @@ impl ResourceContext { pub(crate) fn disable_query_routes(&mut self) { self.valid_query_routes = false; } - - pub(crate) fn 
update_matching_pulls(&mut self, pulls: Arc) { - self.matching_pulls = Some(pulls); - } - - pub(crate) fn disable_matching_pulls(&mut self) { - self.matching_pulls = None; - } } pub struct Resource { @@ -445,7 +432,6 @@ impl Resource { remote_expr_id: None, subs: None, qabl: None, - last_values: HashMap::new(), in_interceptor_cache: None, e_interceptor_cache: None, }) @@ -708,7 +694,6 @@ pub fn register_expr( remote_expr_id: Some(expr_id), subs: None, qabl: None, - last_values: HashMap::new(), in_interceptor_cache: None, e_interceptor_cache: None, }) diff --git a/zenoh/src/net/routing/dispatcher/tables.rs b/zenoh/src/net/routing/dispatcher/tables.rs index e239a316a1..4f2fc2ee83 100644 --- a/zenoh/src/net/routing/dispatcher/tables.rs +++ b/zenoh/src/net/routing/dispatcher/tables.rs @@ -71,7 +71,6 @@ pub struct Tables { pub(crate) mcast_groups: Vec>, pub(crate) mcast_faces: Vec>, pub(crate) interceptors: Vec, - pub(crate) pull_caches_lock: Mutex<()>, pub(crate) hat: Box, pub(crate) hat_code: Arc, // @TODO make this a Box } @@ -103,7 +102,6 @@ impl Tables { mcast_groups: vec![], mcast_faces: vec![], interceptors: interceptor_factories(config)?, - pull_caches_lock: Mutex::new(()), hat: hat_code.new_tables(router_peers_failover_brokering), hat_code: hat_code.into(), }) diff --git a/zenoh/src/net/routing/hat/client/mod.rs b/zenoh/src/net/routing/hat/client/mod.rs index 05210bcaee..a9908f5f58 100644 --- a/zenoh/src/net/routing/hat/client/mod.rs +++ b/zenoh/src/net/routing/hat/client/mod.rs @@ -20,9 +20,7 @@ use crate::{ net::routing::{ dispatcher::face::Face, - router::{ - compute_data_routes, compute_matching_pulls, compute_query_routes, RoutesIndexes, - }, + router::{compute_data_routes, compute_query_routes, RoutesIndexes}, }, runtime::Runtime, }; @@ -192,11 +190,7 @@ impl HatBaseTrait for HatCode { let rtables = zread!(tables.tables); for _match in subs_matches.drain(..) 
{ let mut expr = RoutingExpr::new(&_match, ""); - matches_data_routes.push(( - _match.clone(), - compute_data_routes(&rtables, &mut expr), - compute_matching_pulls(&rtables, &mut expr), - )); + matches_data_routes.push((_match.clone(), compute_data_routes(&rtables, &mut expr))); } for _match in qabls_matches.drain(..) { matches_query_routes.push((_match.clone(), compute_query_routes(&rtables, &_match))); @@ -204,13 +198,10 @@ impl HatBaseTrait for HatCode { drop(rtables); let mut wtables = zwrite!(tables.tables); - for (mut res, data_routes, matching_pulls) in matches_data_routes { + for (mut res, data_routes) in matches_data_routes { get_mut_unchecked(&mut res) .context_mut() .update_data_routes(data_routes); - get_mut_unchecked(&mut res) - .context_mut() - .update_matching_pulls(matching_pulls); Resource::clean(&mut res); } for (mut res, query_routes) in matches_query_routes { diff --git a/zenoh/src/net/routing/hat/client/pubsub.rs b/zenoh/src/net/routing/hat/client/pubsub.rs index f9f827ecc5..290f90f95f 100644 --- a/zenoh/src/net/routing/hat/client/pubsub.rs +++ b/zenoh/src/net/routing/hat/client/pubsub.rs @@ -30,7 +30,7 @@ use zenoh_protocol::{ core::{Reliability, WhatAmI}, network::declare::{ common::ext::WireExprType, ext, subscriber::ext::SubscriberInfo, Declare, DeclareBody, - DeclareSubscriber, Mode, UndeclareSubscriber, + DeclareSubscriber, UndeclareSubscriber, }, }; use zenoh_sync::get_mut_unchecked; @@ -94,16 +94,11 @@ fn register_client_subscription( { let res = get_mut_unchecked(res); match res.session_ctxs.get_mut(&face.id) { - Some(ctx) => match &ctx.subs { - Some(info) => { - if Mode::Pull == info.mode { - get_mut_unchecked(ctx).subs = Some(*sub_info); - } - } - None => { + Some(ctx) => { + if ctx.subs.is_none() { get_mut_unchecked(ctx).subs = Some(*sub_info); } - }, + } None => { res.session_ctxs.insert( face.id, @@ -113,7 +108,6 @@ fn register_client_subscription( remote_expr_id: None, subs: Some(*sub_info), qabl: None, - last_values: 
HashMap::new(), in_interceptor_cache: None, e_interceptor_cache: None, }), @@ -132,10 +126,8 @@ fn declare_client_subscription( sub_info: &SubscriberInfo, ) { register_client_subscription(tables, face, id, res, sub_info); - let mut propa_sub_info = *sub_info; - propa_sub_info.mode = Mode::Push; - propagate_simple_subscription(tables, res, &propa_sub_info, face); + propagate_simple_subscription(tables, res, sub_info, face); // This introduced a buffer overflow on windows // @TODO: Let's deactivate this on windows until Fixed #[cfg(not(windows))] @@ -243,7 +235,6 @@ fn forget_client_subscription( pub(super) fn pubsub_new_face(tables: &mut Tables, face: &mut Arc) { let sub_info = SubscriberInfo { reliability: Reliability::Reliable, // @TODO compute proper reliability to propagate from reliability of known subscribers - mode: Mode::Push, }; for src_face in tables .faces @@ -327,20 +318,19 @@ impl HatPubSubTrait for HatCode { let mres = mres.upgrade().unwrap(); for (sid, context) in &mres.session_ctxs { - if let Some(subinfo) = &context.subs { - if match tables.whatami { + if context.subs.is_some() + && match tables.whatami { WhatAmI::Router => context.face.whatami != WhatAmI::Router, _ => { source_type == WhatAmI::Client || context.face.whatami == WhatAmI::Client } - } && subinfo.mode == Mode::Push - { - route.entry(*sid).or_insert_with(|| { - let key_expr = Resource::get_best_key(expr.prefix, expr.suffix, *sid); - (context.face.clone(), key_expr.to_owned(), NodeId::default()) - }); } + { + route.entry(*sid).or_insert_with(|| { + let key_expr = Resource::get_best_key(expr.prefix, expr.suffix, *sid); + (context.face.clone(), key_expr.to_owned(), NodeId::default()) + }); } } } diff --git a/zenoh/src/net/routing/hat/client/queries.rs b/zenoh/src/net/routing/hat/client/queries.rs index 4964a8880a..81e5ba52d9 100644 --- a/zenoh/src/net/routing/hat/client/queries.rs +++ b/zenoh/src/net/routing/hat/client/queries.rs @@ -22,7 +22,7 @@ use 
crate::net::routing::router::RoutesIndexes; use crate::net::routing::{RoutingContext, PREFIX_LIVELINESS}; use ordered_float::OrderedFloat; use std::borrow::Cow; -use std::collections::{HashMap, HashSet}; +use std::collections::HashSet; use std::sync::atomic::Ordering; use std::sync::Arc; use zenoh_buffers::ZBuf; @@ -133,7 +133,6 @@ fn register_client_queryable( remote_expr_id: None, subs: None, qabl: None, - last_values: HashMap::new(), in_interceptor_cache: None, e_interceptor_cache: None, }) diff --git a/zenoh/src/net/routing/hat/linkstate_peer/mod.rs b/zenoh/src/net/routing/hat/linkstate_peer/mod.rs index 5591ea3b3e..3c4e2091f0 100644 --- a/zenoh/src/net/routing/hat/linkstate_peer/mod.rs +++ b/zenoh/src/net/routing/hat/linkstate_peer/mod.rs @@ -36,9 +36,7 @@ use crate::{ routing::{ dispatcher::face::Face, hat::TREES_COMPUTATION_DELAY_MS, - router::{ - compute_data_routes, compute_matching_pulls, compute_query_routes, RoutesIndexes, - }, + router::{compute_data_routes, compute_query_routes, RoutesIndexes}, }, }, runtime::Runtime, @@ -311,11 +309,7 @@ impl HatBaseTrait for HatCode { let rtables = zread!(tables.tables); for _match in subs_matches.drain(..) { let mut expr = RoutingExpr::new(&_match, ""); - matches_data_routes.push(( - _match.clone(), - compute_data_routes(&rtables, &mut expr), - compute_matching_pulls(&rtables, &mut expr), - )); + matches_data_routes.push((_match.clone(), compute_data_routes(&rtables, &mut expr))); } for _match in qabls_matches.drain(..) 
{ matches_query_routes.push((_match.clone(), compute_query_routes(&rtables, &_match))); @@ -323,13 +317,10 @@ impl HatBaseTrait for HatCode { drop(rtables); let mut wtables = zwrite!(tables.tables); - for (mut res, data_routes, matching_pulls) in matches_data_routes { + for (mut res, data_routes) in matches_data_routes { get_mut_unchecked(&mut res) .context_mut() .update_data_routes(data_routes); - get_mut_unchecked(&mut res) - .context_mut() - .update_matching_pulls(matching_pulls); Resource::clean(&mut res); } for (mut res, query_routes) in matches_query_routes { diff --git a/zenoh/src/net/routing/hat/linkstate_peer/pubsub.rs b/zenoh/src/net/routing/hat/linkstate_peer/pubsub.rs index 9a41915333..dddb6ae366 100644 --- a/zenoh/src/net/routing/hat/linkstate_peer/pubsub.rs +++ b/zenoh/src/net/routing/hat/linkstate_peer/pubsub.rs @@ -33,7 +33,7 @@ use zenoh_protocol::{ core::{Reliability, WhatAmI, ZenohId}, network::declare::{ common::ext::WireExprType, ext, subscriber::ext::SubscriberInfo, Declare, DeclareBody, - DeclareSubscriber, Mode, UndeclareSubscriber, + DeclareSubscriber, UndeclareSubscriber, }, }; use zenoh_sync::get_mut_unchecked; @@ -207,16 +207,11 @@ fn register_client_subscription( { let res = get_mut_unchecked(res); match res.session_ctxs.get_mut(&face.id) { - Some(ctx) => match &ctx.subs { - Some(info) => { - if Mode::Pull == info.mode { - get_mut_unchecked(ctx).subs = Some(*sub_info); - } - } - None => { + Some(ctx) => { + if ctx.subs.is_none() { get_mut_unchecked(ctx).subs = Some(*sub_info); } - }, + } None => { res.session_ctxs.insert( face.id, @@ -226,7 +221,6 @@ fn register_client_subscription( remote_expr_id: None, subs: Some(*sub_info), qabl: None, - last_values: HashMap::new(), in_interceptor_cache: None, e_interceptor_cache: None, }), @@ -245,10 +239,8 @@ fn declare_client_subscription( sub_info: &SubscriberInfo, ) { register_client_subscription(tables, face, id, res, sub_info); - let mut propa_sub_info = *sub_info; - propa_sub_info.mode = 
Mode::Push; let zid = tables.zid; - register_peer_subscription(tables, face, res, &propa_sub_info, zid); + register_peer_subscription(tables, face, res, sub_info, zid); } #[inline] @@ -454,7 +446,6 @@ fn forget_client_subscription( pub(super) fn pubsub_new_face(tables: &mut Tables, face: &mut Arc) { let sub_info = SubscriberInfo { reliability: Reliability::Reliable, // @TODO compute proper reliability to propagate from reliability of known subscribers - mode: Mode::Push, }; if face.whatami == WhatAmI::Client { @@ -511,7 +502,6 @@ pub(super) fn pubsub_tree_change(tables: &mut Tables, new_childs: &[Vec context.face.whatami != WhatAmI::Router, _ => { source_type == WhatAmI::Client || context.face.whatami == WhatAmI::Client } - } && subinfo.mode == Mode::Push - { - route.entry(*sid).or_insert_with(|| { - let key_expr = Resource::get_best_key(expr.prefix, expr.suffix, *sid); - (context.face.clone(), key_expr.to_owned(), NodeId::default()) - }); } + { + route.entry(*sid).or_insert_with(|| { + let key_expr = Resource::get_best_key(expr.prefix, expr.suffix, *sid); + (context.face.clone(), key_expr.to_owned(), NodeId::default()) + }); } } } diff --git a/zenoh/src/net/routing/hat/linkstate_peer/queries.rs b/zenoh/src/net/routing/hat/linkstate_peer/queries.rs index 51aac2175a..fa553e5121 100644 --- a/zenoh/src/net/routing/hat/linkstate_peer/queries.rs +++ b/zenoh/src/net/routing/hat/linkstate_peer/queries.rs @@ -285,7 +285,6 @@ fn register_client_queryable( remote_expr_id: None, subs: None, qabl: None, - last_values: HashMap::new(), in_interceptor_cache: None, e_interceptor_cache: None, }) diff --git a/zenoh/src/net/routing/hat/p2p_peer/mod.rs b/zenoh/src/net/routing/hat/p2p_peer/mod.rs index 1a6c1ba407..59b39d4284 100644 --- a/zenoh/src/net/routing/hat/p2p_peer/mod.rs +++ b/zenoh/src/net/routing/hat/p2p_peer/mod.rs @@ -23,9 +23,7 @@ use crate::{ protocol::linkstate::LinkStateList, routing::{ dispatcher::face::Face, - router::{ - compute_data_routes, compute_matching_pulls, 
compute_query_routes, RoutesIndexes, - }, + router::{compute_data_routes, compute_query_routes, RoutesIndexes}, }, }, runtime::Runtime, @@ -241,11 +239,7 @@ impl HatBaseTrait for HatCode { let rtables = zread!(tables.tables); for _match in subs_matches.drain(..) { let mut expr = RoutingExpr::new(&_match, ""); - matches_data_routes.push(( - _match.clone(), - compute_data_routes(&rtables, &mut expr), - compute_matching_pulls(&rtables, &mut expr), - )); + matches_data_routes.push((_match.clone(), compute_data_routes(&rtables, &mut expr))); } for _match in qabls_matches.drain(..) { matches_query_routes.push((_match.clone(), compute_query_routes(&rtables, &_match))); @@ -253,13 +247,10 @@ impl HatBaseTrait for HatCode { drop(rtables); let mut wtables = zwrite!(tables.tables); - for (mut res, data_routes, matching_pulls) in matches_data_routes { + for (mut res, data_routes) in matches_data_routes { get_mut_unchecked(&mut res) .context_mut() .update_data_routes(data_routes); - get_mut_unchecked(&mut res) - .context_mut() - .update_matching_pulls(matching_pulls); Resource::clean(&mut res); } for (mut res, query_routes) in matches_query_routes { diff --git a/zenoh/src/net/routing/hat/p2p_peer/pubsub.rs b/zenoh/src/net/routing/hat/p2p_peer/pubsub.rs index 4f6ce5aeca..a722176292 100644 --- a/zenoh/src/net/routing/hat/p2p_peer/pubsub.rs +++ b/zenoh/src/net/routing/hat/p2p_peer/pubsub.rs @@ -30,7 +30,7 @@ use zenoh_protocol::{ core::{Reliability, WhatAmI}, network::declare::{ common::ext::WireExprType, ext, subscriber::ext::SubscriberInfo, Declare, DeclareBody, - DeclareSubscriber, Mode, UndeclareSubscriber, + DeclareSubscriber, UndeclareSubscriber, }, }; use zenoh_sync::get_mut_unchecked; @@ -94,16 +94,11 @@ fn register_client_subscription( { let res = get_mut_unchecked(res); match res.session_ctxs.get_mut(&face.id) { - Some(ctx) => match &ctx.subs { - Some(info) => { - if Mode::Pull == info.mode { - get_mut_unchecked(ctx).subs = Some(*sub_info); - } - } - None => { + 
Some(ctx) => { + if ctx.subs.is_none() { get_mut_unchecked(ctx).subs = Some(*sub_info); } - }, + } None => { res.session_ctxs.insert( face.id, @@ -113,7 +108,6 @@ fn register_client_subscription( remote_expr_id: None, subs: Some(*sub_info), qabl: None, - last_values: HashMap::new(), in_interceptor_cache: None, e_interceptor_cache: None, }), @@ -132,10 +126,8 @@ fn declare_client_subscription( sub_info: &SubscriberInfo, ) { register_client_subscription(tables, face, id, res, sub_info); - let mut propa_sub_info = *sub_info; - propa_sub_info.mode = Mode::Push; - propagate_simple_subscription(tables, res, &propa_sub_info, face); + propagate_simple_subscription(tables, res, sub_info, face); // This introduced a buffer overflow on windows // TODO: Let's deactivate this on windows until Fixed #[cfg(not(windows))] @@ -243,7 +235,6 @@ fn forget_client_subscription( pub(super) fn pubsub_new_face(tables: &mut Tables, face: &mut Arc) { let sub_info = SubscriberInfo { reliability: Reliability::Reliable, // @TODO compute proper reliability to propagate from reliability of known subscribers - mode: Mode::Push, }; for src_face in tables .faces @@ -327,20 +318,19 @@ impl HatPubSubTrait for HatCode { let mres = mres.upgrade().unwrap(); for (sid, context) in &mres.session_ctxs { - if let Some(subinfo) = &context.subs { - if match tables.whatami { + if context.subs.is_some() + && match tables.whatami { WhatAmI::Router => context.face.whatami != WhatAmI::Router, _ => { source_type == WhatAmI::Client || context.face.whatami == WhatAmI::Client } - } && subinfo.mode == Mode::Push - { - route.entry(*sid).or_insert_with(|| { - let key_expr = Resource::get_best_key(expr.prefix, expr.suffix, *sid); - (context.face.clone(), key_expr.to_owned(), NodeId::default()) - }); } + { + route.entry(*sid).or_insert_with(|| { + let key_expr = Resource::get_best_key(expr.prefix, expr.suffix, *sid); + (context.face.clone(), key_expr.to_owned(), NodeId::default()) + }); } } } diff --git 
a/zenoh/src/net/routing/hat/p2p_peer/queries.rs b/zenoh/src/net/routing/hat/p2p_peer/queries.rs index 04b31b41ef..caea6fe6b8 100644 --- a/zenoh/src/net/routing/hat/p2p_peer/queries.rs +++ b/zenoh/src/net/routing/hat/p2p_peer/queries.rs @@ -22,7 +22,7 @@ use crate::net::routing::router::RoutesIndexes; use crate::net::routing::{RoutingContext, PREFIX_LIVELINESS}; use ordered_float::OrderedFloat; use std::borrow::Cow; -use std::collections::{HashMap, HashSet}; +use std::collections::HashSet; use std::sync::atomic::Ordering; use std::sync::Arc; use zenoh_buffers::ZBuf; @@ -133,7 +133,6 @@ fn register_client_queryable( remote_expr_id: None, subs: None, qabl: None, - last_values: HashMap::new(), in_interceptor_cache: None, e_interceptor_cache: None, }) diff --git a/zenoh/src/net/routing/hat/router/mod.rs b/zenoh/src/net/routing/hat/router/mod.rs index ff576ae271..47cf02db46 100644 --- a/zenoh/src/net/routing/hat/router/mod.rs +++ b/zenoh/src/net/routing/hat/router/mod.rs @@ -40,9 +40,7 @@ use crate::{ routing::{ dispatcher::face::Face, hat::TREES_COMPUTATION_DELAY_MS, - router::{ - compute_data_routes, compute_matching_pulls, compute_query_routes, RoutesIndexes, - }, + router::{compute_data_routes, compute_query_routes, RoutesIndexes}, }, }, runtime::Runtime, @@ -480,11 +478,7 @@ impl HatBaseTrait for HatCode { let rtables = zread!(tables.tables); for _match in subs_matches.drain(..) { let mut expr = RoutingExpr::new(&_match, ""); - matches_data_routes.push(( - _match.clone(), - compute_data_routes(&rtables, &mut expr), - compute_matching_pulls(&rtables, &mut expr), - )); + matches_data_routes.push((_match.clone(), compute_data_routes(&rtables, &mut expr))); } for _match in qabls_matches.drain(..) 
{ matches_query_routes.push((_match.clone(), compute_query_routes(&rtables, &_match))); @@ -492,13 +486,10 @@ impl HatBaseTrait for HatCode { drop(rtables); let mut wtables = zwrite!(tables.tables); - for (mut res, data_routes, matching_pulls) in matches_data_routes { + for (mut res, data_routes) in matches_data_routes { get_mut_unchecked(&mut res) .context_mut() .update_data_routes(data_routes); - get_mut_unchecked(&mut res) - .context_mut() - .update_matching_pulls(matching_pulls); Resource::clean(&mut res); } for (mut res, query_routes) in matches_query_routes { diff --git a/zenoh/src/net/routing/hat/router/pubsub.rs b/zenoh/src/net/routing/hat/router/pubsub.rs index da1ca66efd..93c4cb7002 100644 --- a/zenoh/src/net/routing/hat/router/pubsub.rs +++ b/zenoh/src/net/routing/hat/router/pubsub.rs @@ -33,7 +33,7 @@ use zenoh_protocol::{ core::{Reliability, WhatAmI, ZenohId}, network::declare::{ common::ext::WireExprType, ext, subscriber::ext::SubscriberInfo, Declare, DeclareBody, - DeclareSubscriber, Mode, UndeclareSubscriber, + DeclareSubscriber, UndeclareSubscriber, }, }; use zenoh_sync::get_mut_unchecked; @@ -243,8 +243,7 @@ fn declare_peer_subscription( peer: ZenohId, ) { register_peer_subscription(tables, face, res, sub_info, peer); - let mut propa_sub_info = *sub_info; - propa_sub_info.mode = Mode::Push; + let propa_sub_info = *sub_info; let zid = tables.zid; register_router_subscription(tables, face, res, &propa_sub_info, zid); } @@ -260,16 +259,11 @@ fn register_client_subscription( { let res = get_mut_unchecked(res); match res.session_ctxs.get_mut(&face.id) { - Some(ctx) => match &ctx.subs { - Some(info) => { - if Mode::Pull == info.mode { - get_mut_unchecked(ctx).subs = Some(*sub_info); - } - } - None => { + Some(ctx) => { + if ctx.subs.is_none() { get_mut_unchecked(ctx).subs = Some(*sub_info); } - }, + } None => { res.session_ctxs.insert( face.id, @@ -279,7 +273,6 @@ fn register_client_subscription( remote_expr_id: None, subs: Some(*sub_info), qabl: None, 
- last_values: HashMap::new(), in_interceptor_cache: None, e_interceptor_cache: None, }), @@ -298,10 +291,8 @@ fn declare_client_subscription( sub_info: &SubscriberInfo, ) { register_client_subscription(tables, face, id, res, sub_info); - let mut propa_sub_info = *sub_info; - propa_sub_info.mode = Mode::Push; let zid = tables.zid; - register_router_subscription(tables, face, res, &propa_sub_info, zid); + register_router_subscription(tables, face, res, sub_info, zid); } #[inline] @@ -600,7 +591,6 @@ fn forget_client_subscription( pub(super) fn pubsub_new_face(tables: &mut Tables, face: &mut Arc) { let sub_info = SubscriberInfo { reliability: Reliability::Reliable, // @TODO compute proper reliability to propagate from reliability of known subscribers - mode: Mode::Push, }; if face.whatami == WhatAmI::Client { @@ -720,7 +710,6 @@ pub(super) fn pubsub_tree_change( if *sub == tree_id { let sub_info = SubscriberInfo { reliability: Reliability::Reliable, // @TODO compute proper reliability to propagate from reliability of known subscribers - mode: Mode::Push, }; send_sourced_subscription_to_net_childs( tables, @@ -799,7 +788,6 @@ pub(super) fn pubsub_linkstate_change(tables: &mut Tables, zid: &ZenohId, links: let key_expr = Resource::decl_key(res, dst_face); let sub_info = SubscriberInfo { reliability: Reliability::Reliable, // @TODO compute proper reliability to propagate from reliability of known subscribers - mode: Mode::Push, }; dst_face.primitives.send_declare(RoutingContext::with_expr( Declare { @@ -1003,14 +991,11 @@ impl HatPubSubTrait for HatCode { if master || source_type == WhatAmI::Router { for (sid, context) in &mres.session_ctxs { - if let Some(subinfo) = &context.subs { - if context.face.whatami != WhatAmI::Router && subinfo.mode == Mode::Push { - route.entry(*sid).or_insert_with(|| { - let key_expr = - Resource::get_best_key(expr.prefix, expr.suffix, *sid); - (context.face.clone(), key_expr.to_owned(), NodeId::default()) - }); - } + if 
context.subs.is_some() && context.face.whatami != WhatAmI::Router { + route.entry(*sid).or_insert_with(|| { + let key_expr = Resource::get_best_key(expr.prefix, expr.suffix, *sid); + (context.face.clone(), key_expr.to_owned(), NodeId::default()) + }); } } } diff --git a/zenoh/src/net/routing/hat/router/queries.rs b/zenoh/src/net/routing/hat/router/queries.rs index b76f0adcc6..aca6f71b3e 100644 --- a/zenoh/src/net/routing/hat/router/queries.rs +++ b/zenoh/src/net/routing/hat/router/queries.rs @@ -413,7 +413,6 @@ fn register_client_queryable( remote_expr_id: None, subs: None, qabl: None, - last_values: HashMap::new(), in_interceptor_cache: None, e_interceptor_cache: None, }) diff --git a/zenoh/src/net/runtime/adminspace.rs b/zenoh/src/net/runtime/adminspace.rs index b67692e704..29106cb89d 100644 --- a/zenoh/src/net/runtime/adminspace.rs +++ b/zenoh/src/net/runtime/adminspace.rs @@ -388,58 +388,60 @@ impl Primitives for AdminSpace { fn send_request(&self, msg: Request) { trace!("recv Request {:?}", msg); - if let RequestBody::Query(query) = msg.payload { - let primitives = zlock!(self.primitives).as_ref().unwrap().clone(); - { - let conf = self.context.runtime.state.config.lock(); - if !conf.adminspace.permissions().read { - log::error!( + match msg.payload { + RequestBody::Query(query) => { + let primitives = zlock!(self.primitives).as_ref().unwrap().clone(); + { + let conf = self.context.runtime.state.config.lock(); + if !conf.adminspace.permissions().read { + log::error!( "Received GET on '{}' but adminspace.permissions.read=false in configuration", msg.wire_expr ); - primitives.send_response_final(ResponseFinal { - rid: msg.id, - ext_qos: ext::QoSType::RESPONSE_FINAL, - ext_tstamp: None, - }); - return; - } - } - - let key_expr = match self.key_expr_to_string(&msg.wire_expr) { - Ok(key_expr) => key_expr.into_owned(), - Err(e) => { - log::error!("Unknown KeyExpr: {}", e); - primitives.send_response_final(ResponseFinal { - rid: msg.id, - ext_qos: 
ext::QoSType::RESPONSE_FINAL, - ext_tstamp: None, - }); - return; + primitives.send_response_final(ResponseFinal { + rid: msg.id, + ext_qos: ext::QoSType::RESPONSE_FINAL, + ext_tstamp: None, + }); + return; + } } - }; - - let zid = self.zid; - let parameters = query.parameters.to_owned(); - let query = Query { - inner: Arc::new(QueryInner { - key_expr: key_expr.clone(), - parameters, - value: query - .ext_body - .map(|b| Value::from(b.payload).with_encoding(b.encoding)), - qid: msg.id, - zid, - primitives, - #[cfg(feature = "unstable")] - attachment: query.ext_attachment.map(Into::into), - }), - eid: self.queryable_id, - }; - for (key, handler) in &self.handlers { - if key_expr.intersects(key) { - handler(&self.context, query.clone()); + let key_expr = match self.key_expr_to_string(&msg.wire_expr) { + Ok(key_expr) => key_expr.into_owned(), + Err(e) => { + log::error!("Unknown KeyExpr: {}", e); + primitives.send_response_final(ResponseFinal { + rid: msg.id, + ext_qos: ext::QoSType::RESPONSE_FINAL, + ext_tstamp: None, + }); + return; + } + }; + + let zid = self.zid; + let parameters = query.parameters.to_owned(); + let query = Query { + inner: Arc::new(QueryInner { + key_expr: key_expr.clone(), + parameters, + value: query + .ext_body + .map(|b| Value::from(b.payload).with_encoding(b.encoding)), + qid: msg.id, + zid, + primitives, + #[cfg(feature = "unstable")] + attachment: query.ext_attachment.map(Into::into), + }), + eid: self.queryable_id, + }; + + for (key, handler) in &self.handlers { + if key_expr.intersects(key) { + handler(&self.context, query.clone()); + } } } } diff --git a/zenoh/src/net/tests/tables.rs b/zenoh/src/net/tests/tables.rs index 4560eefaae..516bcd0109 100644 --- a/zenoh/src/net/tests/tables.rs +++ b/zenoh/src/net/tests/tables.rs @@ -26,7 +26,6 @@ use zenoh_protocol::core::{ key_expr::keyexpr, ExprId, Reliability, WhatAmI, WireExpr, ZenohId, EMPTY_EXPR_ID, }; use zenoh_protocol::network::declare::subscriber::ext::SubscriberInfo; -use 
zenoh_protocol::network::declare::Mode; use zenoh_protocol::network::{ext, Declare, DeclareBody, DeclareKeyExpr}; use zenoh_protocol::zenoh::{PushBody, Put}; @@ -59,7 +58,6 @@ fn base_test() { let sub_info = SubscriberInfo { reliability: Reliability::Reliable, - mode: Mode::Push, }; declare_subscription( @@ -186,7 +184,6 @@ fn multisub_test() { // -------------- let sub_info = SubscriberInfo { reliability: Reliability::Reliable, - mode: Mode::Push, }; declare_subscription( zlock!(tables.ctrl_lock).as_ref(), @@ -305,7 +302,6 @@ fn clean_test() { let sub_info = SubscriberInfo { reliability: Reliability::Reliable, - mode: Mode::Push, }; declare_subscription( @@ -570,7 +566,6 @@ fn client_test() { let sub_info = SubscriberInfo { reliability: Reliability::Reliable, - mode: Mode::Push, }; let primitives0 = Arc::new(ClientPrimitives::new()); diff --git a/zenoh/src/session.rs b/zenoh/src/session.rs index 5e706a0da8..496c6879ce 100644 --- a/zenoh/src/session.rs +++ b/zenoh/src/session.rs @@ -81,7 +81,7 @@ use zenoh_protocol::{ }, zenoh::{ query::{self, ext::QueryBodyType, Consolidation}, - Pull, PushBody, RequestBody, ResponseBody, + PushBody, RequestBody, ResponseBody, }, }; use zenoh_result::ZResult; @@ -294,7 +294,7 @@ impl<'s, 'a> SessionDeclarations<'s, 'a> for SessionRef<'a> { fn declare_subscriber<'b, TryIntoKeyExpr>( &'s self, key_expr: TryIntoKeyExpr, - ) -> SubscriberBuilder<'a, 'b, PushMode, DefaultHandler> + ) -> SubscriberBuilder<'a, 'b, DefaultHandler> where TryIntoKeyExpr: TryInto>, >>::Error: Into, @@ -303,7 +303,6 @@ impl<'s, 'a> SessionDeclarations<'s, 'a> for SessionRef<'a> { session: self.clone(), key_expr: TryIntoKeyExpr::try_into(key_expr).map_err(Into::into), reliability: Reliability::DEFAULT, - mode: PushMode, origin: Locality::default(), handler: DefaultHandler, } @@ -578,7 +577,7 @@ impl<'a> SessionDeclarations<'a, 'a> for Session { fn declare_subscriber<'b, TryIntoKeyExpr>( &'a self, key_expr: TryIntoKeyExpr, - ) -> SubscriberBuilder<'a, 'b, 
PushMode, DefaultHandler> + ) -> SubscriberBuilder<'a, 'b, DefaultHandler> where TryIntoKeyExpr: TryInto>, >>::Error: Into, @@ -1556,29 +1555,6 @@ impl Session { } } - pub(crate) fn pull<'a>(&'a self, key_expr: &'a KeyExpr) -> impl Resolve> + 'a { - ResolveClosure::new(move || { - trace!("pull({:?})", key_expr); - let state = zread!(self.state); - let primitives = state.primitives.as_ref().unwrap().clone(); - drop(state); - primitives.send_request(Request { - id: 0, // @TODO compute a proper request ID - wire_expr: key_expr.to_wire(self).to_owned(), - ext_qos: ext::QoSType::REQUEST, - ext_tstamp: None, - ext_nodeid: ext::NodeIdType::DEFAULT, - ext_target: request::ext::TargetType::DEFAULT, - ext_budget: None, - ext_timeout: None, - payload: RequestBody::Pull(Pull { - ext_unknown: vec![], - }), - }); - Ok(()) - }) - } - #[allow(clippy::too_many_arguments)] pub(crate) fn query( &self, @@ -1819,7 +1795,7 @@ impl<'s> SessionDeclarations<'s, 'static> for Arc { fn declare_subscriber<'b, TryIntoKeyExpr>( &'s self, key_expr: TryIntoKeyExpr, - ) -> SubscriberBuilder<'static, 'b, PushMode, DefaultHandler> + ) -> SubscriberBuilder<'static, 'b, DefaultHandler> where TryIntoKeyExpr: TryInto>, >>::Error: Into, @@ -1828,7 +1804,6 @@ impl<'s> SessionDeclarations<'s, 'static> for Arc { session: SessionRef::Shared(self.clone()), key_expr: key_expr.try_into().map_err(Into::into), reliability: Reliability::DEFAULT, - mode: PushMode, origin: Locality::default(), handler: DefaultHandler, } @@ -2110,20 +2085,12 @@ impl Primitives for Session { #[cfg(feature = "unstable")] m.ext_attachment.map(Into::into), ), - RequestBody::Put(_) => (), - RequestBody::Del(_) => (), - RequestBody::Pull(_) => todo!(), } } fn send_response(&self, msg: Response) { trace!("recv Response {:?}", msg); match msg.payload { - ResponseBody::Put(_) => { - log::warn!( - "Received a ResponseBody::Put, but this isn't supported yet. Dropping message." 
- ) - } ResponseBody::Err(e) => { let mut state = zwrite!(self.state); match state.queries.get_mut(&msg.rid) { @@ -2453,7 +2420,7 @@ pub trait SessionDeclarations<'s, 'a> { fn declare_subscriber<'b, TryIntoKeyExpr>( &'s self, key_expr: TryIntoKeyExpr, - ) -> SubscriberBuilder<'a, 'b, PushMode, DefaultHandler> + ) -> SubscriberBuilder<'a, 'b, DefaultHandler> where TryIntoKeyExpr: TryInto>, >>::Error: Into; diff --git a/zenoh/src/subscriber.rs b/zenoh/src/subscriber.rs index 413c9201f2..4488140610 100644 --- a/zenoh/src/subscriber.rs +++ b/zenoh/src/subscriber.rs @@ -24,10 +24,10 @@ use std::fmt; use std::future::Ready; use std::ops::{Deref, DerefMut}; use std::sync::Arc; -use zenoh_core::{AsyncResolve, Resolvable, Resolve, SyncResolve}; +use zenoh_core::{AsyncResolve, Resolvable, SyncResolve}; #[cfg(feature = "unstable")] use zenoh_protocol::core::EntityGlobalId; -use zenoh_protocol::network::declare::{subscriber::ext::SubscriberInfo, Mode}; +use zenoh_protocol::network::declare::subscriber::ext::SubscriberInfo; /// The kind of reliability. pub use zenoh_protocol::core::Reliability; @@ -80,90 +80,6 @@ pub(crate) struct SubscriberInner<'a> { pub(crate) alive: bool, } -/// A [`PullMode`] subscriber that provides data through a callback. -/// -/// CallbackPullSubscribers only provide data when explicitely pulled by the -/// application with the [`pull`](CallbackPullSubscriber::pull) function. -/// CallbackPullSubscribers can be created from a zenoh [`Session`](crate::Session) -/// with the [`declare_subscriber`](crate::SessionDeclarations::declare_subscriber) function, -/// the [`callback`](SubscriberBuilder::callback) function -/// and the [`pull_mode`](SubscriberBuilder::pull_mode) function -/// of the resulting builder. -/// -/// Subscribers are automatically undeclared when dropped. 
-/// -/// # Examples -/// ``` -/// # async_std::task::block_on(async { -/// use zenoh::prelude::r#async::*; -/// -/// let session = zenoh::open(config::peer()).res().await.unwrap(); -/// let subscriber = session -/// .declare_subscriber("key/expression") -/// .callback(|sample| { println!("Received: {} {:?}", sample.key_expr(), sample.payload()); }) -/// .pull_mode() -/// .res() -/// .await -/// .unwrap(); -/// subscriber.pull(); -/// # }) -/// ``` -pub(crate) struct PullSubscriberInner<'a> { - inner: SubscriberInner<'a>, -} - -impl<'a> PullSubscriberInner<'a> { - /// Pull available data for a [`CallbackPullSubscriber`]. - /// - /// # Examples - /// ``` - /// # async_std::task::block_on(async { - /// use zenoh::prelude::r#async::*; - /// - /// let session = zenoh::open(config::peer()).res().await.unwrap(); - /// let subscriber = session - /// .declare_subscriber("key/expression") - /// .callback(|sample| { println!("Received: {} {:?}", sample.key_expr(), sample.payload()); }) - /// .pull_mode() - /// .res() - /// .await - /// .unwrap(); - /// subscriber.pull(); - /// # }) - /// ``` - #[inline] - pub fn pull(&self) -> impl Resolve> + '_ { - self.inner.session.pull(&self.inner.state.key_expr) - } - - /// Close a [`CallbackPullSubscriber`](CallbackPullSubscriber). - /// - /// `CallbackPullSubscribers` are automatically closed when dropped, but you may want to use this function to handle errors or - /// close the `CallbackPullSubscriber` asynchronously. 
- /// - /// # Examples - /// ``` - /// # async_std::task::block_on(async { - /// use zenoh::prelude::r#async::*; - /// - /// let session = zenoh::open(config::peer()).res().await.unwrap(); - /// # fn data_handler(_sample: Sample) { }; - /// let subscriber = session - /// .declare_subscriber("key/expression") - /// .callback(data_handler) - /// .pull_mode() - /// .res() - /// .await - /// .unwrap(); - /// subscriber.undeclare().res().await.unwrap(); - /// # }) - /// ``` - #[inline] - pub fn undeclare(self) -> impl Resolve> + 'a { - Undeclarable::undeclare_inner(self.inner, ()) - } -} - impl<'a> SubscriberInner<'a> { /// Close a [`CallbackSubscriber`](CallbackSubscriber). /// @@ -248,28 +164,6 @@ impl Drop for SubscriberInner<'_> { } } -/// The mode for pull subscribers. -#[non_exhaustive] -#[derive(Debug, Clone, Copy)] -pub struct PullMode; - -impl From for Mode { - fn from(_: PullMode) -> Self { - Mode::Pull - } -} - -/// The mode for push subscribers. -#[non_exhaustive] -#[derive(Debug, Clone, Copy)] -pub struct PushMode; - -impl From for Mode { - fn from(_: PushMode) -> Self { - Mode::Push - } -} - /// A builder for initializing a [`FlumeSubscriber`]. 
/// /// # Examples @@ -281,7 +175,6 @@ impl From for Mode { /// let subscriber = session /// .declare_subscriber("key/expression") /// .best_effort() -/// .pull_mode() /// .res() /// .await /// .unwrap(); @@ -289,7 +182,7 @@ impl From for Mode { /// ``` #[must_use = "Resolvables do nothing unless you resolve them using the `res` method from either `SyncResolve` or `AsyncResolve`"] #[derive(Debug)] -pub struct SubscriberBuilder<'a, 'b, Mode, Handler> { +pub struct SubscriberBuilder<'a, 'b, Handler> { #[cfg(feature = "unstable")] pub session: SessionRef<'a>, #[cfg(not(feature = "unstable"))] @@ -305,8 +198,6 @@ pub struct SubscriberBuilder<'a, 'b, Mode, Handler> { #[cfg(not(feature = "unstable"))] pub(crate) reliability: Reliability, - #[cfg(feature = "unstable")] - pub mode: Mode, #[cfg(not(feature = "unstable"))] pub(crate) mode: Mode, @@ -321,7 +212,7 @@ pub struct SubscriberBuilder<'a, 'b, Mode, Handler> { pub(crate) handler: Handler, } -impl<'a, 'b, Mode> SubscriberBuilder<'a, 'b, Mode, DefaultHandler> { +impl<'a, 'b> SubscriberBuilder<'a, 'b, DefaultHandler> { /// Receive the samples for this subscription with a callback. 
/// /// # Examples @@ -339,7 +230,7 @@ impl<'a, 'b, Mode> SubscriberBuilder<'a, 'b, Mode, DefaultHandler> { /// # }) /// ``` #[inline] - pub fn callback(self, callback: Callback) -> SubscriberBuilder<'a, 'b, Mode, Callback> + pub fn callback(self, callback: Callback) -> SubscriberBuilder<'a, 'b, Callback> where Callback: Fn(Sample) + Send + Sync + 'static, { @@ -347,7 +238,7 @@ impl<'a, 'b, Mode> SubscriberBuilder<'a, 'b, Mode, DefaultHandler> { session, key_expr, reliability, - mode, + origin, handler: _, } = self; @@ -355,7 +246,7 @@ impl<'a, 'b, Mode> SubscriberBuilder<'a, 'b, Mode, DefaultHandler> { session, key_expr, reliability, - mode, + origin, handler: callback, } @@ -385,7 +276,7 @@ impl<'a, 'b, Mode> SubscriberBuilder<'a, 'b, Mode, DefaultHandler> { pub fn callback_mut( self, callback: CallbackMut, - ) -> SubscriberBuilder<'a, 'b, Mode, impl Fn(Sample) + Send + Sync + 'static> + ) -> SubscriberBuilder<'a, 'b, impl Fn(Sample) + Send + Sync + 'static> where CallbackMut: FnMut(Sample) + Send + Sync + 'static, { @@ -412,7 +303,7 @@ impl<'a, 'b, Mode> SubscriberBuilder<'a, 'b, Mode, DefaultHandler> { /// # }) /// ``` #[inline] - pub fn with(self, handler: Handler) -> SubscriberBuilder<'a, 'b, Mode, Handler> + pub fn with(self, handler: Handler) -> SubscriberBuilder<'a, 'b, Handler> where Handler: crate::prelude::IntoHandler<'static, Sample>, { @@ -420,7 +311,6 @@ impl<'a, 'b, Mode> SubscriberBuilder<'a, 'b, Mode, DefaultHandler> { session, key_expr, reliability, - mode, origin, handler: _, } = self; @@ -428,13 +318,13 @@ impl<'a, 'b, Mode> SubscriberBuilder<'a, 'b, Mode, DefaultHandler> { session, key_expr, reliability, - mode, origin, handler, } } } -impl<'a, 'b, Mode, Handler> SubscriberBuilder<'a, 'b, Mode, Handler> { + +impl<'a, 'b, Handler> SubscriberBuilder<'a, 'b, Handler> { /// Change the subscription reliability. 
#[inline] pub fn reliability(mut self, reliability: Reliability) -> Self { @@ -464,52 +354,10 @@ impl<'a, 'b, Mode, Handler> SubscriberBuilder<'a, 'b, Mode, Handler> { self.origin = origin; self } - - /// Change the subscription mode to Pull. - #[inline] - pub fn pull_mode(self) -> SubscriberBuilder<'a, 'b, PullMode, Handler> { - let SubscriberBuilder { - session, - key_expr, - reliability, - mode: _, - origin, - handler, - } = self; - SubscriberBuilder { - session, - key_expr, - reliability, - mode: PullMode, - origin, - handler, - } - } - - /// Change the subscription mode to Push. - #[inline] - pub fn push_mode(self) -> SubscriberBuilder<'a, 'b, PushMode, Handler> { - let SubscriberBuilder { - session, - key_expr, - reliability, - mode: _, - origin, - handler, - } = self; - SubscriberBuilder { - session, - key_expr, - reliability, - mode: PushMode, - origin, - handler, - } - } } // Push mode -impl<'a, Handler> Resolvable for SubscriberBuilder<'a, '_, PushMode, Handler> +impl<'a, Handler> Resolvable for SubscriberBuilder<'a, '_, Handler> where Handler: IntoHandler<'static, Sample> + Send, Handler::Handler: Send, @@ -517,7 +365,7 @@ where type To = ZResult>; } -impl<'a, Handler> SyncResolve for SubscriberBuilder<'a, '_, PushMode, Handler> +impl<'a, Handler> SyncResolve for SubscriberBuilder<'a, '_, Handler> where Handler: IntoHandler<'static, Sample> + Send, Handler::Handler: Send, @@ -534,7 +382,6 @@ where callback, &SubscriberInfo { reliability: self.reliability, - mode: self.mode.into(), }, ) .map(|sub_state| Subscriber { @@ -548,61 +395,7 @@ where } } -impl<'a, Handler> AsyncResolve for SubscriberBuilder<'a, '_, PushMode, Handler> -where - Handler: IntoHandler<'static, Sample> + Send, - Handler::Handler: Send, -{ - type Future = Ready; - - fn res_async(self) -> Self::Future { - std::future::ready(self.res_sync()) - } -} - -// Pull mode -impl<'a, Handler> Resolvable for SubscriberBuilder<'a, '_, PullMode, Handler> -where - Handler: IntoHandler<'static, Sample> 
+ Send, - Handler::Handler: Send, -{ - type To = ZResult>; -} - -impl<'a, Handler> SyncResolve for SubscriberBuilder<'a, '_, PullMode, Handler> -where - Handler: IntoHandler<'static, Sample> + Send, - Handler::Handler: Send, -{ - fn res_sync(self) -> ::To { - let key_expr = self.key_expr?; - let session = self.session; - let (callback, receiver) = self.handler.into_handler(); - session - .declare_subscriber_inner( - &key_expr, - &None, - self.origin, - callback, - &SubscriberInfo { - reliability: self.reliability, - mode: self.mode.into(), - }, - ) - .map(|sub_state| PullSubscriber { - subscriber: PullSubscriberInner { - inner: SubscriberInner { - session, - state: sub_state, - alive: true, - }, - }, - receiver, - }) - } -} - -impl<'a, Handler> AsyncResolve for SubscriberBuilder<'a, '_, PullMode, Handler> +impl<'a, Handler> AsyncResolve for SubscriberBuilder<'a, '_, Handler> where Handler: IntoHandler<'static, Sample> + Send, Handler::Handler: Send, @@ -647,102 +440,6 @@ pub struct Subscriber<'a, Receiver> { pub receiver: Receiver, } -/// A [`PullMode`] subscriber that provides data through a [`Handler`](crate::prelude::IntoHandler). -/// -/// PullSubscribers only provide data when explicitely pulled by the -/// application with the [`pull`](PullSubscriber::pull) function. -/// PullSubscribers can be created from a zenoh [`Session`](crate::Session) -/// with the [`declare_subscriber`](crate::SessionDeclarations::declare_subscriber) function, -/// the [`with`](SubscriberBuilder::with) function -/// and the [`pull_mode`](SubscriberBuilder::pull_mode) function -/// of the resulting builder. -/// -/// Subscribers are automatically undeclared when dropped. 
-/// -/// # Examples -/// ``` -/// # async_std::task::block_on(async { -/// use zenoh::prelude::r#async::*; -/// -/// let session = zenoh::open(config::peer()).res().await.unwrap(); -/// let subscriber = session -/// .declare_subscriber("key/expression") -/// .with(flume::bounded(32)) -/// .pull_mode() -/// .res() -/// .await -/// .unwrap(); -/// subscriber.pull(); -/// # }) -/// ``` -#[non_exhaustive] -pub struct PullSubscriber<'a, Receiver> { - pub(crate) subscriber: PullSubscriberInner<'a>, - pub receiver: Receiver, -} - -impl<'a, Receiver> Deref for PullSubscriber<'a, Receiver> { - type Target = Receiver; - fn deref(&self) -> &Self::Target { - &self.receiver - } -} - -impl<'a, Receiver> DerefMut for PullSubscriber<'a, Receiver> { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.receiver - } -} - -impl<'a, Receiver> PullSubscriber<'a, Receiver> { - /// Pull available data for a [`PullSubscriber`]. - /// - /// # Examples - /// ``` - /// # async_std::task::block_on(async { - /// use zenoh::prelude::r#async::*; - /// - /// let session = zenoh::open(config::peer()).res().await.unwrap(); - /// let subscriber = session - /// .declare_subscriber("key/expression") - /// .with(flume::bounded(32)) - /// .pull_mode() - /// .res() - /// .await - /// .unwrap(); - /// subscriber.pull(); - /// # }) - /// ``` - #[inline] - pub fn pull(&self) -> impl Resolve> + '_ { - self.subscriber.pull() - } - - /// Close a [`PullSubscriber`]. - /// - /// Subscribers are automatically closed when dropped, but you may want to use this function to handle errors or - /// close the Subscriber asynchronously. 
- /// - /// # Examples - /// ``` - /// # async_std::task::block_on(async { - /// use zenoh::prelude::r#async::*; - /// - /// let session = zenoh::open(config::peer()).res().await.unwrap(); - /// let subscriber = session.declare_subscriber("key/expression") - /// .pull_mode() - /// .res() - /// .await - /// .unwrap(); - /// subscriber.undeclare().res().await.unwrap(); - /// # }) - /// ``` - #[inline] - pub fn undeclare(self) -> impl Resolve> + 'a { - self.subscriber.undeclare() - } -} - impl<'a, Receiver> Subscriber<'a, Receiver> { /// Returns the [`EntityGlobalId`] of this Subscriber. /// From 5b18594a5084bc2f98fc14ee52078dffa2328eec Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Tue, 19 Mar 2024 14:58:46 +0100 Subject: [PATCH 024/124] replaced sample new to put/delete --- .../src/replica/aligner.rs | 2 +- .../src/replica/storage.rs | 250 ++++++++++-------- zenoh/src/sample.rs | 70 +++-- zenoh/src/session.rs | 6 +- 4 files changed, 192 insertions(+), 136 deletions(-) diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs b/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs index fb46b78082..0df648409d 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs @@ -108,7 +108,7 @@ impl Aligner { let Value { payload, encoding, .. 
} = value; - let sample = Sample::new(key, payload) + let sample = Sample::put(key, payload) .with_encoding(encoding) .with_timestamp(ts); log::debug!("[ALIGNER] Adding {:?} to storage", sample); diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs index 0708dcabd9..9e4ae7ad0e 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs @@ -19,12 +19,12 @@ use async_trait::async_trait; use flume::{Receiver, Sender}; use futures::select; use std::collections::{HashMap, HashSet}; -use std::str::{self, FromStr}; +use std::str::FromStr; use std::time::{SystemTime, UNIX_EPOCH}; use zenoh::buffers::ZBuf; use zenoh::prelude::r#async::*; use zenoh::query::ConsolidationMode; -use zenoh::time::{Timestamp, NTP64}; +use zenoh::time::{new_reception_timestamp, Timestamp, NTP64}; use zenoh::{Result as ZResult, Session}; use zenoh_backend_traits::config::{GarbageCollectionConfig, StorageConfig}; use zenoh_backend_traits::{Capability, History, Persistence, StorageInsertionResult, StoredData}; @@ -39,9 +39,102 @@ pub const WILDCARD_UPDATES_FILENAME: &str = "wildcard_updates"; pub const TOMBSTONE_FILENAME: &str = "tombstones"; #[derive(Clone)] -struct Update { - kind: SampleKind, - data: StoredData, +enum Update { + Put(StoredData), + Delete(Timestamp), +} + +impl From for Update { + fn from(sample: Sample) -> Self { + let mut sample = sample; + let timestamp = *sample.ensure_timestamp(); + match sample.kind() { + SampleKind::Put => Update::Put(StoredData { + value: Value::from(sample), + timestamp, + }), + SampleKind::Delete => Update::Delete(timestamp), + } + } +} + +impl Update { + fn timestamp(&self) -> &Timestamp { + match self { + Update::Put(data) => &data.timestamp, + Update::Delete(ts) => ts, + } + } +} + +// implement from String for Update +impl TryFrom for Update { + type Error = zenoh::Error; + + fn 
try_from(value: String) -> Result { + let result: (String, String, String, Vec<&[u8]>) = serde_json::from_str(&value)?; + let mut payload = ZBuf::default(); + for slice in result.3 { + payload.push_zslice(slice.to_vec().into()); + } + let value = Value::new(payload).with_encoding(result.2); + let timestamp = Timestamp::from_str(&result.1).map_err(|_|"Error parsing timestamp")?; + if result .0.eq(&(SampleKind::Put).to_string()) { + Ok(Update::Put(StoredData { value, timestamp })) + } else { + Ok(Update::Delete(timestamp)) + } + } +} + +// implement to_string for Update +impl ToString for Update { + fn to_string(&self) -> String { + let result = match self { + Update::Put(data) => ( + SampleKind::Put.to_string(), + data.timestamp.to_string(), + data.value.encoding.to_string(), + data.value.payload.slices().collect::>(), + ), + Update::Delete(ts) => ( + SampleKind::Delete.to_string(), + ts.to_string(), + "".to_string(), + vec![], + ), + }; + serde_json::to_string_pretty(&result).unwrap() + } +} + +trait IntoSample { + fn into_sample(self, key_expr: IntoKeyExpr) -> Sample + where + IntoKeyExpr: Into>; +} + +impl IntoSample for StoredData { + fn into_sample(self, key_expr: IntoKeyExpr) -> Sample + where + IntoKeyExpr: Into>, + { + Sample::put(key_expr, self.value.payload) + .with_encoding(self.value.encoding) + .with_timestamp(self.timestamp) + } +} + +impl IntoSample for Update { + fn into_sample(self, key_expr: IntoKeyExpr) -> Sample + where + IntoKeyExpr: Into>, + { + match self { + Update::Put(data) => data.into_sample(key_expr), + Update::Delete(ts) => Sample::delete(key_expr).with_timestamp(ts), + } + } } pub struct ReplicationService { @@ -109,10 +202,10 @@ impl StorageService { let saved_wc = std::fs::read_to_string(zenoh_home().join(WILDCARD_UPDATES_FILENAME)).unwrap(); let saved_wc: HashMap = - serde_json::from_str(&saved_wc).unwrap(); + serde_json::from_str(&saved_wc).unwrap(); // TODO: Remove unwrap let mut wildcard_updates = 
storage_service.wildcard_updates.write().await; for (k, data) in saved_wc { - wildcard_updates.insert(&k, construct_update(data)); + wildcard_updates.insert(&k, Update::try_from(data).unwrap()); // TODO: Remove unwrap } } } @@ -269,6 +362,7 @@ impl StorageService { } else { sample }; + let sample_timestamp = *sample.timestamp().unwrap_or(&new_reception_timestamp()); // if wildcard, update wildcard_updates if sample.key_expr().is_wild() { @@ -297,28 +391,17 @@ impl StorageService { log::trace!( "Sample `{:?}` identified as neded processing for key {}", sample, - k + &k ); // there might be the case that the actual update was outdated due to a wild card update, but not stored yet in the storage. // get the relevant wild card entry and use that value and timestamp to update the storage - let sample_to_store = match self - .ovderriding_wild_update(&k, sample.timestamp().unwrap()) - .await - { - Some(overriding_update) => { - let Value { - payload, encoding, .. - } = overriding_update.data.value; - Sample::new(KeyExpr::from(k.clone()), payload) - .with_encoding(encoding) - .with_timestamp(overriding_update.data.timestamp) - .with_kind(overriding_update.kind) - } - None => Sample::new(KeyExpr::from(k.clone()), sample.payload().clone()) - .with_encoding(sample.encoding().clone()) - .with_timestamp(*sample.timestamp().unwrap()) - .with_kind(sample.kind()), - }; + let sample_to_store = + match self.ovderriding_wild_update(&k, &sample_timestamp).await { + Some(overriding_update) => overriding_update.into_sample(k.clone()), + + None => sample.clone(), + }; + let timestamp = sample_to_store.timestamp().unwrap_or(&sample_timestamp); let stripped_key = match self.strip_prefix(sample_to_store.key_expr()) { Ok(stripped) => stripped, @@ -341,11 +424,8 @@ impl StorageService { } SampleKind::Delete => { // register a tombstone - self.mark_tombstone(&k, *sample_to_store.timestamp().unwrap()) - .await; - storage - .delete(stripped_key, *sample_to_store.timestamp().unwrap()) - .await + 
self.mark_tombstone(&k, *timestamp).await; + storage.delete(stripped_key, *timestamp).await } }; drop(storage); @@ -393,22 +473,12 @@ impl StorageService { // @TODO: change into a better store that does incremental writes let key = sample.key_expr().clone(); let mut wildcards = self.wildcard_updates.write().await; - let timestamp = *sample.timestamp().unwrap(); - wildcards.insert( - &key, - Update { - kind: sample.kind(), - data: StoredData { - value: Value::from(sample), - timestamp, - }, - }, - ); + wildcards.insert(&key, sample.into()); if self.capability.persistence.eq(&Persistence::Durable) { // flush to disk to makeit durable let mut serialized_data = HashMap::new(); for (k, update) in wildcards.key_value_pairs() { - serialized_data.insert(k, serialize_update(update)); + serialized_data.insert(k, update.to_string()); } if let Err(e) = std::fs::write( zenoh_home().join(WILDCARD_UPDATES_FILENAME), @@ -437,34 +507,36 @@ impl StorageService { let mut update = None; for node in wildcards.intersecting_keys(key_expr) { let weight = wildcards.weight_at(&node); - if weight.is_some() && weight.unwrap().data.timestamp > *ts { - // if the key matches a wild card update, check whether it was saved in storage - // remember that wild card updates change only existing keys - let stripped_key = match self.strip_prefix(&key_expr.into()) { - Ok(stripped) => stripped, - Err(e) => { - log::error!("{}", e); - break; - } - }; - let mut storage = self.storage.lock().await; - match storage.get(stripped_key, "").await { - Ok(stored_data) => { - for entry in stored_data { - if entry.timestamp > *ts { - return None; + if let Some(weight) = weight { + if weight.timestamp() > ts { + // if the key matches a wild card update, check whether it was saved in storage + // remember that wild card updates change only existing keys + let stripped_key = match self.strip_prefix(&key_expr.into()) { + Ok(stripped) => stripped, + Err(e) => { + log::error!("{}", e); + break; + } + }; + let mut storage = 
self.storage.lock().await; + match storage.get(stripped_key, "").await { + Ok(stored_data) => { + for entry in stored_data { + if entry.timestamp > *ts { + return None; + } } } - } - Err(e) => { - log::warn!( - "Storage '{}' raised an error fetching a query on key {} : {}", - self.name, - key_expr, - e - ); - ts = &weight.unwrap().data.timestamp; - update = Some(weight.unwrap().clone()); + Err(e) => { + log::warn!( + "Storage '{}' raised an error fetching a query on key {} : {}", + self.name, + key_expr, + e + ); + ts = weight.timestamp(); + update = Some(weight.clone()); + } } } } @@ -517,12 +589,7 @@ impl StorageService { match storage.get(stripped_key, q.parameters()).await { Ok(stored_data) => { for entry in stored_data { - let Value { - payload, encoding, .. - } = entry.value; - let sample = Sample::new(key.clone(), payload) - .with_encoding(encoding) - .with_timestamp(entry.timestamp); + let sample = entry.into_sample(key.clone()); // apply outgoing interceptor on results let sample = if let Some(ref interceptor) = self.out_interceptor { interceptor(sample) @@ -558,7 +625,7 @@ impl StorageService { let Value { payload, encoding, .. 
} = entry.value; - let sample = Sample::new(q.key_expr().clone(), payload) + let sample = Sample::put(q.key_expr().clone(), payload) .with_encoding(encoding) .with_timestamp(entry.timestamp); // apply outgoing interceptor on results @@ -687,35 +754,6 @@ impl StorageService { } } -fn serialize_update(update: &Update) -> String { - let result = ( - update.kind.to_string(), - update.data.timestamp.to_string(), - update.data.value.encoding.to_string(), - update.data.value.payload.slices().collect::>(), - ); - serde_json::to_string_pretty(&result).unwrap() -} - -fn construct_update(data: String) -> Update { - let result: (String, String, String, Vec<&[u8]>) = serde_json::from_str(&data).unwrap(); // @TODO: remove the unwrap() - let mut payload = ZBuf::default(); - for slice in result.3 { - payload.push_zslice(slice.to_vec().into()); - } - let value = Value::new(payload).with_encoding(result.2); - let data = StoredData { - value, - timestamp: Timestamp::from_str(&result.1).unwrap(), // @TODO: remove the unwrap() - }; - let kind = if result.0.eq(&(SampleKind::Put).to_string()) { - SampleKind::Put - } else { - SampleKind::Delete - }; - Update { kind, data } -} - // Periodic event cleaning-up data info for old metadata struct GarbageCollectionEvent { config: GarbageCollectionConfig, @@ -747,7 +785,7 @@ impl Timed for GarbageCollectionEvent { let mut to_be_removed = HashSet::new(); for (k, update) in wildcard_updates.key_value_pairs() { - let ts = update.data.timestamp; + let ts = update.timestamp(); if ts.get_time() < &time_limit { // mark key to be removed to_be_removed.insert(k); diff --git a/zenoh/src/sample.rs b/zenoh/src/sample.rs index e294fd2c0c..757d65afd8 100644 --- a/zenoh/src/sample.rs +++ b/zenoh/src/sample.rs @@ -378,9 +378,9 @@ pub struct Sample { } impl Sample { - /// Creates a new Sample. + /// Creates a "put" Sample. 
#[inline] - pub fn new(key_expr: IntoKeyExpr, payload: IntoPayload) -> Self + pub fn put(key_expr: IntoKeyExpr, payload: IntoPayload) -> Self where IntoKeyExpr: Into>, IntoPayload: Into, @@ -389,7 +389,7 @@ impl Sample { key_expr: key_expr.into(), payload: payload.into(), encoding: Encoding::default(), - kind: SampleKind::default(), + kind: SampleKind::Put, timestamp: None, qos: QoS::default(), #[cfg(feature = "unstable")] @@ -398,29 +398,55 @@ impl Sample { attachment: None, } } - /// Creates a new Sample. + + /// Creates a "delete" Sample. #[inline] - pub fn try_from( - key_expr: TryIntoKeyExpr, - payload: IntoPayload, - ) -> Result + pub fn delete(key_expr: IntoKeyExpr) -> Self where - TryIntoKeyExpr: TryInto>, - >>::Error: Into, - IntoPayload: Into, + IntoKeyExpr: Into>, { - Ok(Sample { - key_expr: key_expr.try_into().map_err(Into::into)?, - payload: payload.into(), + Sample { + key_expr: key_expr.into(), + payload: Payload::empty(), encoding: Encoding::default(), - kind: SampleKind::default(), + kind: SampleKind::Delete, timestamp: None, qos: QoS::default(), #[cfg(feature = "unstable")] source_info: SourceInfo::empty(), #[cfg(feature = "unstable")] attachment: None, - }) + } + } + + /// Attempts to create a "put" Sample + #[inline] + pub fn try_put( + key_expr: TryIntoKeyExpr, + payload: TryIntoPayload, + ) -> Result + where + TryIntoKeyExpr: TryInto>, + >>::Error: Into, + TryIntoPayload: TryInto, + >::Error: Into, + { + let key_expr: KeyExpr<'static> = key_expr.try_into().map_err(Into::into)?; + let payload: Payload = payload.try_into().map_err(Into::into)?; + Ok(Self::put(key_expr, payload)) + } + + /// Attempts to create a "delete" Sample + #[inline] + pub fn try_delete( + key_expr: TryIntoKeyExpr, + ) -> Result + where + TryIntoKeyExpr: TryInto>, + >>::Error: Into, + { + let key_expr: KeyExpr<'static> = key_expr.try_into().map_err(Into::into)?; + Ok(Self::delete(key_expr)) } /// Creates a new Sample with optional data info. 
@@ -444,9 +470,10 @@ impl Sample { self } - /// Sets the encoding of this Sample. + /// Sets the encoding of this Sample #[inline] pub fn with_encoding(mut self, encoding: Encoding) -> Self { + assert!(self.kind == SampleKind::Put, "Cannot set encoding on a delete sample"); self.encoding = encoding; self } @@ -469,15 +496,6 @@ impl Sample { self.kind } - /// Sets the kind of this Sample. - #[inline] - #[doc(hidden)] - #[zenoh_macros::unstable] - pub fn with_kind(mut self, kind: SampleKind) -> Self { - self.kind = kind; - self - } - /// Gets the encoding of this sample #[inline] pub fn encoding(&self) -> &Encoding { diff --git a/zenoh/src/session.rs b/zenoh/src/session.rs index 5e706a0da8..93d1e2fb9d 100644 --- a/zenoh/src/session.rs +++ b/zenoh/src/session.rs @@ -1538,7 +1538,7 @@ impl Session { let zenoh_collections::single_or_vec::IntoIter { drain, last } = callbacks.into_iter(); for (cb, key_expr) in drain { #[allow(unused_mut)] - let mut sample = Sample::new(key_expr, payload.clone()).with_info(info.clone()); + let mut sample = Sample::put(key_expr, payload.clone()).with_info(info.clone()); #[cfg(feature = "unstable")] { sample.attachment = attachment.clone(); @@ -1547,7 +1547,7 @@ impl Session { } if let Some((cb, key_expr)) = last { #[allow(unused_mut)] - let mut sample = Sample::new(key_expr, payload).with_info(info); + let mut sample = Sample::put(key_expr, payload).with_info(info); #[cfg(feature = "unstable")] { sample.attachment = attachment; @@ -2257,7 +2257,7 @@ impl Primitives for Session { #[allow(unused_mut)] let mut sample = - Sample::new(key_expr.into_owned(), payload).with_info(Some(info)); + Sample::put(key_expr.into_owned(), payload).with_info(Some(info)); #[cfg(feature = "unstable")] { sample.attachment = attachment; From 1038beb92c438d477215813eaed9c173d9785f94 Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Tue, 19 Mar 2024 16:14:06 +0100 Subject: [PATCH 025/124] interceptors removed --- .../src/replica/storage.rs | 22 ------------------- 
1 file changed, 22 deletions(-) diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs index 9e4ae7ad0e..ed7c6a1d9c 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs @@ -153,8 +153,6 @@ pub struct StorageService { capability: Capability, tombstones: Arc>>, wildcard_updates: Arc>>, - in_interceptor: Option Sample + Send + Sync>>, - out_interceptor: Option Sample + Send + Sync>>, replication: Option, } @@ -178,8 +176,6 @@ impl StorageService { capability: store_intercept.capability, tombstones: Arc::new(RwLock::new(KeBoxTree::default())), wildcard_updates: Arc::new(RwLock::new(KeBoxTree::default())), - in_interceptor: store_intercept.in_interceptor, - out_interceptor: store_intercept.out_interceptor, replication, }; if storage_service @@ -356,12 +352,6 @@ impl StorageService { // the trimming during PUT and GET should be handled by the plugin async fn process_sample(&self, sample: Sample) { log::trace!("[STORAGE] Processing sample: {:?}", sample); - // Call incoming data interceptor (if any) - let sample = if let Some(ref interceptor) = self.in_interceptor { - interceptor(sample) - } else { - sample - }; let sample_timestamp = *sample.timestamp().unwrap_or(&new_reception_timestamp()); // if wildcard, update wildcard_updates @@ -590,12 +580,6 @@ impl StorageService { Ok(stored_data) => { for entry in stored_data { let sample = entry.into_sample(key.clone()); - // apply outgoing interceptor on results - let sample = if let Some(ref interceptor) = self.out_interceptor { - interceptor(sample) - } else { - sample - }; if let Err(e) = q.reply_sample(sample).res().await { log::warn!( "Storage '{}' raised an error replying a query: {}", @@ -628,12 +612,6 @@ impl StorageService { let sample = Sample::put(q.key_expr().clone(), payload) .with_encoding(encoding) .with_timestamp(entry.timestamp); - // apply 
outgoing interceptor on results - let sample = if let Some(ref interceptor) = self.out_interceptor { - interceptor(sample) - } else { - sample - }; if let Err(e) = q.reply_sample(sample).res().await { log::warn!( "Storage '{}' raised an error replying a query: {}", From 09a84b34ad9be1fa39b4f0dd268d722d6a211f72 Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Tue, 19 Mar 2024 16:29:14 +0100 Subject: [PATCH 026/124] interceptors removed --- plugins/zenoh-backend-example/src/lib.rs | 13 ++---------- plugins/zenoh-backend-traits/src/lib.rs | 8 -------- .../src/backends_mgt.rs | 7 ------- .../zenoh-plugin-storage-manager/src/lib.rs | 4 ---- .../src/memory_backend/mod.rs | 20 ------------------- 5 files changed, 2 insertions(+), 50 deletions(-) diff --git a/plugins/zenoh-backend-example/src/lib.rs b/plugins/zenoh-backend-example/src/lib.rs index 602d29f375..f81231a498 100644 --- a/plugins/zenoh-backend-example/src/lib.rs +++ b/plugins/zenoh-backend-example/src/lib.rs @@ -13,11 +13,8 @@ // use async_std::sync::RwLock; use async_trait::async_trait; -use std::{ - collections::{hash_map::Entry, HashMap}, - sync::Arc, -}; -use zenoh::{prelude::OwnedKeyExpr, sample::Sample, time::Timestamp, value::Value}; +use std::collections::{hash_map::Entry, HashMap}; +use zenoh::{prelude::OwnedKeyExpr, time::Timestamp, value::Value}; use zenoh_backend_traits::{ config::{StorageConfig, VolumeConfig}, Capability, History, Persistence, Storage, StorageInsertionResult, StoredData, Volume, @@ -71,12 +68,6 @@ impl Volume for ExampleBackend { async fn create_storage(&self, _props: StorageConfig) -> ZResult> { Ok(Box::::default()) } - fn incoming_data_interceptor(&self) -> Option Sample + Send + Sync>> { - None - } - fn outgoing_data_interceptor(&self) -> Option Sample + Send + Sync>> { - None - } } #[async_trait] diff --git a/plugins/zenoh-backend-traits/src/lib.rs b/plugins/zenoh-backend-traits/src/lib.rs index d17e6dfd77..91e030d361 100644 --- a/plugins/zenoh-backend-traits/src/lib.rs +++ 
b/plugins/zenoh-backend-traits/src/lib.rs @@ -210,14 +210,6 @@ pub trait Volume: Send + Sync { /// Creates a storage configured with some properties. async fn create_storage(&self, props: StorageConfig) -> ZResult>; - - /// Returns an interceptor that will be called before pushing any data - /// into a storage created by this backend. `None` can be returned for no interception point. - fn incoming_data_interceptor(&self) -> Option Sample + Send + Sync>>; - - /// Returns an interceptor that will be called before sending any reply - /// to a query from a storage created by this backend. `None` can be returned for no interception point. - fn outgoing_data_interceptor(&self) -> Option Sample + Send + Sync>>; } pub type VolumeInstance = Box; diff --git a/plugins/zenoh-plugin-storage-manager/src/backends_mgt.rs b/plugins/zenoh-plugin-storage-manager/src/backends_mgt.rs index aa7260e868..90a6ae6250 100644 --- a/plugins/zenoh-plugin-storage-manager/src/backends_mgt.rs +++ b/plugins/zenoh-plugin-storage-manager/src/backends_mgt.rs @@ -14,7 +14,6 @@ use super::storages_mgt::*; use flume::Sender; use std::sync::Arc; -use zenoh::prelude::r#async::*; use zenoh::Session; use zenoh_backend_traits::config::StorageConfig; use zenoh_backend_traits::{Capability, VolumeInstance}; @@ -23,16 +22,12 @@ use zenoh_result::ZResult; pub struct StoreIntercept { pub storage: Box, pub capability: Capability, - pub in_interceptor: Option Sample + Send + Sync>>, - pub out_interceptor: Option Sample + Send + Sync>>, } pub(crate) async fn create_and_start_storage( admin_key: String, config: StorageConfig, backend: &VolumeInstance, - in_interceptor: Option Sample + Send + Sync>>, - out_interceptor: Option Sample + Send + Sync>>, zenoh: Arc, ) -> ZResult> { log::trace!("Create storage '{}'", &admin_key); @@ -41,8 +36,6 @@ pub(crate) async fn create_and_start_storage( let store_intercept = StoreIntercept { storage, capability, - in_interceptor, - out_interceptor, }; start_storage(store_intercept, 
config, admin_key, zenoh).await diff --git a/plugins/zenoh-plugin-storage-manager/src/lib.rs b/plugins/zenoh-plugin-storage-manager/src/lib.rs index 0db30bbd6a..91df2f108d 100644 --- a/plugins/zenoh-plugin-storage-manager/src/lib.rs +++ b/plugins/zenoh-plugin-storage-manager/src/lib.rs @@ -239,14 +239,10 @@ impl StorageRuntimeInner { volume_id, backend.name() ); - let in_interceptor = backend.instance().incoming_data_interceptor(); - let out_interceptor = backend.instance().outgoing_data_interceptor(); let stopper = async_std::task::block_on(create_and_start_storage( admin_key, storage.clone(), backend.instance(), - in_interceptor, - out_interceptor, self.session.clone(), ))?; self.storages diff --git a/plugins/zenoh-plugin-storage-manager/src/memory_backend/mod.rs b/plugins/zenoh-plugin-storage-manager/src/memory_backend/mod.rs index ebb4922c9d..4e333b8592 100644 --- a/plugins/zenoh-plugin-storage-manager/src/memory_backend/mod.rs +++ b/plugins/zenoh-plugin-storage-manager/src/memory_backend/mod.rs @@ -61,26 +61,6 @@ impl Volume for MemoryBackend { log::debug!("Create Memory Storage with configuration: {:?}", properties); Ok(Box::new(MemoryStorage::new(properties).await?)) } - - fn incoming_data_interceptor(&self) -> Option Sample + Send + Sync>> { - // By default: no interception point - None - // To test interceptors, uncomment this line: - // Some(Arc::new(|sample| { - // trace!(">>>> IN INTERCEPTOR FOR {:?}", sample); - // sample - // })) - } - - fn outgoing_data_interceptor(&self) -> Option Sample + Send + Sync>> { - // By default: no interception point - None - // To test interceptors, uncomment this line: - // Some(Arc::new(|sample| { - // trace!("<<<< OUT INTERCEPTOR FOR {:?}", sample); - // sample - // })) - } } impl Drop for MemoryBackend { From 886c37c1922b7882fa3c670f5ad71b1662857729 Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Tue, 19 Mar 2024 17:13:02 +0100 Subject: [PATCH 027/124] storage sample added --- .../src/replica/storage.rs | 144 
+++++++++++------- 1 file changed, 91 insertions(+), 53 deletions(-) diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs index ed7c6a1d9c..41a456e344 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs @@ -27,7 +27,7 @@ use zenoh::query::ConsolidationMode; use zenoh::time::{new_reception_timestamp, Timestamp, NTP64}; use zenoh::{Result as ZResult, Session}; use zenoh_backend_traits::config::{GarbageCollectionConfig, StorageConfig}; -use zenoh_backend_traits::{Capability, History, Persistence, StorageInsertionResult, StoredData}; +use zenoh_backend_traits::{Capability, History, Persistence, Storage, StorageInsertionResult, StoredData}; use zenoh_keyexpr::key_expr::OwnedKeyExpr; use zenoh_keyexpr::keyexpr_tree::impls::KeyedSetProvider; use zenoh_keyexpr::keyexpr_tree::{support::NonWild, support::UnknownWildness, KeBoxTree}; @@ -38,22 +38,52 @@ use zenoh_util::{zenoh_home, Timed, TimedEvent, Timer}; pub const WILDCARD_UPDATES_FILENAME: &str = "wildcard_updates"; pub const TOMBSTONE_FILENAME: &str = "tombstones"; +#[derive(Clone, Debug)] +enum StorageSampleKind { + Put(Value), + Delete, +} + +#[derive(Clone, Debug)] +struct StorageSample { + pub key_expr: KeyExpr<'static>, + pub timestamp: Timestamp, + pub kind: StorageSampleKind, +} + +impl From for StorageSample { + fn from(sample: Sample) -> Self { + let timestamp = *sample.timestamp().unwrap_or(&new_reception_timestamp()); + // TODO: add API for disassembly of Sample + let key_expr = sample.key_expr().clone(); + let payload = sample.payload().clone(); + let encoding = sample.encoding().clone(); + let kind = match sample.kind() { + SampleKind::Put => StorageSampleKind::Put(Value::new(payload).with_encoding(encoding)), + SampleKind::Delete => StorageSampleKind::Delete, + }; + StorageSample { + key_expr, + timestamp, + kind, + } + } +} + 
#[derive(Clone)] enum Update { Put(StoredData), Delete(Timestamp), } -impl From for Update { - fn from(sample: Sample) -> Self { - let mut sample = sample; - let timestamp = *sample.ensure_timestamp(); - match sample.kind() { - SampleKind::Put => Update::Put(StoredData { - value: Value::from(sample), - timestamp, +impl From for Update { + fn from(value: StorageSample) -> Self { + match value.kind { + StorageSampleKind::Put(data) => Update::Put(StoredData { + value: data, + timestamp: value.timestamp, }), - SampleKind::Delete => Update::Delete(timestamp), + StorageSampleKind::Delete => Update::Delete(value.timestamp), } } } @@ -78,8 +108,8 @@ impl TryFrom for Update { payload.push_zslice(slice.to_vec().into()); } let value = Value::new(payload).with_encoding(result.2); - let timestamp = Timestamp::from_str(&result.1).map_err(|_|"Error parsing timestamp")?; - if result .0.eq(&(SampleKind::Put).to_string()) { + let timestamp = Timestamp::from_str(&result.1).map_err(|_| "Error parsing timestamp")?; + if result.0.eq(&(SampleKind::Put).to_string()) { Ok(Update::Put(StoredData { value, timestamp })) } else { Ok(Update::Delete(timestamp)) @@ -90,7 +120,7 @@ impl TryFrom for Update { // implement to_string for Update impl ToString for Update { fn to_string(&self) -> String { - let result = match self { + let result = match self { Update::Put(data) => ( SampleKind::Put.to_string(), data.timestamp.to_string(), @@ -108,31 +138,41 @@ impl ToString for Update { } } -trait IntoSample { - fn into_sample(self, key_expr: IntoKeyExpr) -> Sample +trait IntoStorageSample { + fn into_sample(self, key_expr: IntoKeyExpr) -> StorageSample where IntoKeyExpr: Into>; } -impl IntoSample for StoredData { - fn into_sample(self, key_expr: IntoKeyExpr) -> Sample +impl IntoStorageSample for StoredData { + fn into_sample(self, key_expr: IntoKeyExpr) -> StorageSample where IntoKeyExpr: Into>, { - Sample::put(key_expr, self.value.payload) - .with_encoding(self.value.encoding) - 
.with_timestamp(self.timestamp) + StorageSample { + key_expr: key_expr.into(), + timestamp: self.timestamp, + kind: StorageSampleKind::Put(self.value), + } } } -impl IntoSample for Update { - fn into_sample(self, key_expr: IntoKeyExpr) -> Sample +impl IntoStorageSample for Update { + fn into_sample(self, key_expr: IntoKeyExpr) -> StorageSample where IntoKeyExpr: Into>, { match self { - Update::Put(data) => data.into_sample(key_expr), - Update::Delete(ts) => Sample::delete(key_expr).with_timestamp(ts), + Update::Put(data) => StorageSample { + key_expr: key_expr.into(), + timestamp: data.timestamp, + kind: StorageSampleKind::Put(data.value), + }, + Update::Delete(ts) => StorageSample { + key_expr: key_expr.into(), + timestamp: ts, + kind: StorageSampleKind::Delete, + }, } } } @@ -201,7 +241,8 @@ impl StorageService { serde_json::from_str(&saved_wc).unwrap(); // TODO: Remove unwrap let mut wildcard_updates = storage_service.wildcard_updates.write().await; for (k, data) in saved_wc { - wildcard_updates.insert(&k, Update::try_from(data).unwrap()); // TODO: Remove unwrap + wildcard_updates.insert(&k, Update::try_from(data).unwrap()); + // TODO: Remove unwrap } } } @@ -272,7 +313,7 @@ impl StorageService { log::error!("Sample {:?} is not timestamped. 
Please timestamp samples meant for replicated storage.", sample); } else { - self.process_sample(sample).await; + self.process_sample(sample.into()).await; } }, // on query on key_expr @@ -350,33 +391,32 @@ impl StorageService { // The storage should only simply save the key, sample pair while put and retrieve the same during get // the trimming during PUT and GET should be handled by the plugin - async fn process_sample(&self, sample: Sample) { + async fn process_sample(&self, sample: StorageSample) { log::trace!("[STORAGE] Processing sample: {:?}", sample); - let sample_timestamp = *sample.timestamp().unwrap_or(&new_reception_timestamp()); // if wildcard, update wildcard_updates - if sample.key_expr().is_wild() { + if sample.key_expr.is_wild() { self.register_wildcard_update(sample.clone()).await; } - let matching_keys = if sample.key_expr().is_wild() { - self.get_matching_keys(sample.key_expr()).await + let matching_keys = if sample.key_expr.is_wild() { + self.get_matching_keys(&sample.key_expr).await } else { - vec![sample.key_expr().clone().into()] + vec![sample.key_expr.clone().into()] }; log::trace!( "The list of keys matching `{}` is : {:?}", - sample.key_expr(), + sample.key_expr, matching_keys ); for k in matching_keys { if !self - .is_deleted(&k.clone(), sample.timestamp().unwrap()) + .is_deleted(&k.clone(), &sample.timestamp) .await && (self.capability.history.eq(&History::All) || (self.capability.history.eq(&History::Latest) - && self.is_latest(&k, sample.timestamp().unwrap()).await)) + && self.is_latest(&k, &sample.timestamp).await)) { log::trace!( "Sample `{:?}` identified as neded processing for key {}", @@ -386,14 +426,13 @@ impl StorageService { // there might be the case that the actual update was outdated due to a wild card update, but not stored yet in the storage. 
// get the relevant wild card entry and use that value and timestamp to update the storage let sample_to_store = - match self.ovderriding_wild_update(&k, &sample_timestamp).await { + match self.ovderriding_wild_update(&k, &sample.timestamp).await { Some(overriding_update) => overriding_update.into_sample(k.clone()), - None => sample.clone(), + None => sample.into(), }; - let timestamp = sample_to_store.timestamp().unwrap_or(&sample_timestamp); - let stripped_key = match self.strip_prefix(sample_to_store.key_expr()) { + let stripped_key = match self.strip_prefix(&sample_to_store.key_expr) { Ok(stripped) => stripped, Err(e) => { log::error!("{}", e); @@ -401,22 +440,21 @@ impl StorageService { } }; let mut storage = self.storage.lock().await; - let result = match sample.kind() { - SampleKind::Put => { + let result = match sample_to_store.kind { + StorageSampleKind::Put(data) => { storage .put( stripped_key, - Value::new(sample_to_store.payload().clone()) - .with_encoding(sample_to_store.encoding().clone()), - *sample_to_store.timestamp().unwrap(), + data, + sample_to_store.timestamp, ) .await - } - SampleKind::Delete => { + }, + StorageSampleKind::Delete => { // register a tombstone - self.mark_tombstone(&k, *timestamp).await; - storage.delete(stripped_key, *timestamp).await - } + self.mark_tombstone(&k, sample_to_store.timestamp).await; + storage.delete(stripped_key, sample_to_store.timestamp).await + }, }; drop(storage); if self.replication.is_some() @@ -428,7 +466,7 @@ impl StorageService { .as_ref() .unwrap() .log_propagation - .send((k.clone(), *sample_to_store.timestamp().unwrap())); + .send((k.clone(), sample_to_store.timestamp)); match sending { Ok(_) => (), Err(e) => { @@ -459,9 +497,9 @@ impl StorageService { } } - async fn register_wildcard_update(&self, sample: Sample) { + async fn register_wildcard_update(&self, sample: StorageSample) { // @TODO: change into a better store that does incremental writes - let key = sample.key_expr().clone(); + let key = 
sample.key_expr.clone(); let mut wildcards = self.wildcard_updates.write().await; wildcards.insert(&key, sample.into()); if self.capability.persistence.eq(&Persistence::Durable) { @@ -719,7 +757,7 @@ impl StorageService { while let Ok(reply) = replies.recv_async().await { match reply.sample { Ok(sample) => { - self.process_sample(sample).await; + self.process_sample(sample.into()).await; } Err(e) => log::warn!( "Storage '{}' received an error to align query: {:?}", From 780c82a3cae1115e624141c929639648e6902e16 Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Tue, 19 Mar 2024 17:35:38 +0100 Subject: [PATCH 028/124] some compile error fixes --- .../src/replica/aligner.rs | 18 ++++++++++-------- .../src/replica/storage.rs | 10 +++++----- 2 files changed, 15 insertions(+), 13 deletions(-) diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs b/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs index 0df648409d..3f672382f1 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs @@ -12,6 +12,9 @@ // ZettaScale Zenoh Team, // +use crate::replica::storage::StorageSampleKind; + +use super::storage::StorageSample; use super::{Digest, EraType, LogEntry, Snapshotter}; use super::{CONTENTS, ERA, INTERVALS, SUBINTERVALS}; use async_std::sync::{Arc, RwLock}; @@ -29,7 +32,7 @@ pub struct Aligner { digest_key: OwnedKeyExpr, snapshotter: Arc, rx_digest: Receiver<(String, Digest)>, - tx_sample: Sender, + tx_sample: Sender, digests_processed: RwLock>, } @@ -38,7 +41,7 @@ impl Aligner { session: Arc, digest_key: OwnedKeyExpr, rx_digest: Receiver<(String, Digest)>, - tx_sample: Sender, + tx_sample: Sender, snapshotter: Arc, ) { let aligner = Aligner { @@ -105,12 +108,11 @@ impl Aligner { log::trace!("[ALIGNER] Received queried samples: {missing_data:?}"); for (key, (ts, value)) in missing_data { - let Value { - payload, encoding, .. 
- } = value; - let sample = Sample::put(key, payload) - .with_encoding(encoding) - .with_timestamp(ts); + let sample = StorageSample { + key_expr: key.into(), + timestamp: ts, + kind: StorageSampleKind::Put(value), + }; log::debug!("[ALIGNER] Adding {:?} to storage", sample); self.tx_sample.send_async(sample).await.unwrap_or_else(|e| { log::error!("[ALIGNER] Error adding sample to storage: {}", e) diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs index 41a456e344..307ca95680 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs @@ -39,13 +39,13 @@ pub const WILDCARD_UPDATES_FILENAME: &str = "wildcard_updates"; pub const TOMBSTONE_FILENAME: &str = "tombstones"; #[derive(Clone, Debug)] -enum StorageSampleKind { +pub enum StorageSampleKind { Put(Value), Delete, } #[derive(Clone, Debug)] -struct StorageSample { +pub struct StorageSample { pub key_expr: KeyExpr<'static>, pub timestamp: Timestamp, pub kind: StorageSampleKind, @@ -179,7 +179,7 @@ impl IntoStorageSample for Update { pub struct ReplicationService { pub empty_start: bool, - pub aligner_updates: Receiver, + pub aligner_updates: Receiver, pub log_propagation: Sender<(OwnedKeyExpr, Timestamp)>, } @@ -361,7 +361,7 @@ impl StorageService { } }; sample.ensure_timestamp(); - self.process_sample(sample).await; + self.process_sample(sample.into()).await; }, // on query on key_expr query = storage_queryable.recv_async() => { @@ -429,7 +429,7 @@ impl StorageService { match self.ovderriding_wild_update(&k, &sample.timestamp).await { Some(overriding_update) => overriding_update.into_sample(k.clone()), - None => sample.into(), + None => sample.clone().into(), }; let stripped_key = match self.strip_prefix(&sample_to_store.key_expr) { From af0d167f6a1b0bcdc6d09074c4e2960f93034e90 Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Tue, 19 Mar 2024 17:54:31 
+0100 Subject: [PATCH 029/124] removed interceptor proxy --- plugins/zenoh-backend-traits/src/lib.rs | 50 +------------------------ 1 file changed, 1 insertion(+), 49 deletions(-) diff --git a/plugins/zenoh-backend-traits/src/lib.rs b/plugins/zenoh-backend-traits/src/lib.rs index 91e030d361..16c00f64af 100644 --- a/plugins/zenoh-backend-traits/src/lib.rs +++ b/plugins/zenoh-backend-traits/src/lib.rs @@ -135,9 +135,7 @@ use async_trait::async_trait; use const_format::concatcp; -use std::sync::Arc; -use zenoh::prelude::{KeyExpr, OwnedKeyExpr, Sample, Selector}; -use zenoh::queryable::ReplyBuilder; +use zenoh::prelude::OwnedKeyExpr; use zenoh::time::Timestamp; use zenoh::value::Value; pub use zenoh::Result as ZResult; @@ -274,49 +272,3 @@ pub trait Storage: Send + Sync { /// Remember to fetch the entry corresponding to the `None` key async fn get_all_entries(&self) -> ZResult, Timestamp)>>; } - -/// A wrapper around the [`zenoh::queryable::Query`] allowing to call the -/// OutgoingDataInterceptor (if any) before to send the reply -pub struct Query { - q: zenoh::queryable::Query, - interceptor: Option Sample + Send + Sync>>, -} - -impl Query { - pub fn new( - q: zenoh::queryable::Query, - interceptor: Option Sample + Send + Sync>>, - ) -> Query { - Query { q, interceptor } - } - - /// The full [`Selector`] of this Query. - #[inline(always)] - pub fn selector(&self) -> Selector<'_> { - self.q.selector() - } - - /// The key selector part of this Query. - #[inline(always)] - pub fn key_expr(&self) -> &KeyExpr<'static> { - self.q.key_expr() - } - - /// This Query's selector parameters. 
- #[inline(always)] - pub fn parameters(&self) -> &str { - self.q.parameters() - } - - /// Sends a Sample as a reply to this Query - pub fn reply(&self, sample: Sample) -> ReplyBuilder<'_> { - // Call outgoing intercerceptor - let sample = if let Some(ref interceptor) = self.interceptor { - interceptor(sample) - } else { - sample - }; - // Send reply - self.q.reply_sample(sample) - } -} From 067823d3aa514735e60b684b0807fedfcfeb8069 Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Wed, 20 Mar 2024 18:01:54 +0100 Subject: [PATCH 030/124] sample builders --- .../src/replica/storage.rs | 33 +- zenoh-ext/src/querying_subscriber.rs | 9 +- zenoh/src/sample.rs | 434 ++++++++++++------ zenoh/src/session.rs | 39 +- 4 files changed, 350 insertions(+), 165 deletions(-) diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs index 307ca95680..5aa6b92a99 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs @@ -24,10 +24,13 @@ use std::time::{SystemTime, UNIX_EPOCH}; use zenoh::buffers::ZBuf; use zenoh::prelude::r#async::*; use zenoh::query::ConsolidationMode; +use zenoh::sample::SampleBuilder; use zenoh::time::{new_reception_timestamp, Timestamp, NTP64}; use zenoh::{Result as ZResult, Session}; use zenoh_backend_traits::config::{GarbageCollectionConfig, StorageConfig}; -use zenoh_backend_traits::{Capability, History, Persistence, Storage, StorageInsertionResult, StoredData}; +use zenoh_backend_traits::{ + Capability, History, Persistence, Storage, StorageInsertionResult, StoredData, +}; use zenoh_keyexpr::key_expr::OwnedKeyExpr; use zenoh_keyexpr::keyexpr_tree::impls::KeyedSetProvider; use zenoh_keyexpr::keyexpr_tree::{support::NonWild, support::UnknownWildness, KeBoxTree}; @@ -353,14 +356,20 @@ impl StorageService { select!( // on sample for key_expr sample = storage_sub.recv_async() => { - let mut sample = match 
sample { + let sample = match sample { Ok(sample) => sample, Err(e) => { log::error!("Error in sample: {}", e); continue; } }; - sample.ensure_timestamp(); + let sample = if sample.timestamp().is_none() { + SampleBuilder::new(sample).with_current_timestamp().res_sync() + + + } else { + sample + }; self.process_sample(sample.into()).await; }, // on query on key_expr @@ -411,9 +420,7 @@ impl StorageService { ); for k in matching_keys { - if !self - .is_deleted(&k.clone(), &sample.timestamp) - .await + if !self.is_deleted(&k.clone(), &sample.timestamp).await && (self.capability.history.eq(&History::All) || (self.capability.history.eq(&History::Latest) && self.is_latest(&k, &sample.timestamp).await)) @@ -443,18 +450,16 @@ impl StorageService { let result = match sample_to_store.kind { StorageSampleKind::Put(data) => { storage - .put( - stripped_key, - data, - sample_to_store.timestamp, - ) + .put(stripped_key, data, sample_to_store.timestamp) .await - }, + } StorageSampleKind::Delete => { // register a tombstone self.mark_tombstone(&k, sample_to_store.timestamp).await; - storage.delete(stripped_key, sample_to_store.timestamp).await - }, + storage + .delete(stripped_key, sample_to_store.timestamp) + .await + } }; drop(storage); if self.replication.is_some() diff --git a/zenoh-ext/src/querying_subscriber.rs b/zenoh-ext/src/querying_subscriber.rs index 8cb5480e58..7ca2730f57 100644 --- a/zenoh-ext/src/querying_subscriber.rs +++ b/zenoh-ext/src/querying_subscriber.rs @@ -20,6 +20,7 @@ use std::time::Duration; use zenoh::handlers::{locked, DefaultHandler}; use zenoh::prelude::r#async::*; use zenoh::query::{QueryConsolidation, QueryTarget, ReplyKeyExpr}; +use zenoh::sample::SampleBuilder; use zenoh::subscriber::{Reliability, Subscriber}; use zenoh::time::Timestamp; use zenoh::Result as ZResult; @@ -655,7 +656,7 @@ impl<'a, Receiver> FetchingSubscriber<'a, Receiver> { let sub_callback = { let state = state.clone(); let callback = callback.clone(); - move |mut s| { + move |s| 
{ let state = &mut zlock!(state); if state.pending_fetches == 0 { callback(s); @@ -663,7 +664,11 @@ impl<'a, Receiver> FetchingSubscriber<'a, Receiver> { log::trace!("Sample received while fetch in progress: push it to merge_queue"); // ensure the sample has a timestamp, thus it will always be sorted into the MergeQueue // after any timestamped Sample possibly coming from a fetch reply. - s.ensure_timestamp(); + let s = if s.timestamp().is_none() { + SampleBuilder::new(s).with_current_timestamp().res_sync() + } else { + s + }; state.merge_queue.push(s); } } diff --git a/zenoh/src/sample.rs b/zenoh/src/sample.rs index 757d65afd8..395191a0d6 100644 --- a/zenoh/src/sample.rs +++ b/zenoh/src/sample.rs @@ -20,10 +20,8 @@ use crate::time::{new_reception_timestamp, Timestamp}; use crate::Priority; #[zenoh_macros::unstable] use serde::Serialize; -use std::{ - convert::{TryFrom, TryInto}, - fmt, -}; +use std::{convert::TryFrom, fmt}; +use zenoh_core::{zresult, AsyncResolve, Resolvable, SyncResolve}; use zenoh_protocol::core::EntityGlobalId; use zenoh_protocol::{core::CongestionControl, network::push::ext::QoSType}; @@ -57,6 +55,83 @@ pub(crate) struct DataInfo { pub qos: QoS, } +pub(crate) trait DataInfoIntoSample { + fn into_sample( + self, + key_expr: IntoKeyExpr, + payload: IntoPayload, + #[cfg(feature = "unstable")] attachment: Option, + ) -> Sample + where + IntoKeyExpr: Into>, + IntoPayload: Into; +} + +impl DataInfoIntoSample for DataInfo { + // TODO: this is internal function. + // Technically it may create invalid sample (e.g. a delete sample with a payload and encoding) + // The test for it is intentionally not added to avoid inserting extra "if" into hot path. + // This need to be additionally investigated and measured. 
+ #[inline] + fn into_sample( + self, + key_expr: IntoKeyExpr, + payload: IntoPayload, + #[cfg(feature = "unstable")] attachment: Option, + ) -> Sample + where + IntoKeyExpr: Into>, + IntoPayload: Into, + { + Sample { + key_expr: key_expr.into(), + payload: payload.into(), + kind: self.kind, + encoding: self.encoding.unwrap_or_default(), + timestamp: self.timestamp, + qos: self.qos, + #[cfg(feature = "unstable")] + source_info: SourceInfo { + source_id: self.source_id, + source_sn: self.source_sn, + }, + #[cfg(feature = "unstable")] + attachment, + } + } +} + +impl DataInfoIntoSample for Option { + #[inline] + fn into_sample( + self, + key_expr: IntoKeyExpr, + payload: IntoPayload, + #[cfg(feature = "unstable")] attachment: Option, + ) -> Sample + where + IntoKeyExpr: Into>, + IntoPayload: Into, + { + if let Some(data_info) = self { + data_info.into_sample(key_expr, payload, attachment) + } else { + Sample { + key_expr: key_expr.into(), + payload: payload.into(), + kind: SampleKind::Put, + encoding: Encoding::default(), + timestamp: None, + qos: QoS::default(), + #[cfg(feature = "unstable")] + source_info: SourceInfo::empty(), + #[cfg(feature = "unstable")] + attachment, + } + } + } +} + /// Informations on the source of a zenoh [`Sample`]. #[zenoh_macros::unstable] #[derive(Debug, Clone)] @@ -359,125 +434,275 @@ impl TryFrom for SampleKind { #[zenoh_macros::unstable] pub use attachment::{Attachment, AttachmentBuilder, AttachmentIterator}; -/// A zenoh sample. 
-#[non_exhaustive] -#[derive(Clone, Debug)] -pub struct Sample { - pub(crate) key_expr: KeyExpr<'static>, - pub(crate) payload: Payload, - pub(crate) kind: SampleKind, - pub(crate) encoding: Encoding, - pub(crate) timestamp: Option, - pub(crate) qos: QoS, +pub struct SampleBuilder(Sample); - #[cfg(feature = "unstable")] - pub(crate) source_info: SourceInfo, +impl SampleBuilder { + pub fn new(sample: Sample) -> Self { + Self(sample) + } - #[cfg(feature = "unstable")] - pub(crate) attachment: Option, + pub fn with_keyexpr(mut self, key_expr: IntoKeyExpr) -> Self + where + IntoKeyExpr: Into>, + { + self.0.key_expr = key_expr.into(); + self + } + + // pub(crate) fn with_kind(mut self, kind: SampleKind) -> Self { + // self.0.kind = kind; + // self + // } + + pub(crate) fn with_encoding(mut self, encoding: Encoding) -> Self { + self.0.encoding = encoding; + self + } + + pub(crate) fn with_payload(mut self, payload: IntoPayload) -> Self + where + IntoPayload: Into, + { + self.0.payload = payload.into(); + self + } + + pub fn with_timestamp(mut self, timestamp: Timestamp) -> Self { + self.0.timestamp = Some(timestamp); + self + } + + pub fn with_current_timestamp(mut self) -> Self { + self.0.timestamp = Some(new_reception_timestamp()); + self + } + + pub fn with_qos(mut self, qos: QoS) -> Self { + self.0.qos = qos; + self + } + + pub fn with_source_info(mut self, source_info: SourceInfo) -> Self { + self.0.source_info = source_info; + self + } + + pub fn with_attachment(mut self, attachment: Attachment) -> Self { + self.0.attachment = Some(attachment); + self + } } -impl Sample { - /// Creates a "put" Sample. 
- #[inline] - pub fn put(key_expr: IntoKeyExpr, payload: IntoPayload) -> Self +pub struct PutSampleBuilder(SampleBuilder); + +impl PutSampleBuilder { + pub fn new(key_expr: IntoKeyExpr, payload: IntoPayload) -> Self where IntoKeyExpr: Into>, IntoPayload: Into, { - Sample { + Self(SampleBuilder::new(Sample { key_expr: key_expr.into(), payload: payload.into(), - encoding: Encoding::default(), kind: SampleKind::Put, + encoding: Encoding::default(), timestamp: None, qos: QoS::default(), #[cfg(feature = "unstable")] source_info: SourceInfo::empty(), #[cfg(feature = "unstable")] attachment: None, - } + })) } - /// Creates a "delete" Sample. - #[inline] - pub fn delete(key_expr: IntoKeyExpr) -> Self + pub fn with_payload(mut self, payload: IntoPayload) -> Self + where + IntoPayload: Into, + { + self.0 = self.0.with_payload(payload); + self + } + + pub fn with_encoding(mut self, encoding: Encoding) -> Self { + self.0 = self.0.with_encoding(encoding); + self + } + + pub fn with_timestamp(mut self, timestamp: Timestamp) -> Self { + self.0 = self.0.with_timestamp(timestamp); + self + } + + pub fn with_current_timestamp(mut self) -> Self { + self.0 = self.0.with_current_timestamp(); + self + } + + pub fn with_qos(mut self, qos: QoS) -> Self { + self.0 = self.0.with_qos(qos); + self + } + + #[zenoh_macros::unstable] + pub fn with_source_info(mut self, source_info: SourceInfo) -> Self { + self.0 = self.0.with_source_info(source_info); + self + } + + #[zenoh_macros::unstable] + pub fn with_attachment(mut self, attachment: Attachment) -> Self { + self.0 = self.0.with_attachment(attachment); + self + } +} + +pub struct DeleteSampleBuilder(SampleBuilder); + +impl DeleteSampleBuilder { + pub fn new(key_expr: IntoKeyExpr) -> Self where IntoKeyExpr: Into>, { - Sample { + Self(SampleBuilder::new(Sample { key_expr: key_expr.into(), payload: Payload::empty(), - encoding: Encoding::default(), kind: SampleKind::Delete, + encoding: Encoding::default(), timestamp: None, qos: QoS::default(), 
#[cfg(feature = "unstable")] source_info: SourceInfo::empty(), #[cfg(feature = "unstable")] attachment: None, - } + })) } - - /// Attempts to create a "put" Sample - #[inline] - pub fn try_put( - key_expr: TryIntoKeyExpr, - payload: TryIntoPayload, - ) -> Result + pub fn with_keyexpr(mut self, key_expr: IntoKeyExpr) -> Self where - TryIntoKeyExpr: TryInto>, - >>::Error: Into, - TryIntoPayload: TryInto, - >::Error: Into, + IntoKeyExpr: Into>, { - let key_expr: KeyExpr<'static> = key_expr.try_into().map_err(Into::into)?; - let payload: Payload = payload.try_into().map_err(Into::into)?; - Ok(Self::put(key_expr, payload)) + self.0 = self.0.with_keyexpr(key_expr); + self + } + pub fn with_timestamp(mut self, timestamp: Timestamp) -> Self { + self.0 = self.0.with_timestamp(timestamp); + self + } + pub fn with_current_timestamp(mut self) -> Self { + self.0 = self.0.with_current_timestamp(); + self } + pub fn with_qos(mut self, qos: QoS) -> Self { + self.0 = self.0.with_qos(qos); + self + } + #[zenoh_macros::unstable] + pub fn with_source_info(mut self, source_info: SourceInfo) -> Self { + self.0 = self.0.with_source_info(source_info); + self + } + #[zenoh_macros::unstable] + pub fn with_attachment(mut self, attachment: Attachment) -> Self { + self.0 = self.0.with_attachment(attachment); + self + } +} - /// Attempts to create a "delete" Sample - #[inline] - pub fn try_delete( - key_expr: TryIntoKeyExpr, - ) -> Result - where - TryIntoKeyExpr: TryInto>, - >>::Error: Into, - { - let key_expr: KeyExpr<'static> = key_expr.try_into().map_err(Into::into)?; - Ok(Self::delete(key_expr)) +impl From for SampleBuilder { + fn from(sample: Sample) -> Self { + SampleBuilder(sample) } +} - /// Creates a new Sample with optional data info. 
- #[inline] - pub(crate) fn with_info(mut self, mut data_info: Option) -> Self { - if let Some(mut data_info) = data_info.take() { - self.kind = data_info.kind; - if let Some(encoding) = data_info.encoding.take() { - self.encoding = encoding; - } - self.qos = data_info.qos; - self.timestamp = data_info.timestamp; - #[cfg(feature = "unstable")] - { - self.source_info = SourceInfo { - source_id: data_info.source_id, - source_sn: data_info.source_sn, - }; - } +impl TryFrom for PutSampleBuilder { + type Error = zresult::Error; + fn try_from(sample: Sample) -> Result { + if sample.kind != SampleKind::Put { + bail!("Sample is not a put sample") } - self + Ok(Self(SampleBuilder(sample))) } +} - /// Sets the encoding of this Sample - #[inline] - pub fn with_encoding(mut self, encoding: Encoding) -> Self { - assert!(self.kind == SampleKind::Put, "Cannot set encoding on a delete sample"); - self.encoding = encoding; - self +impl TryFrom for DeleteSampleBuilder { + type Error = zresult::Error; + fn try_from(sample: Sample) -> Result { + if sample.kind != SampleKind::Delete { + bail!("Sample is not a delete sample") + } + Ok(Self(SampleBuilder(sample))) + } +} + +impl Resolvable for SampleBuilder { + type To = Sample; +} + +impl Resolvable for PutSampleBuilder { + type To = Sample; +} + +impl Resolvable for DeleteSampleBuilder { + type To = Sample; +} + +impl SyncResolve for SampleBuilder { + fn res_sync(self) -> Self::To { + self.0 } +} + +impl SyncResolve for PutSampleBuilder { + fn res_sync(self) -> Self::To { + self.0.res_sync() + } +} + +impl SyncResolve for DeleteSampleBuilder { + fn res_sync(self) -> Self::To { + self.0.res_sync() + } +} +impl AsyncResolve for SampleBuilder { + type Future = futures::future::Ready; + fn res_async(self) -> Self::Future { + futures::future::ready(self.0) + } +} + +impl AsyncResolve for PutSampleBuilder { + type Future = futures::future::Ready; + fn res_async(self) -> Self::Future { + self.0.res_async() + } +} + +impl AsyncResolve for 
DeleteSampleBuilder { + type Future = futures::future::Ready; + fn res_async(self) -> Self::Future { + self.0.res_async() + } +} + +/// A zenoh sample. +#[non_exhaustive] +#[derive(Clone, Debug)] +pub struct Sample { + pub(crate) key_expr: KeyExpr<'static>, + pub(crate) payload: Payload, + pub(crate) kind: SampleKind, + pub(crate) encoding: Encoding, + pub(crate) timestamp: Option, + pub(crate) qos: QoS, + + #[cfg(feature = "unstable")] + pub(crate) source_info: SourceInfo, + + #[cfg(feature = "unstable")] + pub(crate) attachment: Option, +} + +impl Sample { /// Gets the key expression on which this Sample was published. #[inline] pub fn key_expr(&self) -> &KeyExpr<'static> { @@ -508,15 +733,6 @@ impl Sample { self.timestamp.as_ref() } - /// Sets the timestamp of this Sample. - #[inline] - #[doc(hidden)] - #[zenoh_macros::unstable] - pub fn with_timestamp(mut self, timestamp: Timestamp) -> Self { - self.timestamp = Some(timestamp); - self - } - /// Gets the quality of service settings this Sample was sent with. #[inline] pub fn qos(&self) -> &QoS { @@ -530,52 +746,12 @@ impl Sample { &self.source_info } - /// Sets the source info of this Sample. - #[zenoh_macros::unstable] - #[inline] - pub fn with_source_info(mut self, source_info: SourceInfo) -> Self { - self.source_info = source_info; - self - } - - /// Ensure that an associated Timestamp is present in this Sample. - /// If not, a new one is created with the current system time and 0x00 as id. - /// Get the timestamp of this sample (either existing one or newly created) - #[inline] - #[doc(hidden)] - #[zenoh_macros::unstable] - pub fn ensure_timestamp(&mut self) -> &Timestamp { - if let Some(ref timestamp) = self.timestamp { - timestamp - } else { - let timestamp = new_reception_timestamp(); - self.timestamp = Some(timestamp); - self.timestamp.as_ref().unwrap() - } - } - /// Gets the sample attachment: a map of key-value pairs, where each key and value are byte-slices. 
#[zenoh_macros::unstable] #[inline] pub fn attachment(&self) -> Option<&Attachment> { self.attachment.as_ref() } - - /// Gets the mutable sample attachment: a map of key-value pairs, where each key and value are byte-slices. - #[inline] - #[doc(hidden)] - #[zenoh_macros::unstable] - pub fn attachment_mut(&mut self) -> &mut Option { - &mut self.attachment - } - - #[inline] - #[doc(hidden)] - #[zenoh_macros::unstable] - pub fn with_attachment(mut self, attachment: Attachment) -> Self { - self.attachment = Some(attachment); - self - } } impl From for Value { diff --git a/zenoh/src/session.rs b/zenoh/src/session.rs index 93d1e2fb9d..0a63d82354 100644 --- a/zenoh/src/session.rs +++ b/zenoh/src/session.rs @@ -32,6 +32,7 @@ use crate::queryable::*; #[cfg(feature = "unstable")] use crate::sample::Attachment; use crate::sample::DataInfo; +use crate::sample::DataInfoIntoSample; use crate::sample::QoS; use crate::selector::TIME_RANGE_KEY; use crate::subscriber::*; @@ -1537,21 +1538,21 @@ impl Session { drop(state); let zenoh_collections::single_or_vec::IntoIter { drain, last } = callbacks.into_iter(); for (cb, key_expr) in drain { - #[allow(unused_mut)] - let mut sample = Sample::put(key_expr, payload.clone()).with_info(info.clone()); - #[cfg(feature = "unstable")] - { - sample.attachment = attachment.clone(); - } + let sample = info.clone().into_sample( + key_expr, + payload.clone(), + #[cfg(feature = "unstable")] + attachment.clone(), + ); cb(sample); } if let Some((cb, key_expr)) = last { - #[allow(unused_mut)] - let mut sample = Sample::put(key_expr, payload).with_info(info); - #[cfg(feature = "unstable")] - { - sample.attachment = attachment; - } + let sample = info.into_sample( + key_expr, + payload, + #[cfg(feature = "unstable")] + attachment.clone(), + ); cb(sample); } } @@ -2254,14 +2255,12 @@ impl Primitives for Session { attachment: _attachment.map(Into::into), }, }; - - #[allow(unused_mut)] - let mut sample = - Sample::put(key_expr.into_owned(), 
payload).with_info(Some(info)); - #[cfg(feature = "unstable")] - { - sample.attachment = attachment; - } + let sample = info.into_sample( + key_expr.into_owned(), + payload, + #[cfg(feature = "unstable")] + attachment, + ); let new_reply = Reply { sample: Ok(sample), replier_id: ZenohId::rand(), // TODO From 4f1ba2f11fabc36a9c6900fee77107fd256fc14f Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Wed, 20 Mar 2024 19:32:38 +0100 Subject: [PATCH 031/124] compiles --- .../src/replica/storage.rs | 37 ++++++++++++------- zenoh/src/sample.rs | 29 +++++++++++++++ 2 files changed, 52 insertions(+), 14 deletions(-) diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs index 5aa6b92a99..f90ea01754 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs @@ -28,9 +28,8 @@ use zenoh::sample::SampleBuilder; use zenoh::time::{new_reception_timestamp, Timestamp, NTP64}; use zenoh::{Result as ZResult, Session}; use zenoh_backend_traits::config::{GarbageCollectionConfig, StorageConfig}; -use zenoh_backend_traits::{ - Capability, History, Persistence, Storage, StorageInsertionResult, StoredData, -}; +use zenoh_backend_traits::{Capability, History, Persistence, StorageInsertionResult, StoredData}; +use zenoh_core::SyncResolve; use zenoh_keyexpr::key_expr::OwnedKeyExpr; use zenoh_keyexpr::keyexpr_tree::impls::KeyedSetProvider; use zenoh_keyexpr::keyexpr_tree::{support::NonWild, support::UnknownWildness, KeBoxTree}; @@ -274,7 +273,12 @@ impl StorageService { t.add_async(gc).await; // subscribe on key_expr - let storage_sub = match self.session.declare_subscriber(&self.key_expr).res().await { + let storage_sub = match self + .session + .declare_subscriber(&self.key_expr) + .res_async() + .await + { Ok(storage_sub) => storage_sub, Err(e) => { log::error!("Error starting storage '{}': {}", self.name, e); @@ -287,7 +291,7 @@ 
impl StorageService { .session .declare_queryable(&self.key_expr) .complete(self.complete) - .res() + .res_async() .await { Ok(storage_queryable) => storage_queryable, @@ -365,8 +369,6 @@ impl StorageService { }; let sample = if sample.timestamp().is_none() { SampleBuilder::new(sample).with_current_timestamp().res_sync() - - } else { sample }; @@ -622,8 +624,12 @@ impl StorageService { match storage.get(stripped_key, q.parameters()).await { Ok(stored_data) => { for entry in stored_data { - let sample = entry.into_sample(key.clone()); - if let Err(e) = q.reply_sample(sample).res().await { + if let Err(e) = q + .reply(key.clone(), entry.value.payload) + .with_timestamp(entry.timestamp) + .res_async() + .await + { log::warn!( "Storage '{}' raised an error replying a query: {}", self.name, @@ -652,10 +658,13 @@ impl StorageService { let Value { payload, encoding, .. } = entry.value; - let sample = Sample::put(q.key_expr().clone(), payload) + if let Err(e) = q + .reply(q.key_expr().clone(), payload) .with_encoding(encoding) - .with_timestamp(entry.timestamp); - if let Err(e) = q.reply_sample(sample).res().await { + .with_timestamp(entry.timestamp) + .res_async() + .await + { log::warn!( "Storage '{}' raised an error replying a query: {}", self.name, @@ -668,7 +677,7 @@ impl StorageService { let err_message = format!("Storage '{}' raised an error on query: {}", self.name, e); log::warn!("{}", err_message); - if let Err(e) = q.reply_err(err_message).res().await { + if let Err(e) = q.reply_err(err_message).res_async().await { log::warn!( "Storage '{}' raised an error replying a query: {}", self.name, @@ -750,7 +759,7 @@ impl StorageService { .get(KeyExpr::from(&self.key_expr).with_parameters("_time=[..]")) .target(QueryTarget::All) .consolidation(ConsolidationMode::None) - .res() + .res_async() .await { Ok(replies) => replies, diff --git a/zenoh/src/sample.rs b/zenoh/src/sample.rs index 395191a0d6..29d46cca3e 100644 --- a/zenoh/src/sample.rs +++ b/zenoh/src/sample.rs @@ 
-684,6 +684,19 @@ impl AsyncResolve for DeleteSampleBuilder { } } +pub struct SampleDecomposed { + pub key_expr: KeyExpr<'static>, + pub payload: Payload, + pub kind: SampleKind, + pub encoding: Encoding, + pub timestamp: Option, + pub qos: QoS, + #[cfg(feature = "unstable")] + pub source_info: SourceInfo, + #[cfg(feature = "unstable")] + pub attachment: Option, +} + /// A zenoh sample. #[non_exhaustive] #[derive(Clone, Debug)] @@ -752,6 +765,22 @@ impl Sample { pub fn attachment(&self) -> Option<&Attachment> { self.attachment.as_ref() } + + /// Decomposes the Sample into its components + pub fn decompose(self) -> SampleDecomposed { + SampleDecomposed { + key_expr: self.key_expr, + payload: self.payload, + kind: self.kind, + encoding: self.encoding, + timestamp: self.timestamp, + qos: self.qos, + #[cfg(feature = "unstable")] + source_info: self.source_info, + #[cfg(feature = "unstable")] + attachment: self.attachment, + } + } } impl From for Value { From d7cb97a3705b82364a2b48557d025aa3bff156da Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Sat, 23 Mar 2024 00:41:17 +0100 Subject: [PATCH 032/124] SampleBuilderTrait --- .../src/replica/storage.rs | 4 +- zenoh-ext/src/querying_subscriber.rs | 8 +- zenoh/src/lib.rs | 1 + zenoh/src/sample.rs | 282 +--------------- zenoh/src/sample_builder.rs | 306 ++++++++++++++++++ 5 files changed, 315 insertions(+), 286 deletions(-) create mode 100644 zenoh/src/sample_builder.rs diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs index f90ea01754..576f6adec2 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs @@ -24,7 +24,7 @@ use std::time::{SystemTime, UNIX_EPOCH}; use zenoh::buffers::ZBuf; use zenoh::prelude::r#async::*; use zenoh::query::ConsolidationMode; -use zenoh::sample::SampleBuilder; +use zenoh::sample_builder::{SampleBuilder, SampleBuilderTrait}; use 
zenoh::time::{new_reception_timestamp, Timestamp, NTP64}; use zenoh::{Result as ZResult, Session}; use zenoh_backend_traits::config::{GarbageCollectionConfig, StorageConfig}; @@ -368,7 +368,7 @@ impl StorageService { } }; let sample = if sample.timestamp().is_none() { - SampleBuilder::new(sample).with_current_timestamp().res_sync() + SampleBuilder::from(sample).with_timestamp(new_reception_timestamp()).res_sync() } else { sample }; diff --git a/zenoh-ext/src/querying_subscriber.rs b/zenoh-ext/src/querying_subscriber.rs index 7ca2730f57..eb6d6e9516 100644 --- a/zenoh-ext/src/querying_subscriber.rs +++ b/zenoh-ext/src/querying_subscriber.rs @@ -20,9 +20,9 @@ use std::time::Duration; use zenoh::handlers::{locked, DefaultHandler}; use zenoh::prelude::r#async::*; use zenoh::query::{QueryConsolidation, QueryTarget, ReplyKeyExpr}; -use zenoh::sample::SampleBuilder; +use zenoh::sample_builder::{SampleBuilder, SampleBuilderTrait}; use zenoh::subscriber::{Reliability, Subscriber}; -use zenoh::time::Timestamp; +use zenoh::time::{new_reception_timestamp, Timestamp}; use zenoh::Result as ZResult; use zenoh::SessionRef; use zenoh_core::{zlock, AsyncResolve, Resolvable, SyncResolve}; @@ -665,7 +665,9 @@ impl<'a, Receiver> FetchingSubscriber<'a, Receiver> { // ensure the sample has a timestamp, thus it will always be sorted into the MergeQueue // after any timestamped Sample possibly coming from a fetch reply. 
let s = if s.timestamp().is_none() { - SampleBuilder::new(s).with_current_timestamp().res_sync() + SampleBuilder::from(s) + .with_timestamp(new_reception_timestamp()) + .res_sync() } else { s }; diff --git a/zenoh/src/lib.rs b/zenoh/src/lib.rs index eb1ba1bcd1..8618cb9a88 100644 --- a/zenoh/src/lib.rs +++ b/zenoh/src/lib.rs @@ -146,6 +146,7 @@ pub mod publication; pub mod query; pub mod queryable; pub mod sample; +pub mod sample_builder; pub mod subscriber; pub mod value; #[cfg(feature = "shared-memory")] diff --git a/zenoh/src/sample.rs b/zenoh/src/sample.rs index 29d46cca3e..2c98d5ead1 100644 --- a/zenoh/src/sample.rs +++ b/zenoh/src/sample.rs @@ -16,12 +16,11 @@ use crate::encoding::Encoding; use crate::payload::Payload; use crate::prelude::{KeyExpr, Value}; -use crate::time::{new_reception_timestamp, Timestamp}; +use crate::time::Timestamp; use crate::Priority; #[zenoh_macros::unstable] use serde::Serialize; use std::{convert::TryFrom, fmt}; -use zenoh_core::{zresult, AsyncResolve, Resolvable, SyncResolve}; use zenoh_protocol::core::EntityGlobalId; use zenoh_protocol::{core::CongestionControl, network::push::ext::QoSType}; @@ -434,269 +433,6 @@ impl TryFrom for SampleKind { #[zenoh_macros::unstable] pub use attachment::{Attachment, AttachmentBuilder, AttachmentIterator}; -pub struct SampleBuilder(Sample); - -impl SampleBuilder { - pub fn new(sample: Sample) -> Self { - Self(sample) - } - - pub fn with_keyexpr(mut self, key_expr: IntoKeyExpr) -> Self - where - IntoKeyExpr: Into>, - { - self.0.key_expr = key_expr.into(); - self - } - - // pub(crate) fn with_kind(mut self, kind: SampleKind) -> Self { - // self.0.kind = kind; - // self - // } - - pub(crate) fn with_encoding(mut self, encoding: Encoding) -> Self { - self.0.encoding = encoding; - self - } - - pub(crate) fn with_payload(mut self, payload: IntoPayload) -> Self - where - IntoPayload: Into, - { - self.0.payload = payload.into(); - self - } - - pub fn with_timestamp(mut self, timestamp: Timestamp) -> Self 
{ - self.0.timestamp = Some(timestamp); - self - } - - pub fn with_current_timestamp(mut self) -> Self { - self.0.timestamp = Some(new_reception_timestamp()); - self - } - - pub fn with_qos(mut self, qos: QoS) -> Self { - self.0.qos = qos; - self - } - - pub fn with_source_info(mut self, source_info: SourceInfo) -> Self { - self.0.source_info = source_info; - self - } - - pub fn with_attachment(mut self, attachment: Attachment) -> Self { - self.0.attachment = Some(attachment); - self - } -} - -pub struct PutSampleBuilder(SampleBuilder); - -impl PutSampleBuilder { - pub fn new(key_expr: IntoKeyExpr, payload: IntoPayload) -> Self - where - IntoKeyExpr: Into>, - IntoPayload: Into, - { - Self(SampleBuilder::new(Sample { - key_expr: key_expr.into(), - payload: payload.into(), - kind: SampleKind::Put, - encoding: Encoding::default(), - timestamp: None, - qos: QoS::default(), - #[cfg(feature = "unstable")] - source_info: SourceInfo::empty(), - #[cfg(feature = "unstable")] - attachment: None, - })) - } - - pub fn with_payload(mut self, payload: IntoPayload) -> Self - where - IntoPayload: Into, - { - self.0 = self.0.with_payload(payload); - self - } - - pub fn with_encoding(mut self, encoding: Encoding) -> Self { - self.0 = self.0.with_encoding(encoding); - self - } - - pub fn with_timestamp(mut self, timestamp: Timestamp) -> Self { - self.0 = self.0.with_timestamp(timestamp); - self - } - - pub fn with_current_timestamp(mut self) -> Self { - self.0 = self.0.with_current_timestamp(); - self - } - - pub fn with_qos(mut self, qos: QoS) -> Self { - self.0 = self.0.with_qos(qos); - self - } - - #[zenoh_macros::unstable] - pub fn with_source_info(mut self, source_info: SourceInfo) -> Self { - self.0 = self.0.with_source_info(source_info); - self - } - - #[zenoh_macros::unstable] - pub fn with_attachment(mut self, attachment: Attachment) -> Self { - self.0 = self.0.with_attachment(attachment); - self - } -} - -pub struct DeleteSampleBuilder(SampleBuilder); - -impl 
DeleteSampleBuilder { - pub fn new(key_expr: IntoKeyExpr) -> Self - where - IntoKeyExpr: Into>, - { - Self(SampleBuilder::new(Sample { - key_expr: key_expr.into(), - payload: Payload::empty(), - kind: SampleKind::Delete, - encoding: Encoding::default(), - timestamp: None, - qos: QoS::default(), - #[cfg(feature = "unstable")] - source_info: SourceInfo::empty(), - #[cfg(feature = "unstable")] - attachment: None, - })) - } - pub fn with_keyexpr(mut self, key_expr: IntoKeyExpr) -> Self - where - IntoKeyExpr: Into>, - { - self.0 = self.0.with_keyexpr(key_expr); - self - } - pub fn with_timestamp(mut self, timestamp: Timestamp) -> Self { - self.0 = self.0.with_timestamp(timestamp); - self - } - pub fn with_current_timestamp(mut self) -> Self { - self.0 = self.0.with_current_timestamp(); - self - } - pub fn with_qos(mut self, qos: QoS) -> Self { - self.0 = self.0.with_qos(qos); - self - } - #[zenoh_macros::unstable] - pub fn with_source_info(mut self, source_info: SourceInfo) -> Self { - self.0 = self.0.with_source_info(source_info); - self - } - #[zenoh_macros::unstable] - pub fn with_attachment(mut self, attachment: Attachment) -> Self { - self.0 = self.0.with_attachment(attachment); - self - } -} - -impl From for SampleBuilder { - fn from(sample: Sample) -> Self { - SampleBuilder(sample) - } -} - -impl TryFrom for PutSampleBuilder { - type Error = zresult::Error; - fn try_from(sample: Sample) -> Result { - if sample.kind != SampleKind::Put { - bail!("Sample is not a put sample") - } - Ok(Self(SampleBuilder(sample))) - } -} - -impl TryFrom for DeleteSampleBuilder { - type Error = zresult::Error; - fn try_from(sample: Sample) -> Result { - if sample.kind != SampleKind::Delete { - bail!("Sample is not a delete sample") - } - Ok(Self(SampleBuilder(sample))) - } -} - -impl Resolvable for SampleBuilder { - type To = Sample; -} - -impl Resolvable for PutSampleBuilder { - type To = Sample; -} - -impl Resolvable for DeleteSampleBuilder { - type To = Sample; -} - -impl 
SyncResolve for SampleBuilder { - fn res_sync(self) -> Self::To { - self.0 - } -} - -impl SyncResolve for PutSampleBuilder { - fn res_sync(self) -> Self::To { - self.0.res_sync() - } -} - -impl SyncResolve for DeleteSampleBuilder { - fn res_sync(self) -> Self::To { - self.0.res_sync() - } -} - -impl AsyncResolve for SampleBuilder { - type Future = futures::future::Ready; - fn res_async(self) -> Self::Future { - futures::future::ready(self.0) - } -} - -impl AsyncResolve for PutSampleBuilder { - type Future = futures::future::Ready; - fn res_async(self) -> Self::Future { - self.0.res_async() - } -} - -impl AsyncResolve for DeleteSampleBuilder { - type Future = futures::future::Ready; - fn res_async(self) -> Self::Future { - self.0.res_async() - } -} - -pub struct SampleDecomposed { - pub key_expr: KeyExpr<'static>, - pub payload: Payload, - pub kind: SampleKind, - pub encoding: Encoding, - pub timestamp: Option, - pub qos: QoS, - #[cfg(feature = "unstable")] - pub source_info: SourceInfo, - #[cfg(feature = "unstable")] - pub attachment: Option, -} - /// A zenoh sample. 
#[non_exhaustive] #[derive(Clone, Debug)] @@ -765,22 +501,6 @@ impl Sample { pub fn attachment(&self) -> Option<&Attachment> { self.attachment.as_ref() } - - /// Decomposes the Sample into its components - pub fn decompose(self) -> SampleDecomposed { - SampleDecomposed { - key_expr: self.key_expr, - payload: self.payload, - kind: self.kind, - encoding: self.encoding, - timestamp: self.timestamp, - qos: self.qos, - #[cfg(feature = "unstable")] - source_info: self.source_info, - #[cfg(feature = "unstable")] - attachment: self.attachment, - } - } } impl From for Value { diff --git a/zenoh/src/sample_builder.rs b/zenoh/src/sample_builder.rs new file mode 100644 index 0000000000..a9cffb22d1 --- /dev/null +++ b/zenoh/src/sample_builder.rs @@ -0,0 +1,306 @@ +// +// Copyright (c) 2024 ZettaScale Technology +// +// This program and the accompanying materials are made available under the +// terms of the Eclipse Public License 2.0 which is available at +// http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 +// which is available at https://www.apache.org/licenses/LICENSE-2.0. 
+// +// SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 +// +// Contributors: +// ZettaScale Zenoh Team, +// + +use crate::sample::Attachment; +use crate::sample::QoS; +use crate::sample::SourceInfo; +use crate::Encoding; +use crate::KeyExpr; +use crate::Payload; +use crate::Priority; +use crate::Sample; +use crate::SampleKind; +use uhlc::Timestamp; +use zenoh_core::zresult; +use zenoh_core::AsyncResolve; +use zenoh_core::Resolvable; +use zenoh_core::SyncResolve; +use zenoh_protocol::core::CongestionControl; + +pub trait SampleBuilderTrait { + fn with_keyexpr(self, key_expr: IntoKeyExpr) -> Self + where + IntoKeyExpr: Into>; + fn with_timestamp(self, timestamp: Timestamp) -> Self; + #[zenoh_macros::unstable] + fn with_source_info(self, source_info: SourceInfo) -> Self; + #[zenoh_macros::unstable] + fn with_attachment(self, attachment: Attachment) -> Self; + fn congestion_control(self, congestion_control: CongestionControl) -> Self; + fn priority(self, priority: Priority) -> Self; + fn express(self, is_express: bool) -> Self; +} + +pub trait PutSampleBuilderTrait: SampleBuilderTrait { + fn with_encoding(self, encoding: Encoding) -> Self; + fn with_payload(self, payload: IntoPayload) -> Self + where + IntoPayload: Into; +} + +pub trait DeleteSampleBuilderTrait: SampleBuilderTrait {} + +pub struct SampleBuilder(Sample); + +impl SampleBuilderTrait for SampleBuilder { + fn with_keyexpr(self, key_expr: IntoKeyExpr) -> Self + where + IntoKeyExpr: Into>, + { + let mut this = self; + this.0.key_expr = key_expr.into(); + this + } + + fn with_timestamp(self, timestamp: Timestamp) -> Self { + let mut this = self; + this.0.timestamp = Some(timestamp); + this + } + #[zenoh_macros::unstable] + fn with_source_info(self, source_info: SourceInfo) -> Self { + let mut this = self; + this.0.source_info = source_info; + this + } + #[zenoh_macros::unstable] + fn with_attachment(self, attachment: Attachment) -> Self { + let mut this = self; + this.0.attachment = Some(attachment); + this + 
} + fn congestion_control(self, congestion_control: CongestionControl) -> Self { + let mut this = self; + this.0.qos = this.0.qos.with_congestion_control(congestion_control); + this + } + fn priority(self, priority: Priority) -> Self { + let mut this = self; + this.0.qos = this.0.qos.with_priority(priority); + this + } + fn express(self, is_express: bool) -> Self { + let mut this = self; + this.0.qos = this.0.qos.with_express(is_express); + this + } +} + +pub struct PutSampleBuilder(SampleBuilder); + +impl PutSampleBuilder { + pub fn new(key_expr: IntoKeyExpr, payload: IntoPayload) -> Self + where + IntoKeyExpr: Into>, + IntoPayload: Into, + { + Self(SampleBuilder::from(Sample { + key_expr: key_expr.into(), + payload: payload.into(), + kind: SampleKind::Put, + encoding: Encoding::default(), + timestamp: None, + qos: QoS::default(), + #[cfg(feature = "unstable")] + source_info: SourceInfo::empty(), + #[cfg(feature = "unstable")] + attachment: None, + })) + } + pub fn without_timestamp(self) -> Self { + let mut this = self; + this.0 .0.timestamp = None; + this + } + pub fn without_attachment(self) -> Self { + let mut this = self; + this.0 .0.attachment = None; + this + } +} + +impl SampleBuilderTrait for PutSampleBuilder { + fn with_keyexpr(self, key_expr: IntoKeyExpr) -> Self + where + IntoKeyExpr: Into>, + { + Self(self.0.with_keyexpr(key_expr)) + } + fn with_timestamp(self, timestamp: Timestamp) -> Self { + Self(self.0.with_timestamp(timestamp)) + } + #[zenoh_macros::unstable] + fn with_source_info(self, source_info: SourceInfo) -> Self { + Self(self.0.with_source_info(source_info)) + } + #[zenoh_macros::unstable] + fn with_attachment(self, attachment: Attachment) -> Self { + Self(self.0.with_attachment(attachment)) + } + fn congestion_control(self, congestion_control: CongestionControl) -> Self { + Self(self.0.congestion_control(congestion_control)) + } + fn priority(self, priority: Priority) -> Self { + Self(self.0.priority(priority)) + } + fn express(self, 
is_express: bool) -> Self { + Self(self.0.express(is_express)) + } +} + +impl PutSampleBuilderTrait for PutSampleBuilder { + fn with_encoding(self, encoding: Encoding) -> Self { + let mut this = self; + this.0 .0.encoding = encoding; + this + } + fn with_payload(self, payload: IntoPayload) -> Self + where + IntoPayload: Into, + { + let mut this = self; + this.0 .0.payload = payload.into(); + this + } +} + +pub struct DeleteSampleBuilder(SampleBuilder); + +impl DeleteSampleBuilder { + pub fn new(key_expr: IntoKeyExpr) -> Self + where + IntoKeyExpr: Into>, + { + Self(SampleBuilder::from(Sample { + key_expr: key_expr.into(), + payload: Payload::empty(), + kind: SampleKind::Delete, + encoding: Encoding::default(), + timestamp: None, + qos: QoS::default(), + #[cfg(feature = "unstable")] + source_info: SourceInfo::empty(), + #[cfg(feature = "unstable")] + attachment: None, + })) + } +} + +impl SampleBuilderTrait for DeleteSampleBuilder { + fn with_keyexpr(self, key_expr: IntoKeyExpr) -> Self + where + IntoKeyExpr: Into>, + { + Self(self.0.with_keyexpr(key_expr)) + } + fn with_timestamp(self, timestamp: Timestamp) -> Self { + Self(self.0.with_timestamp(timestamp)) + } + #[zenoh_macros::unstable] + fn with_source_info(self, source_info: SourceInfo) -> Self { + Self(self.0.with_source_info(source_info)) + } + #[zenoh_macros::unstable] + fn with_attachment(self, attachment: Attachment) -> Self { + Self(self.0.with_attachment(attachment)) + } + fn congestion_control(self, congestion_control: CongestionControl) -> Self { + Self(self.0.congestion_control(congestion_control)) + } + fn priority(self, priority: Priority) -> Self { + Self(self.0.priority(priority)) + } + fn express(self, is_express: bool) -> Self { + Self(self.0.express(is_express)) + } +} + +impl DeleteSampleBuilderTrait for DeleteSampleBuilder {} + +impl From for SampleBuilder { + fn from(sample: Sample) -> Self { + SampleBuilder(sample) + } +} + +impl TryFrom for PutSampleBuilder { + type Error = zresult::Error; 
+ fn try_from(sample: Sample) -> Result { + if sample.kind != SampleKind::Put { + bail!("Sample is not a put sample") + } + Ok(Self(SampleBuilder(sample))) + } +} + +impl TryFrom for DeleteSampleBuilder { + type Error = zresult::Error; + fn try_from(sample: Sample) -> Result { + if sample.kind != SampleKind::Delete { + bail!("Sample is not a delete sample") + } + Ok(Self(SampleBuilder(sample))) + } +} + +impl Resolvable for SampleBuilder { + type To = Sample; +} + +impl Resolvable for PutSampleBuilder { + type To = Sample; +} + +impl Resolvable for DeleteSampleBuilder { + type To = Sample; +} + +impl SyncResolve for SampleBuilder { + fn res_sync(self) -> Self::To { + self.0 + } +} + +impl SyncResolve for PutSampleBuilder { + fn res_sync(self) -> Self::To { + self.0.res_sync() + } +} + +impl SyncResolve for DeleteSampleBuilder { + fn res_sync(self) -> Self::To { + self.0.res_sync() + } +} + +impl AsyncResolve for SampleBuilder { + type Future = futures::future::Ready; + fn res_async(self) -> Self::Future { + futures::future::ready(self.0) + } +} + +impl AsyncResolve for PutSampleBuilder { + type Future = futures::future::Ready; + fn res_async(self) -> Self::Future { + self.0.res_async() + } +} + +impl AsyncResolve for DeleteSampleBuilder { + type Future = futures::future::Ready; + fn res_async(self) -> Self::Future { + self.0.res_async() + } +} From a05b93de8c9507e597d2f85bce88c9787241590b Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Sat, 23 Mar 2024 15:43:58 +0100 Subject: [PATCH 033/124] reply builder unfinished --- .../src/replica/storage.rs | 4 +- zenoh-ext/src/querying_subscriber.rs | 2 +- zenoh/src/queryable.rs | 268 +++++++++++------- zenoh/src/sample_builder.rs | 99 ++++--- 4 files changed, 238 insertions(+), 135 deletions(-) diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs index 576f6adec2..1aadc88611 100644 --- 
a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs @@ -24,7 +24,7 @@ use std::time::{SystemTime, UNIX_EPOCH}; use zenoh::buffers::ZBuf; use zenoh::prelude::r#async::*; use zenoh::query::ConsolidationMode; -use zenoh::sample_builder::{SampleBuilder, SampleBuilderTrait}; +use zenoh::sample_builder::{SampleBuilderTrait, SampleUpdater}; use zenoh::time::{new_reception_timestamp, Timestamp, NTP64}; use zenoh::{Result as ZResult, Session}; use zenoh_backend_traits::config::{GarbageCollectionConfig, StorageConfig}; @@ -368,7 +368,7 @@ impl StorageService { } }; let sample = if sample.timestamp().is_none() { - SampleBuilder::from(sample).with_timestamp(new_reception_timestamp()).res_sync() + SampleUpdater::from(sample).with_timestamp(new_reception_timestamp()).res_sync() } else { sample }; diff --git a/zenoh-ext/src/querying_subscriber.rs b/zenoh-ext/src/querying_subscriber.rs index eb6d6e9516..19388ea16f 100644 --- a/zenoh-ext/src/querying_subscriber.rs +++ b/zenoh-ext/src/querying_subscriber.rs @@ -20,7 +20,7 @@ use std::time::Duration; use zenoh::handlers::{locked, DefaultHandler}; use zenoh::prelude::r#async::*; use zenoh::query::{QueryConsolidation, QueryTarget, ReplyKeyExpr}; -use zenoh::sample_builder::{SampleBuilder, SampleBuilderTrait}; +use zenoh::sample_builder::{SampleBuilderTrait, SampleUpdater}; use zenoh::subscriber::{Reliability, Subscriber}; use zenoh::time::{new_reception_timestamp, Timestamp}; use zenoh::Result as ZResult; diff --git a/zenoh/src/queryable.rs b/zenoh/src/queryable.rs index d2eabcdc2a..eb6ef013c7 100644 --- a/zenoh/src/queryable.rs +++ b/zenoh/src/queryable.rs @@ -20,6 +20,10 @@ use crate::net::primitives::Primitives; use crate::prelude::*; use crate::sample::QoS; use crate::sample::SourceInfo; +use crate::sample_builder::{ + DeleteSampleBuilder, DeleteSampleBuilderTrait, PutSampleBuilder, PutSampleBuilderTrait, + SampleBuilder, SampleBuilderTrait, +}; use 
crate::Id; use crate::SessionRef; use crate::Undeclarable; @@ -102,43 +106,6 @@ impl Query { pub fn attachment(&self) -> Option<&Attachment> { self.inner.attachment.as_ref() } - /// Sends a reply in the form of [`Sample`] to this Query. - /// - /// By default, queries only accept replies whose key expression intersects with the query's. - /// Unless the query has enabled disjoint replies (you can check this through [`Query::accepts_replies`]), - /// replying on a disjoint key expression will result in an error when resolving the reply. - /// This api is for internal use only. - #[inline(always)] - #[cfg(feature = "unstable")] - #[doc(hidden)] - pub fn reply_sample(&self, sample: Sample) -> ReplyBuilder<'_> { - let Sample { - key_expr, - payload, - kind, - encoding, - timestamp, - qos, - #[cfg(feature = "unstable")] - source_info, - #[cfg(feature = "unstable")] - attachment, - } = sample; - ReplyBuilder { - query: self, - key_expr, - payload, - kind, - encoding, - timestamp, - qos, - #[cfg(feature = "unstable")] - source_info, - #[cfg(feature = "unstable")] - attachment, - } - } - /// Sends a reply to this Query. /// /// By default, queries only accept replies whose key expression intersects with the query's. @@ -154,18 +121,11 @@ impl Query { IntoKeyExpr: Into>, IntoPayload: Into, { + let sample_builder = PutSampleBuilder::new(key_expr, payload) + .with_qos(response::ext::QoSType::RESPONSE.into()); ReplyBuilder { query: self, - key_expr: key_expr.into(), - payload: payload.into(), - kind: SampleKind::Put, - timestamp: None, - encoding: Encoding::default(), - qos: response::ext::QoSType::RESPONSE.into(), - #[cfg(feature = "unstable")] - source_info: SourceInfo::empty(), - #[cfg(feature = "unstable")] - attachment: None, + sample_builder, } } /// Sends a error reply to this Query. 
@@ -187,22 +147,15 @@ impl Query { /// Unless the query has enabled disjoint replies (you can check this through [`Query::accepts_replies`]), /// replying on a disjoint key expression will result in an error when resolving the reply. #[inline(always)] - pub fn reply_del(&self, key_expr: IntoKeyExpr) -> ReplyBuilder<'_> + pub fn reply_del(&self, key_expr: IntoKeyExpr) -> ReplyDelBuilder<'_> where IntoKeyExpr: Into>, { - ReplyBuilder { + let sample_builder = + DeleteSampleBuilder::new(key_expr).with_qos(response::ext::QoSType::RESPONSE.into()); + ReplyDelBuilder { query: self, - key_expr: key_expr.into(), - payload: Payload::empty(), - kind: SampleKind::Delete, - timestamp: None, - encoding: Encoding::default(), - qos: response::ext::QoSType::RESPONSE.into(), - #[cfg(feature = "unstable")] - source_info: SourceInfo::empty(), - #[cfg(feature = "unstable")] - attachment: None, + sample_builder, } } @@ -250,45 +203,161 @@ impl fmt::Display for Query { #[derive(Debug)] pub struct ReplyBuilder<'a> { query: &'a Query, - key_expr: KeyExpr<'static>, - payload: Payload, - kind: SampleKind, - encoding: Encoding, - timestamp: Option, - qos: QoS, + sample_builder: PutSampleBuilder, +} + +impl SampleBuilderTrait for ReplyBuilder<'_> { + fn with_keyexpr(self, key_expr: IntoKeyExpr) -> Self + where + IntoKeyExpr: Into>, + { + Self { + sample_builder: self.sample_builder.with_keyexpr(key_expr), + ..self + } + } + + fn with_timestamp(self, timestamp: Timestamp) -> Self { + Self { + sample_builder: self.sample_builder.with_timestamp(timestamp), + ..self + } + } + #[cfg(feature = "unstable")] - source_info: SourceInfo, + fn with_source_info(self, source_info: SourceInfo) -> Self { + Self { + sample_builder: self.sample_builder.with_source_info(source_info), + ..self + } + } + #[cfg(feature = "unstable")] - attachment: Option, + fn with_attachment(self, attachment: Attachment) -> Self { + Self { + sample_builder: self.sample_builder.with_attachment(attachment), + ..self + } + } + + fn 
congestion_control(self, congestion_control: CongestionControl) -> Self { + Self { + sample_builder: self.sample_builder.congestion_control(congestion_control), + ..self + } + } + + fn priority(self, priority: Priority) -> Self { + Self { + sample_builder: self.sample_builder.priority(priority), + ..self + } + } + + fn express(self, is_express: bool) -> Self { + Self { + sample_builder: self.sample_builder.express(is_express), + ..self + } + } } -/// A builder returned by [`Query::reply_err()`](Query::reply_err). +impl PutSampleBuilderTrait for ReplyBuilder<'_> { + fn with_encoding(self, encoding: Encoding) -> Self { + Self { + sample_builder: self.sample_builder.with_encoding(encoding), + ..self + } + } + + fn with_payload(self, payload: IntoPayload) -> Self + where + IntoPayload: Into, + { + Self { + sample_builder: self.sample_builder.with_payload(payload), + ..self + } + } +} + +/// A builder returned by [`Query::reply_del()`](Query::reply) #[must_use = "Resolvables do nothing unless you resolve them using the `res` method from either `SyncResolve` or `AsyncResolve`"] #[derive(Debug)] -pub struct ReplyErrBuilder<'a> { +pub struct ReplyDelBuilder<'a> { query: &'a Query, - value: Value, + sample_builder: DeleteSampleBuilder, } -impl<'a> ReplyBuilder<'a> { - #[zenoh_macros::unstable] - pub fn with_attachment(mut self, attachment: Attachment) -> Self { - self.attachment = Some(attachment); - self +impl SampleBuilderTrait for ReplyDelBuilder<'_> { + fn with_keyexpr(self, key_expr: IntoKeyExpr) -> Self + where + IntoKeyExpr: Into>, + { + Self { + sample_builder: self.sample_builder.with_keyexpr(key_expr), + ..self + } } - #[zenoh_macros::unstable] - pub fn with_source_info(mut self, source_info: SourceInfo) -> Self { - self.source_info = source_info; - self + + fn with_timestamp(self, timestamp: Timestamp) -> Self { + Self { + sample_builder: self.sample_builder.with_timestamp(timestamp), + ..self + } } - pub fn with_timestamp(mut self, timestamp: Timestamp) -> Self 
{ - self.timestamp = Some(timestamp); - self + + #[cfg(feature = "unstable")] + fn with_source_info(self, source_info: SourceInfo) -> Self { + Self { + sample_builder: self.sample_builder.with_source_info(source_info), + ..self + } } - pub fn with_encoding(mut self, encoding: Encoding) -> Self { - self.encoding = encoding; - self + #[cfg(feature = "unstable")] + fn with_attachment(self, attachment: Attachment) -> Self { + Self { + sample_builder: self.sample_builder.with_attachment(attachment), + ..self + } + } + + fn congestion_control(self, congestion_control: CongestionControl) -> Self { + Self { + sample_builder: self.sample_builder.congestion_control(congestion_control), + ..self + } + } + + fn priority(self, priority: Priority) -> Self { + Self { + sample_builder: self.sample_builder.priority(priority), + ..self + } + } + + fn express(self, is_express: bool) -> Self { + Self { + sample_builder: self.sample_builder.express(is_express), + ..self + } + } +} + +impl DeleteSampleBuilderTrait for ReplyDelBuilder<'_> {} + +/// A builder returned by [`Query::reply_err()`](Query::reply_err). 
+#[must_use = "Resolvables do nothing unless you resolve them using the `res` method from either `SyncResolve` or `AsyncResolve`"] +#[derive(Debug)] +pub struct ReplyErrBuilder<'a> { + query: &'a Query, + value: Value, +} + +impl AsRef for ReplyBuilder<'_> { + fn as_ref(&self) -> &PutSampleBuilder { + &self.sample_builder } } @@ -298,19 +367,20 @@ impl<'a> Resolvable for ReplyBuilder<'a> { impl SyncResolve for ReplyBuilder<'_> { fn res_sync(self) -> ::To { + let sample = self.sample_builder.res_sync(); if !self.query._accepts_any_replies().unwrap_or(false) - && !self.query.key_expr().intersects(&self.key_expr) + && !self.query.key_expr().intersects(&sample.key_expr) { - bail!("Attempted to reply on `{}`, which does not intersect with query `{}`, despite query only allowing replies on matching key expressions", self.key_expr, self.query.key_expr()) + bail!("Attempted to reply on `{}`, which does not intersect with query `{}`, despite query only allowing replies on matching key expressions", sample.key_expr, self.query.key_expr()) } #[allow(unused_mut)] // will be unused if feature = "unstable" is not enabled let mut ext_sinfo = None; #[cfg(feature = "unstable")] { - if self.source_info.source_id.is_some() || self.source_info.source_sn.is_some() { + if sample.source_info.source_id.is_some() || sample.source_info.source_sn.is_some() { ext_sinfo = Some(zenoh::put::ext::SourceInfoType { - id: self.source_info.source_id.unwrap_or_default(), - sn: self.source_info.source_sn.unwrap_or_default() as u32, + id: sample.source_info.source_id.unwrap_or_default(), + sn: sample.source_info.source_sn.unwrap_or_default() as u32, }) } } @@ -318,38 +388,38 @@ impl SyncResolve for ReplyBuilder<'_> { rid: self.query.inner.qid, wire_expr: WireExpr { scope: 0, - suffix: std::borrow::Cow::Owned(self.key_expr.into()), + suffix: std::borrow::Cow::Owned(sample.key_expr.into()), mapping: Mapping::Sender, }, payload: ResponseBody::Reply(zenoh::Reply { consolidation: 
zenoh::Consolidation::DEFAULT, ext_unknown: vec![], - payload: match self.kind { + payload: match sample.kind { SampleKind::Put => ReplyBody::Put(Put { - timestamp: self.timestamp, - encoding: self.encoding.into(), + timestamp: sample.timestamp, + encoding: sample.encoding.into(), ext_sinfo, #[cfg(feature = "shared-memory")] ext_shm: None, #[cfg(feature = "unstable")] - ext_attachment: self.attachment.map(|a| a.into()), + ext_attachment: sample.attachment.map(|a| a.into()), #[cfg(not(feature = "unstable"))] ext_attachment: None, ext_unknown: vec![], - payload: self.payload.into(), + payload: sample.payload.into(), }), SampleKind::Delete => ReplyBody::Del(Del { - timestamp: self.timestamp, + timestamp: sample.timestamp, ext_sinfo, #[cfg(feature = "unstable")] - ext_attachment: self.attachment.map(|a| a.into()), + ext_attachment: sample.attachment.map(|a| a.into()), #[cfg(not(feature = "unstable"))] ext_attachment: None, ext_unknown: vec![], }), }, }), - ext_qos: self.qos.into(), + ext_qos: sample.qos.into(), ext_tstamp: None, ext_respid: Some(response::ext::ResponderIdType { zid: self.query.inner.zid, diff --git a/zenoh/src/sample_builder.rs b/zenoh/src/sample_builder.rs index a9cffb22d1..fcf3a64182 100644 --- a/zenoh/src/sample_builder.rs +++ b/zenoh/src/sample_builder.rs @@ -51,52 +51,76 @@ pub trait PutSampleBuilderTrait: SampleBuilderTrait { pub trait DeleteSampleBuilderTrait: SampleBuilderTrait {} +#[derive(Debug)] pub struct SampleBuilder(Sample); +impl SampleBuilder { + pub(crate) fn without_timestamp(self) -> Self { + Self(Sample { + timestamp: None, + ..self.0 + }) + } + pub(crate) fn without_attachment(self) -> Self { + Self(Sample { + attachment: None, + ..self.0 + }) + } +} + impl SampleBuilderTrait for SampleBuilder { fn with_keyexpr(self, key_expr: IntoKeyExpr) -> Self where IntoKeyExpr: Into>, { - let mut this = self; - this.0.key_expr = key_expr.into(); - this + Self(Sample { + key_expr: key_expr.into(), + ..self.0 + }) } fn with_timestamp(self, 
timestamp: Timestamp) -> Self { - let mut this = self; - this.0.timestamp = Some(timestamp); - this + Self(Sample { + timestamp: Some(timestamp), + ..self.0 + }) } #[zenoh_macros::unstable] fn with_source_info(self, source_info: SourceInfo) -> Self { - let mut this = self; - this.0.source_info = source_info; - this + Self(Sample { + source_info, + ..self.0 + }) } #[zenoh_macros::unstable] fn with_attachment(self, attachment: Attachment) -> Self { - let mut this = self; - this.0.attachment = Some(attachment); - this + Self(Sample { + attachment: Some(attachment), + ..self.0 + }) } fn congestion_control(self, congestion_control: CongestionControl) -> Self { - let mut this = self; - this.0.qos = this.0.qos.with_congestion_control(congestion_control); - this + Self(Sample { + qos: self.0.qos.with_congestion_control(congestion_control), + ..self.0 + }) } fn priority(self, priority: Priority) -> Self { - let mut this = self; - this.0.qos = this.0.qos.with_priority(priority); - this + Self(Sample { + qos: self.0.qos.with_priority(priority), + ..self.0 + }) } fn express(self, is_express: bool) -> Self { - let mut this = self; - this.0.qos = this.0.qos.with_express(is_express); - this + Self(Sample { + qos: self.0.qos.with_express(is_express), + ..self.0 + }) } } +#[derive(Debug)] pub struct PutSampleBuilder(SampleBuilder); impl PutSampleBuilder { @@ -118,15 +142,17 @@ impl PutSampleBuilder { attachment: None, })) } + #[zenoh_macros::unstable] pub fn without_timestamp(self) -> Self { - let mut this = self; - this.0 .0.timestamp = None; - this + Self(self.0.without_timestamp()) } + #[zenoh_macros::unstable] pub fn without_attachment(self) -> Self { - let mut this = self; - this.0 .0.attachment = None; - this + Self(self.0.without_attachment()) + } + // It's convenient to set QoS as a whole for internal usage. For user API there are `congestion_control`, `priority` and `express` methods. 
+ pub(crate) fn with_qos(self, qos: QoS) -> Self { + Self(SampleBuilder(Sample { qos, ..self.0 .0 })) } } @@ -161,20 +187,23 @@ impl SampleBuilderTrait for PutSampleBuilder { impl PutSampleBuilderTrait for PutSampleBuilder { fn with_encoding(self, encoding: Encoding) -> Self { - let mut this = self; - this.0 .0.encoding = encoding; - this + Self(SampleBuilder(Sample { + encoding, + ..self.0 .0 + })) } fn with_payload(self, payload: IntoPayload) -> Self where IntoPayload: Into, { - let mut this = self; - this.0 .0.payload = payload.into(); - this + Self(SampleBuilder(Sample { + payload: payload.into(), + ..self.0 .0 + })) } } +#[derive(Debug)] pub struct DeleteSampleBuilder(SampleBuilder); impl DeleteSampleBuilder { @@ -195,6 +224,10 @@ impl DeleteSampleBuilder { attachment: None, })) } + // It's convenient to set QoS as a whole for internal usage. For user API there are `congestion_control`, `priority` and `express` methods. + pub(crate) fn with_qos(self, qos: QoS) -> Self { + Self(SampleBuilder(Sample { qos, ..self.0 .0 })) + } } impl SampleBuilderTrait for DeleteSampleBuilder { From 0992ff8812df04e4b0dc9acc01a45763739d0792 Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Sat, 23 Mar 2024 22:44:36 +0100 Subject: [PATCH 034/124] replybuilder unfinished --- zenoh-ext/src/querying_subscriber.rs | 2 +- zenoh/src/net/runtime/adminspace.rs | 1 + zenoh/src/queryable.rs | 38 ++++++++++++++++++---------- 3 files changed, 26 insertions(+), 15 deletions(-) diff --git a/zenoh-ext/src/querying_subscriber.rs b/zenoh-ext/src/querying_subscriber.rs index 19388ea16f..eb6d6e9516 100644 --- a/zenoh-ext/src/querying_subscriber.rs +++ b/zenoh-ext/src/querying_subscriber.rs @@ -20,7 +20,7 @@ use std::time::Duration; use zenoh::handlers::{locked, DefaultHandler}; use zenoh::prelude::r#async::*; use zenoh::query::{QueryConsolidation, QueryTarget, ReplyKeyExpr}; -use zenoh::sample_builder::{SampleBuilderTrait, SampleUpdater}; +use zenoh::sample_builder::{SampleBuilder, 
SampleBuilderTrait}; use zenoh::subscriber::{Reliability, Subscriber}; use zenoh::time::{new_reception_timestamp, Timestamp}; use zenoh::Result as ZResult; diff --git a/zenoh/src/net/runtime/adminspace.rs b/zenoh/src/net/runtime/adminspace.rs index 29106cb89d..01f29ba19b 100644 --- a/zenoh/src/net/runtime/adminspace.rs +++ b/zenoh/src/net/runtime/adminspace.rs @@ -20,6 +20,7 @@ use crate::plugins::sealed::{self as plugins}; use crate::prelude::sync::SyncResolve; use crate::queryable::Query; use crate::queryable::QueryInner; +use crate::sample_builder::PutSampleBuilderTrait; use crate::value::Value; use async_std::task; use log::{error, trace}; diff --git a/zenoh/src/queryable.rs b/zenoh/src/queryable.rs index eb6ef013c7..a5b6deca4c 100644 --- a/zenoh/src/queryable.rs +++ b/zenoh/src/queryable.rs @@ -18,11 +18,10 @@ use crate::encoding::Encoding; use crate::handlers::{locked, DefaultHandler}; use crate::net::primitives::Primitives; use crate::prelude::*; -use crate::sample::QoS; use crate::sample::SourceInfo; use crate::sample_builder::{ DeleteSampleBuilder, DeleteSampleBuilderTrait, PutSampleBuilder, PutSampleBuilderTrait, - SampleBuilder, SampleBuilderTrait, + SampleBuilderTrait, }; use crate::Id; use crate::SessionRef; @@ -355,23 +354,34 @@ pub struct ReplyErrBuilder<'a> { value: Value, } -impl AsRef for ReplyBuilder<'_> { - fn as_ref(&self) -> &PutSampleBuilder { - &self.sample_builder +impl<'a> Resolvable for ReplyBuilder<'a> { + type To = ZResult<()>; +} + +impl SyncResolve for ReplyBuilder<'_> { + fn res_sync(self) -> ::To { + let sample = self.sample_builder.res_sync(); + self.query._reply_sample(sample) } } -impl<'a> Resolvable for ReplyBuilder<'a> { +impl<'a> Resolvable for ReplyDelBuilder<'a> { type To = ZResult<()>; } -impl SyncResolve for ReplyBuilder<'_> { +impl SyncResolve for ReplyDelBuilder<'_> { fn res_sync(self) -> ::To { let sample = self.sample_builder.res_sync(); - if !self.query._accepts_any_replies().unwrap_or(false) - && 
!self.query.key_expr().intersects(&sample.key_expr) + self.query._reply_sample(sample) + } +} + +impl Query { + fn _reply_sample(&self, sample: Sample) -> ZResult<()> { + if !self._accepts_any_replies().unwrap_or(false) + && !self.key_expr().intersects(&sample.key_expr) { - bail!("Attempted to reply on `{}`, which does not intersect with query `{}`, despite query only allowing replies on matching key expressions", sample.key_expr, self.query.key_expr()) + bail!("Attempted to reply on `{}`, which does not intersect with query `{}`, despite query only allowing replies on matching key expressions", sample.key_expr, self.key_expr()) } #[allow(unused_mut)] // will be unused if feature = "unstable" is not enabled let mut ext_sinfo = None; @@ -384,8 +394,8 @@ impl SyncResolve for ReplyBuilder<'_> { }) } } - self.query.inner.primitives.send_response(Response { - rid: self.query.inner.qid, + self.inner.primitives.send_response(Response { + rid: self.inner.qid, wire_expr: WireExpr { scope: 0, suffix: std::borrow::Cow::Owned(sample.key_expr.into()), @@ -422,8 +432,8 @@ impl SyncResolve for ReplyBuilder<'_> { ext_qos: sample.qos.into(), ext_tstamp: None, ext_respid: Some(response::ext::ResponderIdType { - zid: self.query.inner.zid, - eid: self.query.eid, + zid: self.inner.zid, + eid: self.eid, }), }); Ok(()) From 62378ad1805d3e13db06664f1176ca0f89393fe2 Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Sun, 24 Mar 2024 11:21:32 +0100 Subject: [PATCH 035/124] new reply_sample --- zenoh/src/queryable.rs | 102 +++++++++++++++++++++++++++++++++++- zenoh/src/sample_builder.rs | 43 ++++++++++++++- 2 files changed, 141 insertions(+), 4 deletions(-) diff --git a/zenoh/src/queryable.rs b/zenoh/src/queryable.rs index a5b6deca4c..d0b80e9a11 100644 --- a/zenoh/src/queryable.rs +++ b/zenoh/src/queryable.rs @@ -21,7 +21,7 @@ use crate::prelude::*; use crate::sample::SourceInfo; use crate::sample_builder::{ DeleteSampleBuilder, DeleteSampleBuilderTrait, PutSampleBuilder, 
PutSampleBuilderTrait, - SampleBuilderTrait, + SampleBuilder, SampleBuilderTrait, }; use crate::Id; use crate::SessionRef; @@ -105,6 +105,24 @@ impl Query { pub fn attachment(&self) -> Option<&Attachment> { self.inner.attachment.as_ref() } + + /// Sends a reply or delete reply to this Query + /// + /// This function is useful when resending the samples which can be of [`SampleKind::Put`] or [`SampleKind::Delete`] + /// It allows to build the reply with same common parameters, like timestamp, attachment, source_info, etc. + /// and only on final step to choose the kind of reply by calling [`ReplySampleBuilder::put`] or [`ReplySampleBuilder::delete`] methods. + #[inline(always)] + pub fn reply_sample(&self, key_expr: IntoKeyExpr) -> ReplySampleBuilder + where + IntoKeyExpr: Into>, + { + let sample_builder = SampleBuilder::new(key_expr); + ReplySampleBuilder { + query: self, + sample_builder, + } + } + /// Sends a reply to this Query. /// /// By default, queries only accept replies whose key expression intersects with the query's. @@ -197,7 +215,87 @@ impl fmt::Display for Query { } } -/// A builder returned by [`Query::reply()`](Query::reply) or [`Query::reply()`](Query::reply). 
+pub struct ReplySampleBuilder<'a> { + query: &'a Query, + sample_builder: SampleBuilder, +} + +impl<'a> ReplySampleBuilder<'a> { + pub fn put(self, payload: IntoPayload) -> ReplyBuilder<'a> + where + IntoPayload: Into, + { + let builder = ReplyBuilder { + query: self.query, + sample_builder: self.sample_builder.into(), + }; + builder.with_payload(payload) + } + pub fn delete(self) -> ReplyDelBuilder<'a> { + ReplyDelBuilder { + query: self.query, + sample_builder: self.sample_builder.into(), + } + } +} + +impl SampleBuilderTrait for ReplySampleBuilder<'_> { + fn with_keyexpr(self, key_expr: IntoKeyExpr) -> Self + where + IntoKeyExpr: Into>, + { + Self { + sample_builder: self.sample_builder.with_keyexpr(key_expr), + ..self + } + } + + fn with_timestamp(self, timestamp: Timestamp) -> Self { + Self { + sample_builder: self.sample_builder.with_timestamp(timestamp), + ..self + } + } + + #[cfg(feature = "unstable")] + fn with_source_info(self, source_info: SourceInfo) -> Self { + Self { + sample_builder: self.sample_builder.with_source_info(source_info), + ..self + } + } + + #[cfg(feature = "unstable")] + fn with_attachment(self, attachment: Attachment) -> Self { + Self { + sample_builder: self.sample_builder.with_attachment(attachment), + ..self + } + } + + fn congestion_control(self, congestion_control: CongestionControl) -> Self { + Self { + sample_builder: self.sample_builder.congestion_control(congestion_control), + ..self + } + } + + fn priority(self, priority: Priority) -> Self { + Self { + sample_builder: self.sample_builder.priority(priority), + ..self + } + } + + fn express(self, is_express: bool) -> Self { + Self { + sample_builder: self.sample_builder.express(is_express), + ..self + } + } +} + +/// A builder returned by [`Query::reply()`](Query::reply) #[must_use = "Resolvables do nothing unless you resolve them using the `res` method from either `SyncResolve` or `AsyncResolve`"] #[derive(Debug)] pub struct ReplyBuilder<'a> { diff --git 
a/zenoh/src/sample_builder.rs b/zenoh/src/sample_builder.rs index fcf3a64182..61e4bf81fb 100644 --- a/zenoh/src/sample_builder.rs +++ b/zenoh/src/sample_builder.rs @@ -55,13 +55,30 @@ pub trait DeleteSampleBuilderTrait: SampleBuilderTrait {} pub struct SampleBuilder(Sample); impl SampleBuilder { - pub(crate) fn without_timestamp(self) -> Self { + pub fn new(key_expr: IntoKeyExpr) -> Self + where + IntoKeyExpr: Into>, + { + Self(Sample { + key_expr: key_expr.into(), + payload: Payload::empty(), + kind: SampleKind::default(), + encoding: Encoding::default(), + timestamp: None, + qos: QoS::default(), + #[cfg(feature = "unstable")] + source_info: SourceInfo::empty(), + #[cfg(feature = "unstable")] + attachment: None, + }) + } + pub fn without_timestamp(self) -> Self { Self(Sample { timestamp: None, ..self.0 }) } - pub(crate) fn without_attachment(self) -> Self { + pub fn without_attachment(self) -> Self { Self(Sample { attachment: None, ..self.0 @@ -123,6 +140,17 @@ impl SampleBuilderTrait for SampleBuilder { #[derive(Debug)] pub struct PutSampleBuilder(SampleBuilder); +impl From for PutSampleBuilder { + fn from(sample_builder: SampleBuilder) -> Self { + Self(SampleBuilder { + 0: Sample { + kind: SampleKind::Put, + ..sample_builder.0 + }, + }) + } +} + impl PutSampleBuilder { pub fn new(key_expr: IntoKeyExpr, payload: IntoPayload) -> Self where @@ -206,6 +234,17 @@ impl PutSampleBuilderTrait for PutSampleBuilder { #[derive(Debug)] pub struct DeleteSampleBuilder(SampleBuilder); +impl From for DeleteSampleBuilder { + fn from(sample_builder: SampleBuilder) -> Self { + Self(SampleBuilder { + 0: Sample { + kind: SampleKind::Delete, + ..sample_builder.0 + }, + }) + } +} + impl DeleteSampleBuilder { pub fn new(key_expr: IntoKeyExpr) -> Self where From cc580a5dd4a30409b12ab4ae7c5a81d0b9d5ab1d Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Sun, 24 Mar 2024 13:08:10 +0100 Subject: [PATCH 036/124] sample decompose, opt setters --- zenoh/src/queryable.rs | 45 
+++++++++++++++++++++++++++++ zenoh/src/sample.rs | 35 +++++++++++++++++++++++ zenoh/src/sample_builder.rs | 56 +++++++++++++++++++++---------------- 3 files changed, 112 insertions(+), 24 deletions(-) diff --git a/zenoh/src/queryable.rs b/zenoh/src/queryable.rs index d0b80e9a11..7d4a0903c2 100644 --- a/zenoh/src/queryable.rs +++ b/zenoh/src/queryable.rs @@ -250,6 +250,13 @@ impl SampleBuilderTrait for ReplySampleBuilder<'_> { } } + fn with_timestamp_opt(self, timestamp: Option) -> Self { + Self { + sample_builder: self.sample_builder.with_timestamp_opt(timestamp), + ..self + } + } + fn with_timestamp(self, timestamp: Timestamp) -> Self { Self { sample_builder: self.sample_builder.with_timestamp(timestamp), @@ -265,6 +272,14 @@ impl SampleBuilderTrait for ReplySampleBuilder<'_> { } } + #[cfg(feature = "unstable")] + fn with_attachment_opt(self, attachment: Option) -> Self { + Self { + sample_builder: self.sample_builder.with_attachment_opt(attachment), + ..self + } + } + #[cfg(feature = "unstable")] fn with_attachment(self, attachment: Attachment) -> Self { Self { @@ -314,6 +329,13 @@ impl SampleBuilderTrait for ReplyBuilder<'_> { } } + fn with_timestamp_opt(self, timestamp: Option) -> Self { + Self { + sample_builder: self.sample_builder.with_timestamp_opt(timestamp), + ..self + } + } + fn with_timestamp(self, timestamp: Timestamp) -> Self { Self { sample_builder: self.sample_builder.with_timestamp(timestamp), @@ -329,6 +351,14 @@ impl SampleBuilderTrait for ReplyBuilder<'_> { } } + #[cfg(feature = "unstable")] + fn with_attachment_opt(self, attachment: Option) -> Self { + Self { + sample_builder: self.sample_builder.with_attachment_opt(attachment), + ..self + } + } + #[cfg(feature = "unstable")] fn with_attachment(self, attachment: Attachment) -> Self { Self { @@ -397,6 +427,13 @@ impl SampleBuilderTrait for ReplyDelBuilder<'_> { } } + fn with_timestamp_opt(self, timestamp: Option) -> Self { + Self { + sample_builder: 
self.sample_builder.with_timestamp_opt(timestamp), + ..self + } + } + fn with_timestamp(self, timestamp: Timestamp) -> Self { Self { sample_builder: self.sample_builder.with_timestamp(timestamp), @@ -412,6 +449,14 @@ impl SampleBuilderTrait for ReplyDelBuilder<'_> { } } + #[cfg(feature = "unstable")] + fn with_attachment_opt(self, attachment: Option) -> Self { + Self { + sample_builder: self.sample_builder.with_attachment_opt(attachment), + ..self + } + } + #[cfg(feature = "unstable")] fn with_attachment(self, attachment: Attachment) -> Self { Self { diff --git a/zenoh/src/sample.rs b/zenoh/src/sample.rs index 2c98d5ead1..3ac3087836 100644 --- a/zenoh/src/sample.rs +++ b/zenoh/src/sample.rs @@ -433,6 +433,41 @@ impl TryFrom for SampleKind { #[zenoh_macros::unstable] pub use attachment::{Attachment, AttachmentBuilder, AttachmentIterator}; +/// Structure with public fields for sample. It's convenient if it's necessary to decompose a sample into its fields. +pub struct SampleFields { + pub key_expr: KeyExpr<'static>, + pub payload: Payload, + pub kind: SampleKind, + pub encoding: Encoding, + pub timestamp: Option, + pub express: bool, + pub priority: Priority, + pub congestion_control: CongestionControl, + #[cfg(feature = "unstable")] + pub source_info: SourceInfo, + #[cfg(feature = "unstable")] + pub attachment: Option, +} + +impl From for SampleFields { + fn from(sample: Sample) -> Self { + SampleFields { + key_expr: sample.key_expr, + payload: sample.payload, + kind: sample.kind, + encoding: sample.encoding, + timestamp: sample.timestamp, + express: sample.qos.express(), + priority: sample.qos.priority(), + congestion_control: sample.qos.congestion_control(), + #[cfg(feature = "unstable")] + source_info: sample.source_info, + #[cfg(feature = "unstable")] + attachment: sample.attachment, + } + } +} + /// A zenoh sample. 
#[non_exhaustive] #[derive(Clone, Debug)] diff --git a/zenoh/src/sample_builder.rs b/zenoh/src/sample_builder.rs index 61e4bf81fb..c0ebf8c9d0 100644 --- a/zenoh/src/sample_builder.rs +++ b/zenoh/src/sample_builder.rs @@ -32,10 +32,13 @@ pub trait SampleBuilderTrait { fn with_keyexpr(self, key_expr: IntoKeyExpr) -> Self where IntoKeyExpr: Into>; + fn with_timestamp_opt(self, timestamp: Option) -> Self; fn with_timestamp(self, timestamp: Timestamp) -> Self; #[zenoh_macros::unstable] fn with_source_info(self, source_info: SourceInfo) -> Self; #[zenoh_macros::unstable] + fn with_attachment_opt(self, attachment: Option) -> Self; + #[zenoh_macros::unstable] fn with_attachment(self, attachment: Attachment) -> Self; fn congestion_control(self, congestion_control: CongestionControl) -> Self; fn priority(self, priority: Priority) -> Self; @@ -72,18 +75,6 @@ impl SampleBuilder { attachment: None, }) } - pub fn without_timestamp(self) -> Self { - Self(Sample { - timestamp: None, - ..self.0 - }) - } - pub fn without_attachment(self) -> Self { - Self(Sample { - attachment: None, - ..self.0 - }) - } } impl SampleBuilderTrait for SampleBuilder { @@ -97,12 +88,17 @@ impl SampleBuilderTrait for SampleBuilder { }) } - fn with_timestamp(self, timestamp: Timestamp) -> Self { + fn with_timestamp_opt(self, timestamp: Option) -> Self { Self(Sample { - timestamp: Some(timestamp), + timestamp, ..self.0 }) } + + fn with_timestamp(self, timestamp: Timestamp) -> Self { + self.with_timestamp_opt(Some(timestamp)) + } + #[zenoh_macros::unstable] fn with_source_info(self, source_info: SourceInfo) -> Self { Self(Sample { @@ -110,13 +106,19 @@ impl SampleBuilderTrait for SampleBuilder { ..self.0 }) } + #[zenoh_macros::unstable] - fn with_attachment(self, attachment: Attachment) -> Self { + fn with_attachment_opt(self, attachment: Option) -> Self { Self(Sample { - attachment: Some(attachment), + attachment, ..self.0 }) } + + #[zenoh_macros::unstable] + fn with_attachment(self, attachment: Attachment) 
-> Self { + self.with_attachment_opt(Some(attachment)) + } fn congestion_control(self, congestion_control: CongestionControl) -> Self { Self(Sample { qos: self.0.qos.with_congestion_control(congestion_control), @@ -170,14 +172,6 @@ impl PutSampleBuilder { attachment: None, })) } - #[zenoh_macros::unstable] - pub fn without_timestamp(self) -> Self { - Self(self.0.without_timestamp()) - } - #[zenoh_macros::unstable] - pub fn without_attachment(self) -> Self { - Self(self.0.without_attachment()) - } // It's convenient to set QoS as a whole for internal usage. For user API there are `congestion_control`, `priority` and `express` methods. pub(crate) fn with_qos(self, qos: QoS) -> Self { Self(SampleBuilder(Sample { qos, ..self.0 .0 })) @@ -194,6 +188,9 @@ impl SampleBuilderTrait for PutSampleBuilder { fn with_timestamp(self, timestamp: Timestamp) -> Self { Self(self.0.with_timestamp(timestamp)) } + fn with_timestamp_opt(self, timestamp: Option) -> Self { + Self(self.0.with_timestamp_opt(timestamp)) + } #[zenoh_macros::unstable] fn with_source_info(self, source_info: SourceInfo) -> Self { Self(self.0.with_source_info(source_info)) @@ -202,6 +199,10 @@ impl SampleBuilderTrait for PutSampleBuilder { fn with_attachment(self, attachment: Attachment) -> Self { Self(self.0.with_attachment(attachment)) } + #[zenoh_macros::unstable] + fn with_attachment_opt(self, attachment: Option) -> Self { + Self(self.0.with_attachment_opt(attachment)) + } fn congestion_control(self, congestion_control: CongestionControl) -> Self { Self(self.0.congestion_control(congestion_control)) } @@ -279,6 +280,9 @@ impl SampleBuilderTrait for DeleteSampleBuilder { fn with_timestamp(self, timestamp: Timestamp) -> Self { Self(self.0.with_timestamp(timestamp)) } + fn with_timestamp_opt(self, timestamp: Option) -> Self { + Self(self.0.with_timestamp_opt(timestamp)) + } #[zenoh_macros::unstable] fn with_source_info(self, source_info: SourceInfo) -> Self { Self(self.0.with_source_info(source_info)) @@ -287,6 
+291,10 @@ impl SampleBuilderTrait for DeleteSampleBuilder { fn with_attachment(self, attachment: Attachment) -> Self { Self(self.0.with_attachment(attachment)) } + #[zenoh_macros::unstable] + fn with_attachment_opt(self, attachment: Option) -> Self { + Self(self.0.with_attachment_opt(attachment)) + } fn congestion_control(self, congestion_control: CongestionControl) -> Self { Self(self.0.congestion_control(congestion_control)) } From 270840247c72238654be20f611c4d4cb6338cfc4 Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Sun, 24 Mar 2024 15:38:36 +0100 Subject: [PATCH 037/124] samples, plugins updated --- plugins/zenoh-plugin-example/src/lib.rs | 13 +++++++++- .../src/replica/align_queryable.rs | 2 ++ .../src/replica/storage.rs | 4 +-- zenoh-ext/src/publication_cache.rs | 25 ++++++++++++++----- zenoh/src/queryable.rs | 24 ++++++++++++------ 5 files changed, 51 insertions(+), 17 deletions(-) diff --git a/plugins/zenoh-plugin-example/src/lib.rs b/plugins/zenoh-plugin-example/src/lib.rs index 04f49b4739..9b9dda40de 100644 --- a/plugins/zenoh-plugin-example/src/lib.rs +++ b/plugins/zenoh-plugin-example/src/lib.rs @@ -24,6 +24,7 @@ use std::sync::{ use zenoh::plugins::{RunningPluginTrait, ZenohPlugin}; use zenoh::prelude::r#async::*; use zenoh::runtime::Runtime; +use zenoh::sample_builder::SampleBuilderTrait; use zenoh_core::zlock; use zenoh_plugin_trait::{plugin_long_version, plugin_version, Plugin, PluginControl}; use zenoh_result::{bail, ZResult}; @@ -174,7 +175,17 @@ async fn run(runtime: Runtime, selector: KeyExpr<'_>, flag: Arc) { info!("Handling query '{}'", query.selector()); for (key_expr, sample) in stored.iter() { if query.selector().key_expr.intersects(unsafe{keyexpr::from_str_unchecked(key_expr)}) { - query.reply_sample(sample.clone()).res().await.unwrap(); + let reply = query + .reply_sample(sample.key_expr().clone().into_owned()) + .with_timestamp_opt(sample.timestamp().cloned()); + #[cfg(feature = "unstable")] + let reply = reply + 
.with_attachment_opt(sample.attachment()) + .with_source_info(sample.source_info()); + match sample.kind() { + SampleKind::Put => reply.put(sample.payload().clone()).res().await.unwrap(), + SampleKind::Delete => reply.delete().res().await.unwrap(), + } } } } diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/align_queryable.rs b/plugins/zenoh-plugin-storage-manager/src/replica/align_queryable.rs index 32be4a5534..691fabd7a7 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/align_queryable.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/align_queryable.rs @@ -20,6 +20,8 @@ use std::str; use std::str::FromStr; use zenoh::payload::StringOrBase64; use zenoh::prelude::r#async::*; +use zenoh::sample_builder::PutSampleBuilderTrait; +use zenoh::sample_builder::SampleBuilderTrait; use zenoh::time::Timestamp; use zenoh::Session; diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs index 1aadc88611..95af3c97a2 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs @@ -24,7 +24,7 @@ use std::time::{SystemTime, UNIX_EPOCH}; use zenoh::buffers::ZBuf; use zenoh::prelude::r#async::*; use zenoh::query::ConsolidationMode; -use zenoh::sample_builder::{SampleBuilderTrait, SampleUpdater}; +use zenoh::sample_builder::{PutSampleBuilderTrait, SampleBuilder, SampleBuilderTrait}; use zenoh::time::{new_reception_timestamp, Timestamp, NTP64}; use zenoh::{Result as ZResult, Session}; use zenoh_backend_traits::config::{GarbageCollectionConfig, StorageConfig}; @@ -368,7 +368,7 @@ impl StorageService { } }; let sample = if sample.timestamp().is_none() { - SampleUpdater::from(sample).with_timestamp(new_reception_timestamp()).res_sync() + SampleBuilder::from(sample).with_timestamp(new_reception_timestamp()).res_sync() } else { sample }; diff --git a/zenoh-ext/src/publication_cache.rs 
b/zenoh-ext/src/publication_cache.rs index 85cb96cce2..78fff32014 100644 --- a/zenoh-ext/src/publication_cache.rs +++ b/zenoh-ext/src/publication_cache.rs @@ -20,6 +20,7 @@ use std::convert::TryInto; use std::future::Ready; use zenoh::prelude::r#async::*; use zenoh::queryable::{Query, Queryable}; +use zenoh::sample_builder::SampleBuilderTrait; use zenoh::subscriber::FlumeSubscriber; use zenoh::SessionRef; use zenoh_core::{AsyncResolve, Resolvable, SyncResolve}; @@ -116,6 +117,22 @@ pub struct PublicationCache<'a> { _stoptx: Sender, } +async fn reply_sample(query: &Query, sample: &Sample) { + let reply = query + .reply_sample(sample.key_expr().clone().into_owned()) + .with_timestamp_opt(sample.timestamp().cloned()); + #[cfg(feature = "unstable")] + let reply = reply + .with_attachment_opt(sample.attachment()) + .with_source_info(sample.source_info()); + if let Err(e) = match sample.kind() { + SampleKind::Put => reply.put(sample.payload().clone()).res_async().await, + SampleKind::Delete => reply.delete().res_async().await, + } { + log::warn!("Error replying to query: {}", e); + } +} + impl<'a> PublicationCache<'a> { fn new(conf: PublicationCacheBuilder<'a, '_, '_>) -> ZResult> { let key_expr = conf.pub_key_expr?; @@ -212,9 +229,7 @@ impl<'a> PublicationCache<'a> { continue; } } - if let Err(e) = query.reply_sample(sample.clone()).res_async().await { - log::warn!("Error replying to query: {}", e); - } + reply_sample(&query, sample).await; } } } else { @@ -226,9 +241,7 @@ impl<'a> PublicationCache<'a> { continue; } } - if let Err(e) = query.reply_sample(sample.clone()).res_async().await { - log::warn!("Error replying to query: {}", e); - } + reply_sample(&query, sample).await; } } } diff --git a/zenoh/src/queryable.rs b/zenoh/src/queryable.rs index 7d4a0903c2..f2e00e47c6 100644 --- a/zenoh/src/queryable.rs +++ b/zenoh/src/queryable.rs @@ -519,6 +519,22 @@ impl SyncResolve for ReplyDelBuilder<'_> { } } +impl<'a> AsyncResolve for ReplyBuilder<'a> { + type Future = Ready; 
+ + fn res_async(self) -> Self::Future { + std::future::ready(self.res_sync()) + } +} + +impl<'a> AsyncResolve for ReplyDelBuilder<'a> { + type Future = Ready; + + fn res_async(self) -> Self::Future { + std::future::ready(self.res_sync()) + } +} + impl Query { fn _reply_sample(&self, sample: Sample) -> ZResult<()> { if !self._accepts_any_replies().unwrap_or(false) @@ -583,14 +599,6 @@ impl Query { } } -impl<'a> AsyncResolve for ReplyBuilder<'a> { - type Future = Ready; - - fn res_async(self) -> Self::Future { - std::future::ready(self.res_sync()) - } -} - impl<'a> Resolvable for ReplyErrBuilder<'a> { type To = ZResult<()>; } From b80fd0aa30842e607ae661547368df7f818f3a29 Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Sun, 24 Mar 2024 18:58:55 +0100 Subject: [PATCH 038/124] interceptors removed from plugin storage API --- plugins/zenoh-backend-example/src/lib.rs | 13 +--- plugins/zenoh-backend-traits/src/lib.rs | 68 +------------------ .../src/backends_mgt.rs | 7 -- .../zenoh-plugin-storage-manager/src/lib.rs | 4 -- .../src/memory_backend/mod.rs | 20 ------ .../src/replica/storage.rs | 23 ------- 6 files changed, 3 insertions(+), 132 deletions(-) diff --git a/plugins/zenoh-backend-example/src/lib.rs b/plugins/zenoh-backend-example/src/lib.rs index 602d29f375..f81231a498 100644 --- a/plugins/zenoh-backend-example/src/lib.rs +++ b/plugins/zenoh-backend-example/src/lib.rs @@ -13,11 +13,8 @@ // use async_std::sync::RwLock; use async_trait::async_trait; -use std::{ - collections::{hash_map::Entry, HashMap}, - sync::Arc, -}; -use zenoh::{prelude::OwnedKeyExpr, sample::Sample, time::Timestamp, value::Value}; +use std::collections::{hash_map::Entry, HashMap}; +use zenoh::{prelude::OwnedKeyExpr, time::Timestamp, value::Value}; use zenoh_backend_traits::{ config::{StorageConfig, VolumeConfig}, Capability, History, Persistence, Storage, StorageInsertionResult, StoredData, Volume, @@ -71,12 +68,6 @@ impl Volume for ExampleBackend { async fn create_storage(&self, _props: 
StorageConfig) -> ZResult> { Ok(Box::::default()) } - fn incoming_data_interceptor(&self) -> Option Sample + Send + Sync>> { - None - } - fn outgoing_data_interceptor(&self) -> Option Sample + Send + Sync>> { - None - } } #[async_trait] diff --git a/plugins/zenoh-backend-traits/src/lib.rs b/plugins/zenoh-backend-traits/src/lib.rs index d17e6dfd77..40d022f1ec 100644 --- a/plugins/zenoh-backend-traits/src/lib.rs +++ b/plugins/zenoh-backend-traits/src/lib.rs @@ -68,16 +68,6 @@ //! // The properties are the ones passed via a PUT in the admin space for Storage creation. //! Ok(Box::new(MyStorage::new(properties).await?)) //! } -//! -//! fn incoming_data_interceptor(&self) -> Option Sample + Send + Sync>> { -//! // No interception point for incoming data (on PUT operations) -//! None -//! } -//! -//! fn outgoing_data_interceptor(&self) -> Option Sample + Send + Sync>> { -//! // No interception point for outgoing data (on GET operations) -//! None -//! } //! } //! //! // Your Storage implementation @@ -135,9 +125,7 @@ use async_trait::async_trait; use const_format::concatcp; -use std::sync::Arc; -use zenoh::prelude::{KeyExpr, OwnedKeyExpr, Sample, Selector}; -use zenoh::queryable::ReplyBuilder; +use zenoh::prelude::OwnedKeyExpr; use zenoh::time::Timestamp; use zenoh::value::Value; pub use zenoh::Result as ZResult; @@ -210,14 +198,6 @@ pub trait Volume: Send + Sync { /// Creates a storage configured with some properties. async fn create_storage(&self, props: StorageConfig) -> ZResult>; - - /// Returns an interceptor that will be called before pushing any data - /// into a storage created by this backend. `None` can be returned for no interception point. - fn incoming_data_interceptor(&self) -> Option Sample + Send + Sync>>; - - /// Returns an interceptor that will be called before sending any reply - /// to a query from a storage created by this backend. `None` can be returned for no interception point. 
- fn outgoing_data_interceptor(&self) -> Option Sample + Send + Sync>>; } pub type VolumeInstance = Box; @@ -282,49 +262,3 @@ pub trait Storage: Send + Sync { /// Remember to fetch the entry corresponding to the `None` key async fn get_all_entries(&self) -> ZResult, Timestamp)>>; } - -/// A wrapper around the [`zenoh::queryable::Query`] allowing to call the -/// OutgoingDataInterceptor (if any) before to send the reply -pub struct Query { - q: zenoh::queryable::Query, - interceptor: Option Sample + Send + Sync>>, -} - -impl Query { - pub fn new( - q: zenoh::queryable::Query, - interceptor: Option Sample + Send + Sync>>, - ) -> Query { - Query { q, interceptor } - } - - /// The full [`Selector`] of this Query. - #[inline(always)] - pub fn selector(&self) -> Selector<'_> { - self.q.selector() - } - - /// The key selector part of this Query. - #[inline(always)] - pub fn key_expr(&self) -> &KeyExpr<'static> { - self.q.key_expr() - } - - /// This Query's selector parameters. - #[inline(always)] - pub fn parameters(&self) -> &str { - self.q.parameters() - } - - /// Sends a Sample as a reply to this Query - pub fn reply(&self, sample: Sample) -> ReplyBuilder<'_> { - // Call outgoing intercerceptor - let sample = if let Some(ref interceptor) = self.interceptor { - interceptor(sample) - } else { - sample - }; - // Send reply - self.q.reply_sample(sample) - } -} diff --git a/plugins/zenoh-plugin-storage-manager/src/backends_mgt.rs b/plugins/zenoh-plugin-storage-manager/src/backends_mgt.rs index aa7260e868..90a6ae6250 100644 --- a/plugins/zenoh-plugin-storage-manager/src/backends_mgt.rs +++ b/plugins/zenoh-plugin-storage-manager/src/backends_mgt.rs @@ -14,7 +14,6 @@ use super::storages_mgt::*; use flume::Sender; use std::sync::Arc; -use zenoh::prelude::r#async::*; use zenoh::Session; use zenoh_backend_traits::config::StorageConfig; use zenoh_backend_traits::{Capability, VolumeInstance}; @@ -23,16 +22,12 @@ use zenoh_result::ZResult; pub struct StoreIntercept { pub storage: 
Box, pub capability: Capability, - pub in_interceptor: Option Sample + Send + Sync>>, - pub out_interceptor: Option Sample + Send + Sync>>, } pub(crate) async fn create_and_start_storage( admin_key: String, config: StorageConfig, backend: &VolumeInstance, - in_interceptor: Option Sample + Send + Sync>>, - out_interceptor: Option Sample + Send + Sync>>, zenoh: Arc, ) -> ZResult> { log::trace!("Create storage '{}'", &admin_key); @@ -41,8 +36,6 @@ pub(crate) async fn create_and_start_storage( let store_intercept = StoreIntercept { storage, capability, - in_interceptor, - out_interceptor, }; start_storage(store_intercept, config, admin_key, zenoh).await diff --git a/plugins/zenoh-plugin-storage-manager/src/lib.rs b/plugins/zenoh-plugin-storage-manager/src/lib.rs index 0db30bbd6a..91df2f108d 100644 --- a/plugins/zenoh-plugin-storage-manager/src/lib.rs +++ b/plugins/zenoh-plugin-storage-manager/src/lib.rs @@ -239,14 +239,10 @@ impl StorageRuntimeInner { volume_id, backend.name() ); - let in_interceptor = backend.instance().incoming_data_interceptor(); - let out_interceptor = backend.instance().outgoing_data_interceptor(); let stopper = async_std::task::block_on(create_and_start_storage( admin_key, storage.clone(), backend.instance(), - in_interceptor, - out_interceptor, self.session.clone(), ))?; self.storages diff --git a/plugins/zenoh-plugin-storage-manager/src/memory_backend/mod.rs b/plugins/zenoh-plugin-storage-manager/src/memory_backend/mod.rs index ebb4922c9d..4e333b8592 100644 --- a/plugins/zenoh-plugin-storage-manager/src/memory_backend/mod.rs +++ b/plugins/zenoh-plugin-storage-manager/src/memory_backend/mod.rs @@ -61,26 +61,6 @@ impl Volume for MemoryBackend { log::debug!("Create Memory Storage with configuration: {:?}", properties); Ok(Box::new(MemoryStorage::new(properties).await?)) } - - fn incoming_data_interceptor(&self) -> Option Sample + Send + Sync>> { - // By default: no interception point - None - // To test interceptors, uncomment this line: - // 
Some(Arc::new(|sample| { - // trace!(">>>> IN INTERCEPTOR FOR {:?}", sample); - // sample - // })) - } - - fn outgoing_data_interceptor(&self) -> Option Sample + Send + Sync>> { - // By default: no interception point - None - // To test interceptors, uncomment this line: - // Some(Arc::new(|sample| { - // trace!("<<<< OUT INTERCEPTOR FOR {:?}", sample); - // sample - // })) - } } impl Drop for MemoryBackend { diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs index 0708dcabd9..35134dfe43 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs @@ -60,8 +60,6 @@ pub struct StorageService { capability: Capability, tombstones: Arc>>, wildcard_updates: Arc>>, - in_interceptor: Option Sample + Send + Sync>>, - out_interceptor: Option Sample + Send + Sync>>, replication: Option, } @@ -85,8 +83,6 @@ impl StorageService { capability: store_intercept.capability, tombstones: Arc::new(RwLock::new(KeBoxTree::default())), wildcard_updates: Arc::new(RwLock::new(KeBoxTree::default())), - in_interceptor: store_intercept.in_interceptor, - out_interceptor: store_intercept.out_interceptor, replication, }; if storage_service @@ -263,13 +259,6 @@ impl StorageService { // the trimming during PUT and GET should be handled by the plugin async fn process_sample(&self, sample: Sample) { log::trace!("[STORAGE] Processing sample: {:?}", sample); - // Call incoming data interceptor (if any) - let sample = if let Some(ref interceptor) = self.in_interceptor { - interceptor(sample) - } else { - sample - }; - // if wildcard, update wildcard_updates if sample.key_expr().is_wild() { self.register_wildcard_update(sample.clone()).await; @@ -523,12 +512,6 @@ impl StorageService { let sample = Sample::new(key.clone(), payload) .with_encoding(encoding) .with_timestamp(entry.timestamp); - // apply outgoing interceptor on results - let sample 
= if let Some(ref interceptor) = self.out_interceptor { - interceptor(sample) - } else { - sample - }; if let Err(e) = q.reply_sample(sample).res().await { log::warn!( "Storage '{}' raised an error replying a query: {}", @@ -561,12 +544,6 @@ impl StorageService { let sample = Sample::new(q.key_expr().clone(), payload) .with_encoding(encoding) .with_timestamp(entry.timestamp); - // apply outgoing interceptor on results - let sample = if let Some(ref interceptor) = self.out_interceptor { - interceptor(sample) - } else { - sample - }; if let Err(e) = q.reply_sample(sample).res().await { log::warn!( "Storage '{}' raised an error replying a query: {}", From 7d2abd44b19ed7ba86713f1752990ba344d07235 Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Sun, 24 Mar 2024 19:20:11 +0100 Subject: [PATCH 039/124] deconstruct sample api used --- .../src/replica/aligner.rs | 3 +-- .../src/replica/storage.rs | 25 +++++++++++-------- 2 files changed, 16 insertions(+), 12 deletions(-) diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs b/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs index 3f672382f1..f00029442f 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs @@ -12,11 +12,10 @@ // ZettaScale Zenoh Team, // -use crate::replica::storage::StorageSampleKind; - use super::storage::StorageSample; use super::{Digest, EraType, LogEntry, Snapshotter}; use super::{CONTENTS, ERA, INTERVALS, SUBINTERVALS}; +use crate::replica::storage::StorageSampleKind; use async_std::sync::{Arc, RwLock}; use flume::{Receiver, Sender}; use std::collections::{HashMap, HashSet}; diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs index 95af3c97a2..fbc734d716 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs @@ -24,6 +24,7 
@@ use std::time::{SystemTime, UNIX_EPOCH}; use zenoh::buffers::ZBuf; use zenoh::prelude::r#async::*; use zenoh::query::ConsolidationMode; +use zenoh::sample::SampleFields; use zenoh::sample_builder::{PutSampleBuilderTrait, SampleBuilder, SampleBuilderTrait}; use zenoh::time::{new_reception_timestamp, Timestamp, NTP64}; use zenoh::{Result as ZResult, Session}; @@ -55,19 +56,23 @@ pub struct StorageSample { impl From for StorageSample { fn from(sample: Sample) -> Self { - let timestamp = *sample.timestamp().unwrap_or(&new_reception_timestamp()); - // TODO: add API for disassembly of Sample - let key_expr = sample.key_expr().clone(); - let payload = sample.payload().clone(); - let encoding = sample.encoding().clone(); - let kind = match sample.kind() { - SampleKind::Put => StorageSampleKind::Put(Value::new(payload).with_encoding(encoding)), - SampleKind::Delete => StorageSampleKind::Delete, - }; - StorageSample { + let SampleFields { key_expr, timestamp, kind, + payload, + encoding, + .. + } = sample.into(); + StorageSample { + key_expr, + timestamp: timestamp.unwrap_or(new_reception_timestamp()), + kind: match kind { + SampleKind::Put => { + StorageSampleKind::Put(Value::new(payload).with_encoding(encoding)) + } + SampleKind::Delete => StorageSampleKind::Delete, + }, } } } From 2b1071f9b9b06dd10d401969ce5c8678560aea03 Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Sun, 24 Mar 2024 19:52:30 +0100 Subject: [PATCH 040/124] comment, clippy fix --- zenoh/src/sample.rs | 4 ++-- zenoh/src/sample_builder.rs | 20 ++++++++------------ 2 files changed, 10 insertions(+), 14 deletions(-) diff --git a/zenoh/src/sample.rs b/zenoh/src/sample.rs index 3ac3087836..acf8536a0e 100644 --- a/zenoh/src/sample.rs +++ b/zenoh/src/sample.rs @@ -67,10 +67,10 @@ pub(crate) trait DataInfoIntoSample { } impl DataInfoIntoSample for DataInfo { - // TODO: this is internal function. + // This function is for internal use only. // Technically it may create invalid sample (e.g. 
a delete sample with a payload and encoding) // The test for it is intentionally not added to avoid inserting extra "if" into hot path. - // This need to be additionally investigated and measured. + // The correctness of the data should be ensured by the caller. #[inline] fn into_sample( self, diff --git a/zenoh/src/sample_builder.rs b/zenoh/src/sample_builder.rs index c0ebf8c9d0..c7ee6e8368 100644 --- a/zenoh/src/sample_builder.rs +++ b/zenoh/src/sample_builder.rs @@ -144,12 +144,10 @@ pub struct PutSampleBuilder(SampleBuilder); impl From for PutSampleBuilder { fn from(sample_builder: SampleBuilder) -> Self { - Self(SampleBuilder { - 0: Sample { - kind: SampleKind::Put, - ..sample_builder.0 - }, - }) + Self(SampleBuilder(Sample { + kind: SampleKind::Put, + ..sample_builder.0 + })) } } @@ -237,12 +235,10 @@ pub struct DeleteSampleBuilder(SampleBuilder); impl From for DeleteSampleBuilder { fn from(sample_builder: SampleBuilder) -> Self { - Self(SampleBuilder { - 0: Sample { - kind: SampleKind::Delete, - ..sample_builder.0 - }, - }) + Self(SampleBuilder(Sample { + kind: SampleKind::Delete, + ..sample_builder.0 + })) } } From 3386237bea3e10f80ddb5089617f723577cef5b5 Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Sun, 24 Mar 2024 19:58:46 +0100 Subject: [PATCH 041/124] clippy fix --- plugins/zenoh-plugin-storage-manager/src/replica/storage.rs | 2 +- zenoh/tests/attachments.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs index fbc734d716..f2fb0386c3 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs @@ -443,7 +443,7 @@ impl StorageService { match self.ovderriding_wild_update(&k, &sample.timestamp).await { Some(overriding_update) => overriding_update.into_sample(k.clone()), - None => sample.clone().into(), + None => sample.clone(), }; 
let stripped_key = match self.strip_prefix(&sample_to_store.key_expr) { diff --git a/zenoh/tests/attachments.rs b/zenoh/tests/attachments.rs index 38d03b0a84..8d26cc0344 100644 --- a/zenoh/tests/attachments.rs +++ b/zenoh/tests/attachments.rs @@ -51,7 +51,7 @@ fn pubsub() { #[cfg(feature = "unstable")] #[test] fn queries() { - use zenoh::{prelude::sync::*, sample::Attachment}; + use zenoh::{prelude::sync::*, sample::Attachment, sample_builder::SampleBuilderTrait}; let zenoh = zenoh::open(Config::default()).res().unwrap(); let _sub = zenoh From f52140aec5909389323cdad70d84b9fc4ba71395 Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Sun, 24 Mar 2024 20:13:49 +0100 Subject: [PATCH 042/124] zenoh-ext links zenoh with unstable --- zenoh-ext/src/publication_cache.rs | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/zenoh-ext/src/publication_cache.rs b/zenoh-ext/src/publication_cache.rs index 78fff32014..a4eff1e932 100644 --- a/zenoh-ext/src/publication_cache.rs +++ b/zenoh-ext/src/publication_cache.rs @@ -121,10 +121,9 @@ async fn reply_sample(query: &Query, sample: &Sample) { let reply = query .reply_sample(sample.key_expr().clone().into_owned()) .with_timestamp_opt(sample.timestamp().cloned()); - #[cfg(feature = "unstable")] let reply = reply - .with_attachment_opt(sample.attachment()) - .with_source_info(sample.source_info()); + .with_attachment_opt(sample.attachment().cloned()) + .with_source_info(sample.source_info().clone()); if let Err(e) = match sample.kind() { SampleKind::Put => reply.put(sample.payload().clone()).res_async().await, SampleKind::Delete => reply.delete().res_async().await, From a629c765fb86823d3f4fa57d979936c49915221a Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Sun, 24 Mar 2024 22:06:24 +0100 Subject: [PATCH 043/124] samplefields used --- plugins/zenoh-plugin-example/src/lib.rs | 16 ++++++++-------- zenoh-ext/src/publication_cache.rs | 23 ++++++++++++++++------- 2 files changed, 24 insertions(+), 15 deletions(-) diff 
--git a/plugins/zenoh-plugin-example/src/lib.rs b/plugins/zenoh-plugin-example/src/lib.rs index 9b9dda40de..40f8d69488 100644 --- a/plugins/zenoh-plugin-example/src/lib.rs +++ b/plugins/zenoh-plugin-example/src/lib.rs @@ -24,6 +24,7 @@ use std::sync::{ use zenoh::plugins::{RunningPluginTrait, ZenohPlugin}; use zenoh::prelude::r#async::*; use zenoh::runtime::Runtime; +use zenoh::sample::SampleFields; use zenoh::sample_builder::SampleBuilderTrait; use zenoh_core::zlock; use zenoh_plugin_trait::{plugin_long_version, plugin_version, Plugin, PluginControl}; @@ -175,15 +176,14 @@ async fn run(runtime: Runtime, selector: KeyExpr<'_>, flag: Arc) { info!("Handling query '{}'", query.selector()); for (key_expr, sample) in stored.iter() { if query.selector().key_expr.intersects(unsafe{keyexpr::from_str_unchecked(key_expr)}) { + let SampleFields { key_expr, timestamp, attachment, source_info, payload, kind, .. } = sample.clone().into(); let reply = query - .reply_sample(sample.key_expr().clone().into_owned()) - .with_timestamp_opt(sample.timestamp().cloned()); - #[cfg(feature = "unstable")] - let reply = reply - .with_attachment_opt(sample.attachment()) - .with_source_info(sample.source_info()); - match sample.kind() { - SampleKind::Put => reply.put(sample.payload().clone()).res().await.unwrap(), + .reply_sample(key_expr) + .with_timestamp_opt(timestamp) + .with_attachment_opt(attachment) + .with_source_info(source_info); + match kind { + SampleKind::Put => reply.put(payload).res().await.unwrap(), SampleKind::Delete => reply.delete().res().await.unwrap(), } } diff --git a/zenoh-ext/src/publication_cache.rs b/zenoh-ext/src/publication_cache.rs index a4eff1e932..8a782a179e 100644 --- a/zenoh-ext/src/publication_cache.rs +++ b/zenoh-ext/src/publication_cache.rs @@ -20,6 +20,7 @@ use std::convert::TryInto; use std::future::Ready; use zenoh::prelude::r#async::*; use zenoh::queryable::{Query, Queryable}; +use zenoh::sample::SampleFields; use 
zenoh::sample_builder::SampleBuilderTrait; use zenoh::subscriber::FlumeSubscriber; use zenoh::SessionRef; @@ -118,14 +119,22 @@ pub struct PublicationCache<'a> { } async fn reply_sample(query: &Query, sample: &Sample) { + let SampleFields { + key_expr, + timestamp, + attachment, + source_info, + payload, + kind, + .. + } = sample.clone().into(); let reply = query - .reply_sample(sample.key_expr().clone().into_owned()) - .with_timestamp_opt(sample.timestamp().cloned()); - let reply = reply - .with_attachment_opt(sample.attachment().cloned()) - .with_source_info(sample.source_info().clone()); - if let Err(e) = match sample.kind() { - SampleKind::Put => reply.put(sample.payload().clone()).res_async().await, + .reply_sample(key_expr) + .with_timestamp_opt(timestamp) + .with_attachment_opt(attachment) + .with_source_info(source_info); + if let Err(e) = match kind { + SampleKind::Put => reply.put(payload).res_async().await, SampleKind::Delete => reply.delete().res_async().await, } { log::warn!("Error replying to query: {}", e); From 1945492ec9a27546e211e5bffac5bd5206cbdcd1 Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Mon, 25 Mar 2024 10:41:14 +0100 Subject: [PATCH 044/124] restored old storage manager code --- .../src/replica/aligner.rs | 22 +- .../src/replica/storage.rs | 343 +++++++----------- 2 files changed, 146 insertions(+), 219 deletions(-) diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs b/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs index f00029442f..a899196e7e 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs @@ -12,10 +12,8 @@ // ZettaScale Zenoh Team, // -use super::storage::StorageSample; use super::{Digest, EraType, LogEntry, Snapshotter}; use super::{CONTENTS, ERA, INTERVALS, SUBINTERVALS}; -use crate::replica::storage::StorageSampleKind; use async_std::sync::{Arc, RwLock}; use flume::{Receiver, Sender}; use 
std::collections::{HashMap, HashSet}; @@ -23,15 +21,17 @@ use std::str; use zenoh::key_expr::{KeyExpr, OwnedKeyExpr}; use zenoh::payload::StringOrBase64; use zenoh::prelude::r#async::*; +use zenoh::sample_builder::{PutSampleBuilder, PutSampleBuilderTrait, SampleBuilderTrait}; use zenoh::time::Timestamp; use zenoh::Session; +use zenoh_core::{AsyncResolve, SyncResolve}; pub struct Aligner { session: Arc, digest_key: OwnedKeyExpr, snapshotter: Arc, rx_digest: Receiver<(String, Digest)>, - tx_sample: Sender, + tx_sample: Sender, digests_processed: RwLock>, } @@ -40,7 +40,7 @@ impl Aligner { session: Arc, digest_key: OwnedKeyExpr, rx_digest: Receiver<(String, Digest)>, - tx_sample: Sender, + tx_sample: Sender, snapshotter: Arc, ) { let aligner = Aligner { @@ -107,11 +107,13 @@ impl Aligner { log::trace!("[ALIGNER] Received queried samples: {missing_data:?}"); for (key, (ts, value)) in missing_data { - let sample = StorageSample { - key_expr: key.into(), - timestamp: ts, - kind: StorageSampleKind::Put(value), - }; + let Value { + payload, encoding, .. 
+ } = value; + let sample = PutSampleBuilder::new(key, payload) + .with_encoding(encoding) + .with_timestamp(ts) + .res_sync(); log::debug!("[ALIGNER] Adding {:?} to storage", sample); self.tx_sample.send_async(sample).await.unwrap_or_else(|e| { log::error!("[ALIGNER] Error adding sample to storage: {}", e) @@ -329,7 +331,7 @@ impl Aligner { .get(&selector) .consolidation(zenoh::query::ConsolidationMode::None) .accept_replies(zenoh::query::ReplyKeyExpr::Any) - .res() + .res_async() .await { Ok(replies) => { diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs index f2fb0386c3..67ce871bb0 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs @@ -19,18 +19,19 @@ use async_trait::async_trait; use flume::{Receiver, Sender}; use futures::select; use std::collections::{HashMap, HashSet}; -use std::str::FromStr; +use std::str::{self, FromStr}; use std::time::{SystemTime, UNIX_EPOCH}; use zenoh::buffers::ZBuf; use zenoh::prelude::r#async::*; use zenoh::query::ConsolidationMode; -use zenoh::sample::SampleFields; -use zenoh::sample_builder::{PutSampleBuilderTrait, SampleBuilder, SampleBuilderTrait}; +use zenoh::sample_builder::{ + PutSampleBuilder, PutSampleBuilderTrait, SampleBuilder, SampleBuilderTrait, +}; use zenoh::time::{new_reception_timestamp, Timestamp, NTP64}; use zenoh::{Result as ZResult, Session}; use zenoh_backend_traits::config::{GarbageCollectionConfig, StorageConfig}; use zenoh_backend_traits::{Capability, History, Persistence, StorageInsertionResult, StoredData}; -use zenoh_core::SyncResolve; +use zenoh_core::{AsyncResolve, SyncResolve}; use zenoh_keyexpr::key_expr::OwnedKeyExpr; use zenoh_keyexpr::keyexpr_tree::impls::KeyedSetProvider; use zenoh_keyexpr::keyexpr_tree::{support::NonWild, support::UnknownWildness, KeBoxTree}; @@ -41,152 +42,15 @@ use zenoh_util::{zenoh_home, Timed, TimedEvent, 
Timer}; pub const WILDCARD_UPDATES_FILENAME: &str = "wildcard_updates"; pub const TOMBSTONE_FILENAME: &str = "tombstones"; -#[derive(Clone, Debug)] -pub enum StorageSampleKind { - Put(Value), - Delete, -} - -#[derive(Clone, Debug)] -pub struct StorageSample { - pub key_expr: KeyExpr<'static>, - pub timestamp: Timestamp, - pub kind: StorageSampleKind, -} - -impl From for StorageSample { - fn from(sample: Sample) -> Self { - let SampleFields { - key_expr, - timestamp, - kind, - payload, - encoding, - .. - } = sample.into(); - StorageSample { - key_expr, - timestamp: timestamp.unwrap_or(new_reception_timestamp()), - kind: match kind { - SampleKind::Put => { - StorageSampleKind::Put(Value::new(payload).with_encoding(encoding)) - } - SampleKind::Delete => StorageSampleKind::Delete, - }, - } - } -} - #[derive(Clone)] -enum Update { - Put(StoredData), - Delete(Timestamp), -} - -impl From for Update { - fn from(value: StorageSample) -> Self { - match value.kind { - StorageSampleKind::Put(data) => Update::Put(StoredData { - value: data, - timestamp: value.timestamp, - }), - StorageSampleKind::Delete => Update::Delete(value.timestamp), - } - } -} - -impl Update { - fn timestamp(&self) -> &Timestamp { - match self { - Update::Put(data) => &data.timestamp, - Update::Delete(ts) => ts, - } - } -} - -// implement from String for Update -impl TryFrom for Update { - type Error = zenoh::Error; - - fn try_from(value: String) -> Result { - let result: (String, String, String, Vec<&[u8]>) = serde_json::from_str(&value)?; - let mut payload = ZBuf::default(); - for slice in result.3 { - payload.push_zslice(slice.to_vec().into()); - } - let value = Value::new(payload).with_encoding(result.2); - let timestamp = Timestamp::from_str(&result.1).map_err(|_| "Error parsing timestamp")?; - if result.0.eq(&(SampleKind::Put).to_string()) { - Ok(Update::Put(StoredData { value, timestamp })) - } else { - Ok(Update::Delete(timestamp)) - } - } -} - -// implement to_string for Update -impl ToString for 
Update { - fn to_string(&self) -> String { - let result = match self { - Update::Put(data) => ( - SampleKind::Put.to_string(), - data.timestamp.to_string(), - data.value.encoding.to_string(), - data.value.payload.slices().collect::>(), - ), - Update::Delete(ts) => ( - SampleKind::Delete.to_string(), - ts.to_string(), - "".to_string(), - vec![], - ), - }; - serde_json::to_string_pretty(&result).unwrap() - } -} - -trait IntoStorageSample { - fn into_sample(self, key_expr: IntoKeyExpr) -> StorageSample - where - IntoKeyExpr: Into>; -} - -impl IntoStorageSample for StoredData { - fn into_sample(self, key_expr: IntoKeyExpr) -> StorageSample - where - IntoKeyExpr: Into>, - { - StorageSample { - key_expr: key_expr.into(), - timestamp: self.timestamp, - kind: StorageSampleKind::Put(self.value), - } - } -} - -impl IntoStorageSample for Update { - fn into_sample(self, key_expr: IntoKeyExpr) -> StorageSample - where - IntoKeyExpr: Into>, - { - match self { - Update::Put(data) => StorageSample { - key_expr: key_expr.into(), - timestamp: data.timestamp, - kind: StorageSampleKind::Put(data.value), - }, - Update::Delete(ts) => StorageSample { - key_expr: key_expr.into(), - timestamp: ts, - kind: StorageSampleKind::Delete, - }, - } - } +struct Update { + kind: SampleKind, + data: StoredData, } pub struct ReplicationService { pub empty_start: bool, - pub aligner_updates: Receiver, + pub aligner_updates: Receiver, pub log_propagation: Sender<(OwnedKeyExpr, Timestamp)>, } @@ -245,11 +109,10 @@ impl StorageService { let saved_wc = std::fs::read_to_string(zenoh_home().join(WILDCARD_UPDATES_FILENAME)).unwrap(); let saved_wc: HashMap = - serde_json::from_str(&saved_wc).unwrap(); // TODO: Remove unwrap + serde_json::from_str(&saved_wc).unwrap(); let mut wildcard_updates = storage_service.wildcard_updates.write().await; for (k, data) in saved_wc { - wildcard_updates.insert(&k, Update::try_from(data).unwrap()); - // TODO: Remove unwrap + wildcard_updates.insert(&k, construct_update(data)); 
} } } @@ -325,7 +188,7 @@ impl StorageService { log::error!("Sample {:?} is not timestamped. Please timestamp samples meant for replicated storage.", sample); } else { - self.process_sample(sample.into()).await; + self.process_sample(sample).await; } }, // on query on key_expr @@ -377,7 +240,7 @@ impl StorageService { } else { sample }; - self.process_sample(sample.into()).await; + self.process_sample(sample).await; }, // on query on key_expr query = storage_queryable.recv_async() => { @@ -407,46 +270,61 @@ impl StorageService { // The storage should only simply save the key, sample pair while put and retrieve the same during get // the trimming during PUT and GET should be handled by the plugin - async fn process_sample(&self, sample: StorageSample) { + async fn process_sample(&self, sample: Sample) { log::trace!("[STORAGE] Processing sample: {:?}", sample); - // if wildcard, update wildcard_updates - if sample.key_expr.is_wild() { + if sample.key_expr().is_wild() { self.register_wildcard_update(sample.clone()).await; } - let matching_keys = if sample.key_expr.is_wild() { - self.get_matching_keys(&sample.key_expr).await + let matching_keys = if sample.key_expr().is_wild() { + self.get_matching_keys(sample.key_expr()).await } else { - vec![sample.key_expr.clone().into()] + vec![sample.key_expr().clone().into()] }; log::trace!( "The list of keys matching `{}` is : {:?}", - sample.key_expr, + sample.key_expr(), matching_keys ); for k in matching_keys { - if !self.is_deleted(&k.clone(), &sample.timestamp).await + if !self + .is_deleted(&k.clone(), sample.timestamp().unwrap()) + .await && (self.capability.history.eq(&History::All) || (self.capability.history.eq(&History::Latest) - && self.is_latest(&k, &sample.timestamp).await)) + && self.is_latest(&k, sample.timestamp().unwrap()).await)) { log::trace!( "Sample `{:?}` identified as neded processing for key {}", sample, - &k + k ); // there might be the case that the actual update was outdated due to a wild card update, 
but not stored yet in the storage. // get the relevant wild card entry and use that value and timestamp to update the storage - let sample_to_store = - match self.ovderriding_wild_update(&k, &sample.timestamp).await { - Some(overriding_update) => overriding_update.into_sample(k.clone()), - - None => sample.clone(), - }; + let sample_to_store = match self + .ovderriding_wild_update(&k, sample.timestamp().unwrap()) + .await + { + Some(overriding_update) => { + let Value { + payload, encoding, .. + } = overriding_update.data.value; + PutSampleBuilder::new(KeyExpr::from(k.clone()), payload) + .with_encoding(encoding) + .with_timestamp(overriding_update.data.timestamp) + .res_sync() + } + None => { + PutSampleBuilder::new(KeyExpr::from(k.clone()), sample.payload().clone()) + .with_encoding(sample.encoding().clone()) + .with_timestamp(*sample.timestamp().unwrap()) + .res_sync() + } + }; - let stripped_key = match self.strip_prefix(&sample_to_store.key_expr) { + let stripped_key = match self.strip_prefix(sample_to_store.key_expr()) { Ok(stripped) => stripped, Err(e) => { log::error!("{}", e); @@ -454,17 +332,23 @@ impl StorageService { } }; let mut storage = self.storage.lock().await; - let result = match sample_to_store.kind { - StorageSampleKind::Put(data) => { + let result = match sample.kind() { + SampleKind::Put => { storage - .put(stripped_key, data, sample_to_store.timestamp) + .put( + stripped_key, + Value::new(sample_to_store.payload().clone()) + .with_encoding(sample_to_store.encoding().clone()), + *sample_to_store.timestamp().unwrap(), + ) .await } - StorageSampleKind::Delete => { + SampleKind::Delete => { // register a tombstone - self.mark_tombstone(&k, sample_to_store.timestamp).await; + self.mark_tombstone(&k, *sample_to_store.timestamp().unwrap()) + .await; storage - .delete(stripped_key, sample_to_store.timestamp) + .delete(stripped_key, *sample_to_store.timestamp().unwrap()) .await } }; @@ -478,7 +362,7 @@ impl StorageService { .as_ref() .unwrap() 
.log_propagation - .send((k.clone(), sample_to_store.timestamp)); + .send((k.clone(), *sample_to_store.timestamp().unwrap())); match sending { Ok(_) => (), Err(e) => { @@ -509,16 +393,26 @@ impl StorageService { } } - async fn register_wildcard_update(&self, sample: StorageSample) { + async fn register_wildcard_update(&self, sample: Sample) { // @TODO: change into a better store that does incremental writes - let key = sample.key_expr.clone(); + let key = sample.key_expr().clone(); let mut wildcards = self.wildcard_updates.write().await; - wildcards.insert(&key, sample.into()); + let timestamp = *sample.timestamp().unwrap(); + wildcards.insert( + &key, + Update { + kind: sample.kind(), + data: StoredData { + value: Value::from(sample), + timestamp, + }, + }, + ); if self.capability.persistence.eq(&Persistence::Durable) { // flush to disk to makeit durable let mut serialized_data = HashMap::new(); for (k, update) in wildcards.key_value_pairs() { - serialized_data.insert(k, update.to_string()); + serialized_data.insert(k, serialize_update(update)); } if let Err(e) = std::fs::write( zenoh_home().join(WILDCARD_UPDATES_FILENAME), @@ -547,36 +441,34 @@ impl StorageService { let mut update = None; for node in wildcards.intersecting_keys(key_expr) { let weight = wildcards.weight_at(&node); - if let Some(weight) = weight { - if weight.timestamp() > ts { - // if the key matches a wild card update, check whether it was saved in storage - // remember that wild card updates change only existing keys - let stripped_key = match self.strip_prefix(&key_expr.into()) { - Ok(stripped) => stripped, - Err(e) => { - log::error!("{}", e); - break; - } - }; - let mut storage = self.storage.lock().await; - match storage.get(stripped_key, "").await { - Ok(stored_data) => { - for entry in stored_data { - if entry.timestamp > *ts { - return None; - } + if weight.is_some() && weight.unwrap().data.timestamp > *ts { + // if the key matches a wild card update, check whether it was saved in storage 
+ // remember that wild card updates change only existing keys + let stripped_key = match self.strip_prefix(&key_expr.into()) { + Ok(stripped) => stripped, + Err(e) => { + log::error!("{}", e); + break; + } + }; + let mut storage = self.storage.lock().await; + match storage.get(stripped_key, "").await { + Ok(stored_data) => { + for entry in stored_data { + if entry.timestamp > *ts { + return None; } } - Err(e) => { - log::warn!( - "Storage '{}' raised an error fetching a query on key {} : {}", - self.name, - key_expr, - e - ); - ts = weight.timestamp(); - update = Some(weight.clone()); - } + } + Err(e) => { + log::warn!( + "Storage '{}' raised an error fetching a query on key {} : {}", + self.name, + key_expr, + e + ); + ts = &weight.unwrap().data.timestamp; + update = Some(weight.unwrap().clone()); } } } @@ -629,8 +521,12 @@ impl StorageService { match storage.get(stripped_key, q.parameters()).await { Ok(stored_data) => { for entry in stored_data { + let Value { + payload, encoding, .. 
+ } = entry.value; if let Err(e) = q - .reply(key.clone(), entry.value.payload) + .reply(key.clone(), payload) + .with_encoding(encoding) .with_timestamp(entry.timestamp) .res_async() .await @@ -776,7 +672,7 @@ impl StorageService { while let Ok(reply) = replies.recv_async().await { match reply.sample { Ok(sample) => { - self.process_sample(sample.into()).await; + self.process_sample(sample).await; } Err(e) => log::warn!( "Storage '{}' received an error to align query: {:?}", @@ -789,6 +685,35 @@ impl StorageService { } } +fn serialize_update(update: &Update) -> String { + let result = ( + update.kind.to_string(), + update.data.timestamp.to_string(), + update.data.value.encoding.to_string(), + update.data.value.payload.slices().collect::>(), + ); + serde_json::to_string_pretty(&result).unwrap() +} + +fn construct_update(data: String) -> Update { + let result: (String, String, String, Vec<&[u8]>) = serde_json::from_str(&data).unwrap(); // @TODO: remove the unwrap() + let mut payload = ZBuf::default(); + for slice in result.3 { + payload.push_zslice(slice.to_vec().into()); + } + let value = Value::new(payload).with_encoding(result.2); + let data = StoredData { + value, + timestamp: Timestamp::from_str(&result.1).unwrap(), // @TODO: remove the unwrap() + }; + let kind = if result.0.eq(&(SampleKind::Put).to_string()) { + SampleKind::Put + } else { + SampleKind::Delete + }; + Update { kind, data } +} + // Periodic event cleaning-up data info for old metadata struct GarbageCollectionEvent { config: GarbageCollectionConfig, @@ -820,7 +745,7 @@ impl Timed for GarbageCollectionEvent { let mut to_be_removed = HashSet::new(); for (k, update) in wildcard_updates.key_value_pairs() { - let ts = update.timestamp(); + let ts = update.data.timestamp; if ts.get_time() < &time_limit { // mark key to be removed to_be_removed.insert(k); From 65a4d7f8646b159641df015a9a47608d5bae26af Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Mon, 25 Mar 2024 10:54:56 +0100 Subject: [PATCH 
045/124] interceptors removed from plugin storage API (#859) --- plugins/zenoh-backend-example/src/lib.rs | 13 +--- plugins/zenoh-backend-traits/src/lib.rs | 68 +------------------ .../src/backends_mgt.rs | 7 -- .../zenoh-plugin-storage-manager/src/lib.rs | 4 -- .../src/memory_backend/mod.rs | 20 ------ .../src/replica/storage.rs | 23 ------- 6 files changed, 3 insertions(+), 132 deletions(-) diff --git a/plugins/zenoh-backend-example/src/lib.rs b/plugins/zenoh-backend-example/src/lib.rs index 602d29f375..f81231a498 100644 --- a/plugins/zenoh-backend-example/src/lib.rs +++ b/plugins/zenoh-backend-example/src/lib.rs @@ -13,11 +13,8 @@ // use async_std::sync::RwLock; use async_trait::async_trait; -use std::{ - collections::{hash_map::Entry, HashMap}, - sync::Arc, -}; -use zenoh::{prelude::OwnedKeyExpr, sample::Sample, time::Timestamp, value::Value}; +use std::collections::{hash_map::Entry, HashMap}; +use zenoh::{prelude::OwnedKeyExpr, time::Timestamp, value::Value}; use zenoh_backend_traits::{ config::{StorageConfig, VolumeConfig}, Capability, History, Persistence, Storage, StorageInsertionResult, StoredData, Volume, @@ -71,12 +68,6 @@ impl Volume for ExampleBackend { async fn create_storage(&self, _props: StorageConfig) -> ZResult> { Ok(Box::::default()) } - fn incoming_data_interceptor(&self) -> Option Sample + Send + Sync>> { - None - } - fn outgoing_data_interceptor(&self) -> Option Sample + Send + Sync>> { - None - } } #[async_trait] diff --git a/plugins/zenoh-backend-traits/src/lib.rs b/plugins/zenoh-backend-traits/src/lib.rs index d17e6dfd77..40d022f1ec 100644 --- a/plugins/zenoh-backend-traits/src/lib.rs +++ b/plugins/zenoh-backend-traits/src/lib.rs @@ -68,16 +68,6 @@ //! // The properties are the ones passed via a PUT in the admin space for Storage creation. //! Ok(Box::new(MyStorage::new(properties).await?)) //! } -//! -//! fn incoming_data_interceptor(&self) -> Option Sample + Send + Sync>> { -//! 
// No interception point for incoming data (on PUT operations) -//! None -//! } -//! -//! fn outgoing_data_interceptor(&self) -> Option Sample + Send + Sync>> { -//! // No interception point for outgoing data (on GET operations) -//! None -//! } //! } //! //! // Your Storage implementation @@ -135,9 +125,7 @@ use async_trait::async_trait; use const_format::concatcp; -use std::sync::Arc; -use zenoh::prelude::{KeyExpr, OwnedKeyExpr, Sample, Selector}; -use zenoh::queryable::ReplyBuilder; +use zenoh::prelude::OwnedKeyExpr; use zenoh::time::Timestamp; use zenoh::value::Value; pub use zenoh::Result as ZResult; @@ -210,14 +198,6 @@ pub trait Volume: Send + Sync { /// Creates a storage configured with some properties. async fn create_storage(&self, props: StorageConfig) -> ZResult>; - - /// Returns an interceptor that will be called before pushing any data - /// into a storage created by this backend. `None` can be returned for no interception point. - fn incoming_data_interceptor(&self) -> Option Sample + Send + Sync>>; - - /// Returns an interceptor that will be called before sending any reply - /// to a query from a storage created by this backend. `None` can be returned for no interception point. - fn outgoing_data_interceptor(&self) -> Option Sample + Send + Sync>>; } pub type VolumeInstance = Box; @@ -282,49 +262,3 @@ pub trait Storage: Send + Sync { /// Remember to fetch the entry corresponding to the `None` key async fn get_all_entries(&self) -> ZResult, Timestamp)>>; } - -/// A wrapper around the [`zenoh::queryable::Query`] allowing to call the -/// OutgoingDataInterceptor (if any) before to send the reply -pub struct Query { - q: zenoh::queryable::Query, - interceptor: Option Sample + Send + Sync>>, -} - -impl Query { - pub fn new( - q: zenoh::queryable::Query, - interceptor: Option Sample + Send + Sync>>, - ) -> Query { - Query { q, interceptor } - } - - /// The full [`Selector`] of this Query. 
- #[inline(always)] - pub fn selector(&self) -> Selector<'_> { - self.q.selector() - } - - /// The key selector part of this Query. - #[inline(always)] - pub fn key_expr(&self) -> &KeyExpr<'static> { - self.q.key_expr() - } - - /// This Query's selector parameters. - #[inline(always)] - pub fn parameters(&self) -> &str { - self.q.parameters() - } - - /// Sends a Sample as a reply to this Query - pub fn reply(&self, sample: Sample) -> ReplyBuilder<'_> { - // Call outgoing intercerceptor - let sample = if let Some(ref interceptor) = self.interceptor { - interceptor(sample) - } else { - sample - }; - // Send reply - self.q.reply_sample(sample) - } -} diff --git a/plugins/zenoh-plugin-storage-manager/src/backends_mgt.rs b/plugins/zenoh-plugin-storage-manager/src/backends_mgt.rs index aa7260e868..90a6ae6250 100644 --- a/plugins/zenoh-plugin-storage-manager/src/backends_mgt.rs +++ b/plugins/zenoh-plugin-storage-manager/src/backends_mgt.rs @@ -14,7 +14,6 @@ use super::storages_mgt::*; use flume::Sender; use std::sync::Arc; -use zenoh::prelude::r#async::*; use zenoh::Session; use zenoh_backend_traits::config::StorageConfig; use zenoh_backend_traits::{Capability, VolumeInstance}; @@ -23,16 +22,12 @@ use zenoh_result::ZResult; pub struct StoreIntercept { pub storage: Box, pub capability: Capability, - pub in_interceptor: Option Sample + Send + Sync>>, - pub out_interceptor: Option Sample + Send + Sync>>, } pub(crate) async fn create_and_start_storage( admin_key: String, config: StorageConfig, backend: &VolumeInstance, - in_interceptor: Option Sample + Send + Sync>>, - out_interceptor: Option Sample + Send + Sync>>, zenoh: Arc, ) -> ZResult> { log::trace!("Create storage '{}'", &admin_key); @@ -41,8 +36,6 @@ pub(crate) async fn create_and_start_storage( let store_intercept = StoreIntercept { storage, capability, - in_interceptor, - out_interceptor, }; start_storage(store_intercept, config, admin_key, zenoh).await diff --git a/plugins/zenoh-plugin-storage-manager/src/lib.rs 
b/plugins/zenoh-plugin-storage-manager/src/lib.rs index 0db30bbd6a..91df2f108d 100644 --- a/plugins/zenoh-plugin-storage-manager/src/lib.rs +++ b/plugins/zenoh-plugin-storage-manager/src/lib.rs @@ -239,14 +239,10 @@ impl StorageRuntimeInner { volume_id, backend.name() ); - let in_interceptor = backend.instance().incoming_data_interceptor(); - let out_interceptor = backend.instance().outgoing_data_interceptor(); let stopper = async_std::task::block_on(create_and_start_storage( admin_key, storage.clone(), backend.instance(), - in_interceptor, - out_interceptor, self.session.clone(), ))?; self.storages diff --git a/plugins/zenoh-plugin-storage-manager/src/memory_backend/mod.rs b/plugins/zenoh-plugin-storage-manager/src/memory_backend/mod.rs index ebb4922c9d..4e333b8592 100644 --- a/plugins/zenoh-plugin-storage-manager/src/memory_backend/mod.rs +++ b/plugins/zenoh-plugin-storage-manager/src/memory_backend/mod.rs @@ -61,26 +61,6 @@ impl Volume for MemoryBackend { log::debug!("Create Memory Storage with configuration: {:?}", properties); Ok(Box::new(MemoryStorage::new(properties).await?)) } - - fn incoming_data_interceptor(&self) -> Option Sample + Send + Sync>> { - // By default: no interception point - None - // To test interceptors, uncomment this line: - // Some(Arc::new(|sample| { - // trace!(">>>> IN INTERCEPTOR FOR {:?}", sample); - // sample - // })) - } - - fn outgoing_data_interceptor(&self) -> Option Sample + Send + Sync>> { - // By default: no interception point - None - // To test interceptors, uncomment this line: - // Some(Arc::new(|sample| { - // trace!("<<<< OUT INTERCEPTOR FOR {:?}", sample); - // sample - // })) - } } impl Drop for MemoryBackend { diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs index 0708dcabd9..35134dfe43 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs @@ 
-60,8 +60,6 @@ pub struct StorageService { capability: Capability, tombstones: Arc>>, wildcard_updates: Arc>>, - in_interceptor: Option Sample + Send + Sync>>, - out_interceptor: Option Sample + Send + Sync>>, replication: Option, } @@ -85,8 +83,6 @@ impl StorageService { capability: store_intercept.capability, tombstones: Arc::new(RwLock::new(KeBoxTree::default())), wildcard_updates: Arc::new(RwLock::new(KeBoxTree::default())), - in_interceptor: store_intercept.in_interceptor, - out_interceptor: store_intercept.out_interceptor, replication, }; if storage_service @@ -263,13 +259,6 @@ impl StorageService { // the trimming during PUT and GET should be handled by the plugin async fn process_sample(&self, sample: Sample) { log::trace!("[STORAGE] Processing sample: {:?}", sample); - // Call incoming data interceptor (if any) - let sample = if let Some(ref interceptor) = self.in_interceptor { - interceptor(sample) - } else { - sample - }; - // if wildcard, update wildcard_updates if sample.key_expr().is_wild() { self.register_wildcard_update(sample.clone()).await; @@ -523,12 +512,6 @@ impl StorageService { let sample = Sample::new(key.clone(), payload) .with_encoding(encoding) .with_timestamp(entry.timestamp); - // apply outgoing interceptor on results - let sample = if let Some(ref interceptor) = self.out_interceptor { - interceptor(sample) - } else { - sample - }; if let Err(e) = q.reply_sample(sample).res().await { log::warn!( "Storage '{}' raised an error replying a query: {}", @@ -561,12 +544,6 @@ impl StorageService { let sample = Sample::new(q.key_expr().clone(), payload) .with_encoding(encoding) .with_timestamp(entry.timestamp); - // apply outgoing interceptor on results - let sample = if let Some(ref interceptor) = self.out_interceptor { - interceptor(sample) - } else { - sample - }; if let Err(e) = q.reply_sample(sample).res().await { log::warn!( "Storage '{}' raised an error replying a query: {}", From 48d8d776986ba31d0030b23250be00da93043b64 Mon Sep 17 
00:00:00 2001 From: Michael Ilyin Date: Mon, 25 Mar 2024 11:49:52 +0100 Subject: [PATCH 046/124] separate qosbuilder trait --- zenoh/src/queryable.rs | 8 +++++++- zenoh/src/sample_builder.rs | 18 +++++++++++++++--- 2 files changed, 22 insertions(+), 4 deletions(-) diff --git a/zenoh/src/queryable.rs b/zenoh/src/queryable.rs index f2e00e47c6..68b27526ce 100644 --- a/zenoh/src/queryable.rs +++ b/zenoh/src/queryable.rs @@ -21,7 +21,7 @@ use crate::prelude::*; use crate::sample::SourceInfo; use crate::sample_builder::{ DeleteSampleBuilder, DeleteSampleBuilderTrait, PutSampleBuilder, PutSampleBuilderTrait, - SampleBuilder, SampleBuilderTrait, + QoSBuilderTrait, SampleBuilder, SampleBuilderTrait, }; use crate::Id; use crate::SessionRef; @@ -287,7 +287,9 @@ impl SampleBuilderTrait for ReplySampleBuilder<'_> { ..self } } +} +impl QoSBuilderTrait for ReplySampleBuilder<'_> { fn congestion_control(self, congestion_control: CongestionControl) -> Self { Self { sample_builder: self.sample_builder.congestion_control(congestion_control), @@ -366,7 +368,9 @@ impl SampleBuilderTrait for ReplyBuilder<'_> { ..self } } +} +impl QoSBuilderTrait for ReplyBuilder<'_> { fn congestion_control(self, congestion_control: CongestionControl) -> Self { Self { sample_builder: self.sample_builder.congestion_control(congestion_control), @@ -464,7 +468,9 @@ impl SampleBuilderTrait for ReplyDelBuilder<'_> { ..self } } +} +impl QoSBuilderTrait for ReplyDelBuilder<'_> { fn congestion_control(self, congestion_control: CongestionControl) -> Self { Self { sample_builder: self.sample_builder.congestion_control(congestion_control), diff --git a/zenoh/src/sample_builder.rs b/zenoh/src/sample_builder.rs index c7ee6e8368..7545646b91 100644 --- a/zenoh/src/sample_builder.rs +++ b/zenoh/src/sample_builder.rs @@ -28,6 +28,12 @@ use zenoh_core::Resolvable; use zenoh_core::SyncResolve; use zenoh_protocol::core::CongestionControl; +pub trait QoSBuilderTrait { + fn congestion_control(self, congestion_control: 
CongestionControl) -> Self; + fn priority(self, priority: Priority) -> Self; + fn express(self, is_express: bool) -> Self; +} + pub trait SampleBuilderTrait { fn with_keyexpr(self, key_expr: IntoKeyExpr) -> Self where @@ -40,9 +46,6 @@ pub trait SampleBuilderTrait { fn with_attachment_opt(self, attachment: Option) -> Self; #[zenoh_macros::unstable] fn with_attachment(self, attachment: Attachment) -> Self; - fn congestion_control(self, congestion_control: CongestionControl) -> Self; - fn priority(self, priority: Priority) -> Self; - fn express(self, is_express: bool) -> Self; } pub trait PutSampleBuilderTrait: SampleBuilderTrait { @@ -119,6 +122,9 @@ impl SampleBuilderTrait for SampleBuilder { fn with_attachment(self, attachment: Attachment) -> Self { self.with_attachment_opt(Some(attachment)) } +} + +impl QoSBuilderTrait for SampleBuilder { fn congestion_control(self, congestion_control: CongestionControl) -> Self { Self(Sample { qos: self.0.qos.with_congestion_control(congestion_control), @@ -201,6 +207,9 @@ impl SampleBuilderTrait for PutSampleBuilder { fn with_attachment_opt(self, attachment: Option) -> Self { Self(self.0.with_attachment_opt(attachment)) } +} + +impl QoSBuilderTrait for PutSampleBuilder { fn congestion_control(self, congestion_control: CongestionControl) -> Self { Self(self.0.congestion_control(congestion_control)) } @@ -291,6 +300,9 @@ impl SampleBuilderTrait for DeleteSampleBuilder { fn with_attachment_opt(self, attachment: Option) -> Self { Self(self.0.with_attachment_opt(attachment)) } +} + +impl QoSBuilderTrait for DeleteSampleBuilder { fn congestion_control(self, congestion_control: CongestionControl) -> Self { Self(self.0.congestion_control(congestion_control)) } From 322a4e06b0acdc5dc25be1b6e4abcd0a5c04bf82 Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Mon, 25 Mar 2024 15:56:17 +0100 Subject: [PATCH 047/124] removed `with_keyexpr` from trait --- zenoh/src/queryable.rs | 30 -------------------------- zenoh/src/sample_builder.rs | 42 
++++++++++++++++++++----------------- 2 files changed, 23 insertions(+), 49 deletions(-) diff --git a/zenoh/src/queryable.rs b/zenoh/src/queryable.rs index 68b27526ce..9edb9fb26c 100644 --- a/zenoh/src/queryable.rs +++ b/zenoh/src/queryable.rs @@ -240,16 +240,6 @@ impl<'a> ReplySampleBuilder<'a> { } impl SampleBuilderTrait for ReplySampleBuilder<'_> { - fn with_keyexpr(self, key_expr: IntoKeyExpr) -> Self - where - IntoKeyExpr: Into>, - { - Self { - sample_builder: self.sample_builder.with_keyexpr(key_expr), - ..self - } - } - fn with_timestamp_opt(self, timestamp: Option) -> Self { Self { sample_builder: self.sample_builder.with_timestamp_opt(timestamp), @@ -321,16 +311,6 @@ pub struct ReplyBuilder<'a> { } impl SampleBuilderTrait for ReplyBuilder<'_> { - fn with_keyexpr(self, key_expr: IntoKeyExpr) -> Self - where - IntoKeyExpr: Into>, - { - Self { - sample_builder: self.sample_builder.with_keyexpr(key_expr), - ..self - } - } - fn with_timestamp_opt(self, timestamp: Option) -> Self { Self { sample_builder: self.sample_builder.with_timestamp_opt(timestamp), @@ -421,16 +401,6 @@ pub struct ReplyDelBuilder<'a> { } impl SampleBuilderTrait for ReplyDelBuilder<'_> { - fn with_keyexpr(self, key_expr: IntoKeyExpr) -> Self - where - IntoKeyExpr: Into>, - { - Self { - sample_builder: self.sample_builder.with_keyexpr(key_expr), - ..self - } - } - fn with_timestamp_opt(self, timestamp: Option) -> Self { Self { sample_builder: self.sample_builder.with_timestamp_opt(timestamp), diff --git a/zenoh/src/sample_builder.rs b/zenoh/src/sample_builder.rs index 7545646b91..0df98773fc 100644 --- a/zenoh/src/sample_builder.rs +++ b/zenoh/src/sample_builder.rs @@ -29,15 +29,17 @@ use zenoh_core::SyncResolve; use zenoh_protocol::core::CongestionControl; pub trait QoSBuilderTrait { + /// Change the `congestion_control` to apply when routing the data. fn congestion_control(self, congestion_control: CongestionControl) -> Self; + /// Change the priority of the written data. 
fn priority(self, priority: Priority) -> Self; + /// Change the `express` policy to apply when routing the data. + /// When express is set to `true`, then the message will not be batched. + /// This usually has a positive impact on latency but negative impact on throughput. fn express(self, is_express: bool) -> Self; } pub trait SampleBuilderTrait { - fn with_keyexpr(self, key_expr: IntoKeyExpr) -> Self - where - IntoKeyExpr: Into>; fn with_timestamp_opt(self, timestamp: Option) -> Self; fn with_timestamp(self, timestamp: Timestamp) -> Self; #[zenoh_macros::unstable] @@ -78,10 +80,8 @@ impl SampleBuilder { attachment: None, }) } -} - -impl SampleBuilderTrait for SampleBuilder { - fn with_keyexpr(self, key_expr: IntoKeyExpr) -> Self + /// Allows to change keyexpr of [`Sample`] + pub fn with_keyexpr(self, key_expr: IntoKeyExpr) -> Self where IntoKeyExpr: Into>, { @@ -90,7 +90,9 @@ impl SampleBuilderTrait for SampleBuilder { ..self.0 }) } +} +impl SampleBuilderTrait for SampleBuilder { fn with_timestamp_opt(self, timestamp: Option) -> Self { Self(Sample { timestamp, @@ -176,6 +178,13 @@ impl PutSampleBuilder { attachment: None, })) } + /// Allows to change keyexpr of [`Sample`] + pub fn with_keyexpr(self, key_expr: IntoKeyExpr) -> Self + where + IntoKeyExpr: Into>, + { + Self(self.0.with_keyexpr(key_expr)) + } // It's convenient to set QoS as a whole for internal usage. For user API there are `congestion_control`, `priority` and `express` methods. 
pub(crate) fn with_qos(self, qos: QoS) -> Self { Self(SampleBuilder(Sample { qos, ..self.0 .0 })) @@ -183,12 +192,6 @@ impl PutSampleBuilder { } impl SampleBuilderTrait for PutSampleBuilder { - fn with_keyexpr(self, key_expr: IntoKeyExpr) -> Self - where - IntoKeyExpr: Into>, - { - Self(self.0.with_keyexpr(key_expr)) - } fn with_timestamp(self, timestamp: Timestamp) -> Self { Self(self.0.with_timestamp(timestamp)) } @@ -269,6 +272,13 @@ impl DeleteSampleBuilder { attachment: None, })) } + /// Allows to change keyexpr of [`Sample`] + pub fn with_keyexpr(self, key_expr: IntoKeyExpr) -> Self + where + IntoKeyExpr: Into>, + { + Self(self.0.with_keyexpr(key_expr)) + } // It's convenient to set QoS as a whole for internal usage. For user API there are `congestion_control`, `priority` and `express` methods. pub(crate) fn with_qos(self, qos: QoS) -> Self { Self(SampleBuilder(Sample { qos, ..self.0 .0 })) @@ -276,12 +286,6 @@ impl DeleteSampleBuilder { } impl SampleBuilderTrait for DeleteSampleBuilder { - fn with_keyexpr(self, key_expr: IntoKeyExpr) -> Self - where - IntoKeyExpr: Into>, - { - Self(self.0.with_keyexpr(key_expr)) - } fn with_timestamp(self, timestamp: Timestamp) -> Self { Self(self.0.with_timestamp(timestamp)) } From 9515c7d63bec0744d9a1bf2e86b7242ee9121480 Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Mon, 25 Mar 2024 17:36:36 +0100 Subject: [PATCH 048/124] put, delete builder --- .../zenoh-plugin-rest/examples/z_serve_sse.rs | 1 + plugins/zenoh-plugin-rest/src/lib.rs | 1 + zenoh-ext/src/group.rs | 1 + zenoh/src/publication.rs | 284 +++++++++++++----- zenoh/src/sample_builder.rs | 6 + zenoh/src/session.rs | 12 +- zenoh/tests/qos.rs | 1 + zenoh/tests/routing.rs | 1 + zenoh/tests/session.rs | 1 + zenoh/tests/unicity.rs | 1 + 10 files changed, 233 insertions(+), 76 deletions(-) diff --git a/plugins/zenoh-plugin-rest/examples/z_serve_sse.rs b/plugins/zenoh-plugin-rest/examples/z_serve_sse.rs index bb76005d6e..48f152e488 100644 --- 
a/plugins/zenoh-plugin-rest/examples/z_serve_sse.rs +++ b/plugins/zenoh-plugin-rest/examples/z_serve_sse.rs @@ -15,6 +15,7 @@ use clap::{arg, Command}; use std::time::Duration; use zenoh::prelude::r#async::*; use zenoh::publication::CongestionControl; +use zenoh::sample_builder::QoSBuilderTrait; use zenoh::{config::Config, key_expr::keyexpr}; const HTML: &str = r#" diff --git a/plugins/zenoh-plugin-rest/src/lib.rs b/plugins/zenoh-plugin-rest/src/lib.rs index e2718f6579..cc97590636 100644 --- a/plugins/zenoh-plugin-rest/src/lib.rs +++ b/plugins/zenoh-plugin-rest/src/lib.rs @@ -34,6 +34,7 @@ use zenoh::plugins::{RunningPluginTrait, ZenohPlugin}; use zenoh::prelude::r#async::*; use zenoh::query::{QueryConsolidation, Reply}; use zenoh::runtime::Runtime; +use zenoh::sample_builder::PutSampleBuilderTrait; use zenoh::selector::TIME_RANGE_KEY; use zenoh::Session; use zenoh_plugin_trait::{plugin_long_version, plugin_version, Plugin, PluginControl}; diff --git a/zenoh-ext/src/group.rs b/zenoh-ext/src/group.rs index 41007d8b87..973baf271b 100644 --- a/zenoh-ext/src/group.rs +++ b/zenoh-ext/src/group.rs @@ -28,6 +28,7 @@ use std::time::{Duration, Instant}; use zenoh::prelude::r#async::*; use zenoh::publication::Publisher; use zenoh::query::ConsolidationMode; +use zenoh::sample_builder::QoSBuilderTrait; use zenoh::Error as ZError; use zenoh::Result as ZResult; use zenoh::Session; diff --git a/zenoh/src/publication.rs b/zenoh/src/publication.rs index 392c0bf8c1..97f485f1e3 100644 --- a/zenoh/src/publication.rs +++ b/zenoh/src/publication.rs @@ -18,6 +18,9 @@ use crate::prelude::*; #[zenoh_macros::unstable] use crate::sample::Attachment; use crate::sample::{DataInfo, QoS, Sample, SampleKind}; +use crate::sample_builder::{ + DeleteSampleBuilderTrait, PutSampleBuilderTrait, QoSBuilderTrait, SampleBuilderTrait, +}; use crate::SessionRef; use crate::Undeclarable; #[cfg(feature = "unstable")] @@ -56,7 +59,14 @@ pub use zenoh_protocol::core::CongestionControl; /// .unwrap(); /// # }) 
/// ``` -pub type DeleteBuilder<'a, 'b> = PutBuilder<'a, 'b>; +pub struct DeleteBuilder<'a, 'b> { + pub(crate) publisher: PublisherBuilder<'a, 'b>, + pub(crate) timestamp: Option, + #[cfg(feature = "unstable")] + pub(crate) source_info: Option, + #[cfg(feature = "unstable")] + pub(crate) attachment: Option, +} /// A builder for initializing a [`put`](crate::Session::put) operation. /// @@ -81,36 +91,141 @@ pub type DeleteBuilder<'a, 'b> = PutBuilder<'a, 'b>; pub struct PutBuilder<'a, 'b> { pub(crate) publisher: PublisherBuilder<'a, 'b>, pub(crate) payload: Payload, - pub(crate) kind: SampleKind, pub(crate) encoding: Encoding, + pub(crate) timestamp: Option, + #[cfg(feature = "unstable")] + pub(crate) source_info: Option, #[cfg(feature = "unstable")] pub(crate) attachment: Option, } -impl PutBuilder<'_, '_> { - /// Change the `congestion_control` to apply when routing the data. +impl QoSBuilderTrait for PutBuilder<'_, '_> { #[inline] - pub fn congestion_control(mut self, congestion_control: CongestionControl) -> Self { - self.publisher = self.publisher.congestion_control(congestion_control); - self + fn congestion_control(self, congestion_control: CongestionControl) -> Self { + Self { + publisher: self.publisher.congestion_control(congestion_control), + ..self + } } - - /// Change the priority of the written data. #[inline] - pub fn priority(mut self, priority: Priority) -> Self { - self.publisher = self.publisher.priority(priority); - self + fn priority(self, priority: Priority) -> Self { + Self { + publisher: self.publisher.priority(priority), + ..self + } } + #[inline] + fn express(self, is_express: bool) -> Self { + Self { + publisher: self.publisher.express(is_express), + ..self + } + } +} - /// Change the `express` policy to apply when routing the data. - /// When express is set to `true`, then the message will not be batched. - /// This usually has a positive impact on latency but negative impact on throughput. 
+impl QoSBuilderTrait for DeleteBuilder<'_, '_> { #[inline] - pub fn express(mut self, is_express: bool) -> Self { - self.publisher = self.publisher.express(is_express); - self + fn congestion_control(self, congestion_control: CongestionControl) -> Self { + Self { + publisher: self.publisher.congestion_control(congestion_control), + ..self + } + } + #[inline] + fn priority(self, priority: Priority) -> Self { + Self { + publisher: self.publisher.priority(priority), + ..self + } + } + #[inline] + fn express(self, is_express: bool) -> Self { + Self { + publisher: self.publisher.express(is_express), + ..self + } + } +} + +impl SampleBuilderTrait for PutBuilder<'_, '_> { + fn with_timestamp_opt(self, timestamp: Option) -> Self { + Self { timestamp, ..self } + } + fn with_timestamp(self, timestamp: uhlc::Timestamp) -> Self { + Self { + timestamp: Some(timestamp), + ..self + } + } + #[cfg(feature = "unstable")] + fn with_source_info(self, source_info: SourceInfo) -> Self { + Self { + source_info: Some(source_info), + ..self + } + } + #[cfg(feature = "unstable")] + fn with_attachment_opt(self, attachment: Option) -> Self { + Self { attachment, ..self } + } + #[cfg(feature = "unstable")] + fn with_attachment(self, attachment: Attachment) -> Self { + Self { + attachment: Some(attachment), + ..self + } + } +} + +impl SampleBuilderTrait for DeleteBuilder<'_, '_> { + fn with_timestamp_opt(self, timestamp: Option) -> Self { + Self { timestamp, ..self } + } + fn with_timestamp(self, timestamp: uhlc::Timestamp) -> Self { + Self { + timestamp: Some(timestamp), + ..self + } + } + #[cfg(feature = "unstable")] + fn with_source_info(self, source_info: SourceInfo) -> Self { + Self { + source_info: Some(source_info), + ..self + } + } + #[cfg(feature = "unstable")] + fn with_attachment_opt(self, attachment: Option) -> Self { + Self { attachment, ..self } + } + #[cfg(feature = "unstable")] + fn with_attachment(self, attachment: Attachment) -> Self { + Self { + attachment: Some(attachment), 
+ ..self + } + } +} + +impl PutSampleBuilderTrait for PutBuilder<'_, '_> { + fn with_encoding(self, encoding: Encoding) -> Self { + Self { encoding, ..self } + } + + fn with_payload(self, payload: IntoPayload) -> Self + where + IntoPayload: Into, + { + Self { + payload: payload.into(), + ..self + } } +} +impl DeleteSampleBuilderTrait for DeleteBuilder<'_, '_> {} + +impl PutBuilder<'_, '_> { /// Restrict the matching subscribers that will receive the published data /// to the ones that have the given [`Locality`](crate::prelude::Locality). #[zenoh_macros::unstable] @@ -119,21 +234,15 @@ impl PutBuilder<'_, '_> { self.publisher = self.publisher.allowed_destination(destination); self } +} - /// Set the [`Encoding`] of the written data. - #[inline] - pub fn with_encoding(mut self, encoding: IntoEncoding) -> Self - where - IntoEncoding: Into, - { - self.encoding = encoding.into(); - self - } - +impl DeleteBuilder<'_, '_> { + /// Restrict the matching subscribers that will receive the published data + /// to the ones that have the given [`Locality`](crate::prelude::Locality). #[zenoh_macros::unstable] - /// Attach user-provided data to the written data. 
- pub fn with_attachment(mut self, attachment: Attachment) -> Self { - self.attachment = Some(attachment); + #[inline] + pub fn allowed_destination(mut self, destination: Locality) -> Self { + self.publisher = self.publisher.allowed_destination(destination); self } } @@ -142,36 +251,40 @@ impl Resolvable for PutBuilder<'_, '_> { type To = ZResult<()>; } +impl Resolvable for DeleteBuilder<'_, '_> { + type To = ZResult<()>; +} + impl SyncResolve for PutBuilder<'_, '_> { #[inline] fn res_sync(self) -> ::To { - let PublisherBuilder { - session, - key_expr, - congestion_control, - priority, - is_express, - destination, - } = self.publisher; - - let publisher = Publisher { - session, - #[cfg(feature = "unstable")] - eid: 0, // This is a one shot Publisher - key_expr: key_expr?, - congestion_control, - priority, - is_express, - destination, - }; - + let publisher = self.publisher.one_time_res_sync()?; resolve_put( &publisher, self.payload, - self.kind, + SampleKind::Put, self.encoding, + self.timestamp, + #[cfg(feature = "unstable")] + self.source_info, + #[cfg(feature = "unstable")] + self.attachment, + ) + } +} + +impl SyncResolve for DeleteBuilder<'_, '_> { + #[inline] + fn res_sync(self) -> ::To { + let publisher = self.publisher.one_time_res_sync()?; + resolve_put( + &publisher, + Payload::empty(), + SampleKind::Delete, + Encoding::ZENOH_BYTES, + self.timestamp, #[cfg(feature = "unstable")] - None, + self.source_info, #[cfg(feature = "unstable")] self.attachment, ) @@ -186,6 +299,14 @@ impl AsyncResolve for PutBuilder<'_, '_> { } } +impl AsyncResolve for DeleteBuilder<'_, '_> { + type Future = Ready; + + fn res_async(self) -> Self::Future { + std::future::ready(self.res_sync()) + } +} + use futures::Sink; use std::convert::TryFrom; use std::convert::TryInto; @@ -293,25 +414,22 @@ impl<'a> Publisher<'a> { /// Change the `congestion_control` to apply when routing the data. 
#[inline] - pub fn congestion_control(mut self, congestion_control: CongestionControl) -> Self { + pub fn set_congestion_control(&mut self, congestion_control: CongestionControl) { self.congestion_control = congestion_control; - self } /// Change the priority of the written data. #[inline] - pub fn priority(mut self, priority: Priority) -> Self { + pub fn set_priority(&mut self, priority: Priority) { self.priority = priority; - self } /// Restrict the matching subscribers that will receive the published data /// to the ones that have the given [`Locality`](crate::prelude::Locality). #[zenoh_macros::unstable] #[inline] - pub fn allowed_destination(mut self, destination: Locality) -> Self { + pub fn set_allowed_destination(&mut self, destination: Locality) { self.destination = destination; - self } /// Consumes the given `Publisher`, returning a thread-safe reference-counting @@ -355,6 +473,7 @@ impl<'a> Publisher<'a> { payload, kind, encoding: Encoding::ZENOH_BYTES, + timestamp: None, #[cfg(feature = "unstable")] source_info: None, #[cfg(feature = "unstable")] @@ -625,6 +744,7 @@ pub struct Publication<'a> { payload: Payload, kind: SampleKind, encoding: Encoding, + timestamp: Option, #[cfg(feature = "unstable")] pub(crate) source_info: Option, #[cfg(feature = "unstable")] @@ -676,6 +796,7 @@ impl SyncResolve for Publication<'_> { self.payload, self.kind, self.encoding, + self.timestamp, #[cfg(feature = "unstable")] self.source_info, #[cfg(feature = "unstable")] @@ -707,6 +828,7 @@ impl<'a> Sink for Publisher<'a> { payload: item.payload, kind: item.kind, encoding: item.encoding, + timestamp: None, #[cfg(feature = "unstable")] source_info: None, #[cfg(feature = "unstable")] @@ -770,30 +892,32 @@ impl<'a, 'b> Clone for PublisherBuilder<'a, 'b> { } } -impl<'a, 'b> PublisherBuilder<'a, 'b> { +impl QoSBuilderTrait for PublisherBuilder<'_, '_> { /// Change the `congestion_control` to apply when routing the data. 
#[inline] - pub fn congestion_control(mut self, congestion_control: CongestionControl) -> Self { - self.congestion_control = congestion_control; - self + fn congestion_control(self, congestion_control: CongestionControl) -> Self { + Self { + congestion_control, + ..self + } } /// Change the priority of the written data. #[inline] - pub fn priority(mut self, priority: Priority) -> Self { - self.priority = priority; - self + fn priority(self, priority: Priority) -> Self { + Self { priority, ..self } } /// Change the `express` policy to apply when routing the data. /// When express is set to `true`, then the message will not be batched. /// This usually has a positive impact on latency but negative impact on throughput. #[inline] - pub fn express(mut self, is_express: bool) -> Self { - self.is_express = is_express; - self + fn express(self, is_express: bool) -> Self { + Self { is_express, ..self } } +} +impl<'a, 'b> PublisherBuilder<'a, 'b> { /// Restrict the matching subscribers that will receive the published data /// to the ones that have the given [`Locality`](crate::prelude::Locality). 
#[zenoh_macros::unstable] @@ -802,6 +926,20 @@ impl<'a, 'b> PublisherBuilder<'a, 'b> { self.destination = destination; self } + + // internal function for `PutBuilder` and `DeleteBuilder` + fn one_time_res_sync(self) -> ZResult> { + Ok(Publisher { + session: self.session, + #[cfg(feature = "unstable")] + eid: 0, // This is a one shot Publisher + key_expr: self.key_expr?, + congestion_control: self.congestion_control, + priority: self.priority, + is_express: self.is_express, + destination: self.destination, + }) + } } impl<'a, 'b> Resolvable for PublisherBuilder<'a, 'b> { @@ -874,6 +1012,7 @@ fn resolve_put( payload: Payload, kind: SampleKind, encoding: Encoding, + timestamp: Option, #[cfg(feature = "unstable")] source_info: Option, #[cfg(feature = "unstable")] attachment: Option, ) -> ZResult<()> { @@ -883,8 +1022,11 @@ fn resolve_put( .as_ref() .unwrap() .clone(); - let timestamp = publisher.session.runtime.new_timestamp(); - + let timestamp = if timestamp.is_none() { + publisher.session.runtime.new_timestamp() + } else { + timestamp + }; if publisher.destination != Locality::SessionLocal { primitives.send_push(Push { wire_expr: publisher.key_expr.to_wire(&publisher.session).to_owned(), diff --git a/zenoh/src/sample_builder.rs b/zenoh/src/sample_builder.rs index 0df98773fc..1710cbc85b 100644 --- a/zenoh/src/sample_builder.rs +++ b/zenoh/src/sample_builder.rs @@ -40,17 +40,23 @@ pub trait QoSBuilderTrait { } pub trait SampleBuilderTrait { + /// Sets of clears timestamp fn with_timestamp_opt(self, timestamp: Option) -> Self; + /// Sets timestamp fn with_timestamp(self, timestamp: Timestamp) -> Self; + /// Attach source information #[zenoh_macros::unstable] fn with_source_info(self, source_info: SourceInfo) -> Self; + /// Attach or remove user-provided data in key-value format #[zenoh_macros::unstable] fn with_attachment_opt(self, attachment: Option) -> Self; + /// Attach user-provided data in key-value format #[zenoh_macros::unstable] fn with_attachment(self, 
attachment: Attachment) -> Self; } pub trait PutSampleBuilderTrait: SampleBuilderTrait { + /// Set the [`Encoding`] fn with_encoding(self, encoding: Encoding) -> Self; fn with_payload(self, payload: IntoPayload) -> Self where diff --git a/zenoh/src/session.rs b/zenoh/src/session.rs index 89c18ec4a8..e26bdeadaf 100644 --- a/zenoh/src/session.rs +++ b/zenoh/src/session.rs @@ -706,10 +706,12 @@ impl Session { PutBuilder { publisher: self.declare_publisher(key_expr), payload: payload.into(), - kind: SampleKind::Put, + timestamp: None, encoding: Encoding::default(), #[cfg(feature = "unstable")] attachment: None, + #[cfg(feature = "unstable")] + source_info: None, } } @@ -737,13 +739,13 @@ impl Session { TryIntoKeyExpr: TryInto>, >>::Error: Into, { - PutBuilder { + DeleteBuilder { publisher: self.declare_publisher(key_expr), - payload: Payload::empty(), - kind: SampleKind::Delete, - encoding: Encoding::default(), + timestamp: None, #[cfg(feature = "unstable")] attachment: None, + #[cfg(feature = "unstable")] + source_info: None, } } /// Query data from the matching queryables in the system. 
diff --git a/zenoh/tests/qos.rs b/zenoh/tests/qos.rs index 1a9df306b2..8dc39423cb 100644 --- a/zenoh/tests/qos.rs +++ b/zenoh/tests/qos.rs @@ -15,6 +15,7 @@ use async_std::prelude::FutureExt; use async_std::task; use std::time::Duration; use zenoh::prelude::r#async::*; +use zenoh::sample_builder::QoSBuilderTrait; use zenoh::{publication::Priority, SessionDeclarations}; use zenoh_core::zasync_executor_init; diff --git a/zenoh/tests/routing.rs b/zenoh/tests/routing.rs index 06a8f5da45..123550852e 100644 --- a/zenoh/tests/routing.rs +++ b/zenoh/tests/routing.rs @@ -20,6 +20,7 @@ use std::sync::{atomic::AtomicUsize, Arc}; use std::time::Duration; use zenoh::config::{Config, ModeDependentValue}; use zenoh::prelude::r#async::*; +use zenoh::sample_builder::QoSBuilderTrait; use zenoh::{value::Value, Result}; use zenoh_core::zasync_executor_init; use zenoh_protocol::core::{WhatAmI, WhatAmIMatcher}; diff --git a/zenoh/tests/session.rs b/zenoh/tests/session.rs index e3f5e2df63..955ec7a73f 100644 --- a/zenoh/tests/session.rs +++ b/zenoh/tests/session.rs @@ -17,6 +17,7 @@ use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::Arc; use std::time::Duration; use zenoh::prelude::r#async::*; +use zenoh::sample_builder::QoSBuilderTrait; use zenoh_core::zasync_executor_init; const TIMEOUT: Duration = Duration::from_secs(60); diff --git a/zenoh/tests/unicity.rs b/zenoh/tests/unicity.rs index 8eb007b0c0..3d1327398d 100644 --- a/zenoh/tests/unicity.rs +++ b/zenoh/tests/unicity.rs @@ -17,6 +17,7 @@ use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::Arc; use std::time::Duration; use zenoh::prelude::r#async::*; +use zenoh::sample_builder::QoSBuilderTrait; use zenoh_core::zasync_executor_init; const TIMEOUT: Duration = Duration::from_secs(60); From 4e14cf9e24f5bc7ba2cde3e1494f398d58ed1415 Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Tue, 26 Mar 2024 01:49:14 +0100 Subject: [PATCH 049/124] build fixes --- examples/examples/z_ping.rs | 1 + examples/examples/z_pong.rs 
| 1 + examples/examples/z_pub_thr.rs | 1 + zenoh/tests/attachments.rs | 2 +- 4 files changed, 4 insertions(+), 1 deletion(-) diff --git a/examples/examples/z_ping.rs b/examples/examples/z_ping.rs index a57c937e48..b40afc1f53 100644 --- a/examples/examples/z_ping.rs +++ b/examples/examples/z_ping.rs @@ -16,6 +16,7 @@ use std::time::{Duration, Instant}; use zenoh::config::Config; use zenoh::prelude::sync::*; use zenoh::publication::CongestionControl; +use zenoh::sample_builder::QoSBuilderTrait; use zenoh_examples::CommonArgs; fn main() { diff --git a/examples/examples/z_pong.rs b/examples/examples/z_pong.rs index baa5683f62..0003958b5d 100644 --- a/examples/examples/z_pong.rs +++ b/examples/examples/z_pong.rs @@ -15,6 +15,7 @@ use clap::Parser; use zenoh::config::Config; use zenoh::prelude::sync::*; use zenoh::publication::CongestionControl; +use zenoh::sample_builder::QoSBuilderTrait; use zenoh_examples::CommonArgs; fn main() { diff --git a/examples/examples/z_pub_thr.rs b/examples/examples/z_pub_thr.rs index 4354ad2e68..7e7c1ac9b5 100644 --- a/examples/examples/z_pub_thr.rs +++ b/examples/examples/z_pub_thr.rs @@ -16,6 +16,7 @@ use clap::Parser; use std::convert::TryInto; use zenoh::prelude::sync::*; use zenoh::publication::CongestionControl; +use zenoh::sample_builder::QoSBuilderTrait; use zenoh_examples::CommonArgs; fn main() { diff --git a/zenoh/tests/attachments.rs b/zenoh/tests/attachments.rs index 8d26cc0344..04ed28b761 100644 --- a/zenoh/tests/attachments.rs +++ b/zenoh/tests/attachments.rs @@ -1,7 +1,7 @@ #[cfg(feature = "unstable")] #[test] fn pubsub() { - use zenoh::prelude::sync::*; + use zenoh::{prelude::sync::*, sample_builder::SampleBuilderTrait}; let zenoh = zenoh::open(Config::default()).res().unwrap(); let _sub = zenoh From c7cc5758138ba6e9fea380acf1605cee2d650624 Mon Sep 17 00:00:00 2001 From: "ChenYing Kuo (CY)" Date: Tue, 26 Mar 2024 19:46:29 +0800 Subject: [PATCH 050/124] Support RingBuffer to get the latest sample. 
(#851) * Add RingQueue to support getting the latest sample. Signed-off-by: ChenYing Kuo * Rename RingQueue to RingBuffer. Signed-off-by: ChenYing Kuo * Update examples. Signed-off-by: ChenYing Kuo * Add document. Signed-off-by: ChenYing Kuo * Add test for RingBuffer. Signed-off-by: ChenYing Kuo * Use the correct naming convention (CameCase) Signed-off-by: ChenYing Kuo * Add file header. Signed-off-by: ChenYing Kuo * gename z_pull and update the usage. Signed-off-by: ChenYing Kuo * Use ring instead of cache. Signed-off-by: ChenYing Kuo * Add sleep to wait for the result in pubsub_with_ringbuffer. Signed-off-by: ChenYing Kuo --------- Signed-off-by: ChenYing Kuo --- examples/README.md | 4 +- examples/examples/z_pull.rs | 38 +++++++----------- zenoh/src/handlers.rs | 52 ++++++++++++++++++++++++ zenoh/tests/attachments.rs | 13 ++++++ zenoh/tests/formatters.rs | 13 ++++++ zenoh/tests/handler.rs | 80 +++++++++++++++++++++++++++++++++++++ zenoh/tests/interceptors.rs | 13 ++++++ 7 files changed, 188 insertions(+), 25 deletions(-) create mode 100644 zenoh/tests/handler.rs diff --git a/examples/README.md b/examples/README.md index 8e5b3085ba..dab1c99911 100644 --- a/examples/README.md +++ b/examples/README.md @@ -80,7 +80,7 @@ ### z_pull Declares a key expression and a pull subscriber. - On each pull, the pull subscriber will be notified of the last `put` or `delete` made on each key expression matching the subscriber key expression, and will print this notification. + On each pull, the pull subscriber will be notified of the last N `put` or `delete` made on each key expression matching the subscriber key expression, and will print this notification. 
Typical usage: @@ -89,7 +89,7 @@ ``` or ```bash - z_pull -k demo/** + z_pull -k demo/** --size 3 ``` ### z_get diff --git a/examples/examples/z_pull.rs b/examples/examples/z_pull.rs index d2c9a5380b..9d64b7b758 100644 --- a/examples/examples/z_pull.rs +++ b/examples/examples/z_pull.rs @@ -13,12 +13,8 @@ // use async_std::task::sleep; use clap::Parser; -use std::{ - sync::{Arc, Mutex}, - time::Duration, -}; -use zenoh::{config::Config, prelude::r#async::*}; -use zenoh_collections::RingBuffer; +use std::time::Duration; +use zenoh::{config::Config, handlers::RingBuffer, prelude::r#async::*}; use zenoh_examples::CommonArgs; #[async_std::main] @@ -26,31 +22,24 @@ async fn main() { // initiate logging env_logger::init(); - let (config, key_expr, cache, interval) = parse_args(); + let (config, key_expr, size, interval) = parse_args(); println!("Opening session..."); let session = zenoh::open(config).res().await.unwrap(); - println!("Creating a local queue keeping the last {cache} elements..."); - let arb = Arc::new(Mutex::new(RingBuffer::new(cache))); - let arb_c = arb.clone(); - println!("Declaring Subscriber on '{key_expr}'..."); - let _subscriber = session + let subscriber = session .declare_subscriber(&key_expr) - .callback(move |sample| { - arb_c.lock().unwrap().push_force(sample); - }) + .with(RingBuffer::new(size)) .res() .await .unwrap(); println!("Pulling data every {:#?} seconds", interval); loop { - let mut res = arb.lock().unwrap().pull(); print!(">> [Subscriber] Pulling "); - match res.take() { - Some(sample) => { + match subscriber.recv() { + Ok(Some(sample)) => { let payload = sample .payload() .deserialize::() @@ -62,10 +51,13 @@ async fn main() { payload, ); } - None => { + Ok(None) => { println!("nothing... sleep for {:#?}", interval); sleep(interval).await; } + Err(e) => { + println!("Pull error: {e}"); + } } } } @@ -75,10 +67,10 @@ struct SubArgs { #[arg(short, long, default_value = "demo/example/**")] /// The Key Expression to subscribe to. 
key: KeyExpr<'static>, - /// The size of the cache. + /// The size of the ringbuffer. #[arg(long, default_value = "3")] - cache: usize, - /// The interval for pulling the cache. + size: usize, + /// The interval for pulling the ringbuffer. #[arg(long, default_value = "5.0")] interval: f32, #[command(flatten)] @@ -88,5 +80,5 @@ struct SubArgs { fn parse_args() -> (Config, KeyExpr<'static>, usize, Duration) { let args = SubArgs::parse(); let interval = Duration::from_secs_f32(args.interval); - (args.common.into(), args.key, args.cache, interval) + (args.common.into(), args.key, args.size, interval) } diff --git a/zenoh/src/handlers.rs b/zenoh/src/handlers.rs index e5ec3bb0dc..c5d2c6bb90 100644 --- a/zenoh/src/handlers.rs +++ b/zenoh/src/handlers.rs @@ -15,6 +15,10 @@ //! Callback handler trait. use crate::API_DATA_RECEPTION_CHANNEL_SIZE; +use std::sync::{Arc, Mutex, Weak}; +use zenoh_collections::RingBuffer as RingBufferInner; +use zenoh_result::ZResult; + /// An alias for `Arc`. pub type Dyn = std::sync::Arc; @@ -88,6 +92,54 @@ impl IntoHandler<'static, T> } } +/// Ring buffer with a limited queue size, which allows users to keep the last N data. +pub struct RingBuffer { + ring: Arc>>, +} + +impl RingBuffer { + /// Initialize the RingBuffer with the capacity size. 
+ pub fn new(capacity: usize) -> Self { + RingBuffer { + ring: Arc::new(Mutex::new(RingBufferInner::new(capacity))), + } + } +} + +pub struct RingBufferHandler { + ring: Weak>>, +} + +impl RingBufferHandler { + pub fn recv(&self) -> ZResult> { + let Some(ring) = self.ring.upgrade() else { + bail!("The ringbuffer has been deleted."); + }; + let mut guard = ring.lock().map_err(|e| zerror!("{}", e))?; + Ok(guard.pull()) + } +} + +impl IntoHandler<'static, T> for RingBuffer { + type Handler = RingBufferHandler; + + fn into_handler(self) -> (Callback<'static, T>, Self::Handler) { + let receiver = RingBufferHandler { + ring: Arc::downgrade(&self.ring), + }; + ( + Dyn::new(move |t| match self.ring.lock() { + Ok(mut g) => { + // Eventually drop the oldest element. + g.push_force(t); + } + Err(e) => log::error!("{}", e), + }), + receiver, + ) + } +} + /// A function that can transform a [`FnMut`]`(T)` to /// a [`Fn`]`(T)` with the help of a [`Mutex`](std::sync::Mutex). pub fn locked(fnmut: impl FnMut(T)) -> impl Fn(T) { diff --git a/zenoh/tests/attachments.rs b/zenoh/tests/attachments.rs index 38d03b0a84..603939bc0e 100644 --- a/zenoh/tests/attachments.rs +++ b/zenoh/tests/attachments.rs @@ -1,3 +1,16 @@ +// +// Copyright (c) 2024 ZettaScale Technology +// +// This program and the accompanying materials are made available under the +// terms of the Eclipse Public License 2.0 which is available at +// http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 +// which is available at https://www.apache.org/licenses/LICENSE-2.0. 
+// +// SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 +// +// Contributors: +// ZettaScale Zenoh Team, +// #[cfg(feature = "unstable")] #[test] fn pubsub() { diff --git a/zenoh/tests/formatters.rs b/zenoh/tests/formatters.rs index ae894e44b6..22600b6cc0 100644 --- a/zenoh/tests/formatters.rs +++ b/zenoh/tests/formatters.rs @@ -1,3 +1,16 @@ +// +// Copyright (c) 2024 ZettaScale Technology +// +// This program and the accompanying materials are made available under the +// terms of the Eclipse Public License 2.0 which is available at +// http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 +// which is available at https://www.apache.org/licenses/LICENSE-2.0. +// +// SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 +// +// Contributors: +// ZettaScale Zenoh Team, +// #[test] fn reuse() { zenoh::kedefine!( diff --git a/zenoh/tests/handler.rs b/zenoh/tests/handler.rs new file mode 100644 index 0000000000..c1e912fc75 --- /dev/null +++ b/zenoh/tests/handler.rs @@ -0,0 +1,80 @@ +// +// Copyright (c) 2024 ZettaScale Technology +// +// This program and the accompanying materials are made available under the +// terms of the Eclipse Public License 2.0 which is available at +// http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 +// which is available at https://www.apache.org/licenses/LICENSE-2.0. 
+// +// SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 +// +// Contributors: +// ZettaScale Zenoh Team, +// +#[test] +fn pubsub_with_ringbuffer() { + use std::{thread, time::Duration}; + use zenoh::{handlers::RingBuffer, prelude::sync::*}; + + let zenoh = zenoh::open(Config::default()).res().unwrap(); + let sub = zenoh + .declare_subscriber("test/ringbuffer") + .with(RingBuffer::new(3)) + .res() + .unwrap(); + for i in 0..10 { + zenoh + .put("test/ringbuffer", format!("put{i}")) + .res() + .unwrap(); + } + // Should only receive the last three samples ("put7", "put8", "put9") + for i in 7..10 { + assert_eq!( + sub.recv() + .unwrap() + .unwrap() + .payload() + .deserialize::() + .unwrap(), + format!("put{i}") + ); + } + // Wait for the subscriber to get the value + thread::sleep(Duration::from_millis(1000)); +} + +#[test] +fn query_with_ringbuffer() { + use zenoh::{handlers::RingBuffer, prelude::sync::*}; + + let zenoh = zenoh::open(Config::default()).res().unwrap(); + let queryable = zenoh + .declare_queryable("test/ringbuffer_query") + .with(RingBuffer::new(1)) + .res() + .unwrap(); + + let _reply1 = zenoh + .get("test/ringbuffer_query") + .with_value("query1") + .res() + .unwrap(); + let _reply2 = zenoh + .get("test/ringbuffer_query") + .with_value("query2") + .res() + .unwrap(); + + let query = queryable.recv().unwrap().unwrap(); + // Only receive the latest query + assert_eq!( + query + .value() + .unwrap() + .payload + .deserialize::() + .unwrap(), + "query2" + ); +} diff --git a/zenoh/tests/interceptors.rs b/zenoh/tests/interceptors.rs index 1f502138e4..bf7ec3d7eb 100644 --- a/zenoh/tests/interceptors.rs +++ b/zenoh/tests/interceptors.rs @@ -1,3 +1,16 @@ +// +// Copyright (c) 2024 ZettaScale Technology +// +// This program and the accompanying materials are made available under the +// terms of the Eclipse Public License 2.0 which is available at +// http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 +// which is available at 
https://www.apache.org/licenses/LICENSE-2.0. +// +// SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 +// +// Contributors: +// ZettaScale Zenoh Team, +// use std::sync::{Arc, Mutex}; use zenoh_core::zlock; From fa4b98d0a791d16b9f7c19865aeee4d08ced1766 Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Tue, 26 Mar 2024 13:33:44 +0100 Subject: [PATCH 051/124] Remove unmantained complete_n feature (#862) * Remove unmantained complete_n feature * Refined QueryableInfo message format * Remove useless bitflag --- commons/zenoh-codec/Cargo.toml | 1 - commons/zenoh-codec/src/network/declare.rs | 53 +++++- commons/zenoh-codec/src/network/request.rs | 5 - commons/zenoh-protocol/Cargo.toml | 1 - commons/zenoh-protocol/src/network/declare.rs | 49 ++--- commons/zenoh-protocol/src/network/request.rs | 4 - zenoh/Cargo.toml | 1 - zenoh/src/lib.rs | 1 - zenoh/src/net/routing/dispatcher/queries.rs | 178 +++++------------- zenoh/src/net/routing/dispatcher/resource.rs | 9 +- zenoh/src/net/routing/hat/client/mod.rs | 6 +- zenoh/src/net/routing/hat/client/queries.rs | 32 ++-- .../src/net/routing/hat/linkstate_peer/mod.rs | 6 +- .../net/routing/hat/linkstate_peer/queries.rs | 49 ++--- zenoh/src/net/routing/hat/mod.rs | 4 +- zenoh/src/net/routing/hat/p2p_peer/mod.rs | 4 +- zenoh/src/net/routing/hat/p2p_peer/queries.rs | 32 ++-- zenoh/src/net/routing/hat/router/mod.rs | 8 +- zenoh/src/net/routing/hat/router/queries.rs | 60 +++--- zenoh/src/net/runtime/adminspace.rs | 7 +- zenoh/src/session.rs | 6 +- zenohd/src/main.rs | 2 - 22 files changed, 194 insertions(+), 324 deletions(-) diff --git a/commons/zenoh-codec/Cargo.toml b/commons/zenoh-codec/Cargo.toml index 72f507a596..5b7b8de6ed 100644 --- a/commons/zenoh-codec/Cargo.toml +++ b/commons/zenoh-codec/Cargo.toml @@ -41,7 +41,6 @@ shared-memory = [ "zenoh-shm", "zenoh-protocol/shared-memory" ] -complete_n = ["zenoh-protocol/complete_n"] [dependencies] log = { workspace = true, optional = true } diff --git 
a/commons/zenoh-codec/src/network/declare.rs b/commons/zenoh-codec/src/network/declare.rs index bcc55ed62b..c81514ab3e 100644 --- a/commons/zenoh-codec/src/network/declare.rs +++ b/commons/zenoh-codec/src/network/declare.rs @@ -511,7 +511,46 @@ where } // QueryableInfo -crate::impl_zextz64!(queryable::ext::QueryableInfo, queryable::ext::Info::ID); +impl WCodec<(&queryable::ext::QueryableInfoType, bool), &mut W> for Zenoh080 +where + W: Writer, +{ + type Output = Result<(), DidntWrite>; + fn write(self, writer: &mut W, x: (&queryable::ext::QueryableInfoType, bool)) -> Self::Output { + let (x, more) = x; + + let mut flags: u8 = 0; + if x.complete { + flags |= queryable::ext::flag::C; + } + let v: u64 = (flags as u64) | ((x.distance as u64) << 8); + let ext = queryable::ext::QueryableInfo::new(v); + + self.write(&mut *writer, (&ext, more)) + } +} + +impl RCodec<(queryable::ext::QueryableInfoType, bool), &mut R> for Zenoh080Header +where + R: Reader, +{ + type Error = DidntRead; + + fn read( + self, + reader: &mut R, + ) -> Result<(queryable::ext::QueryableInfoType, bool), Self::Error> { + let (ext, more): (queryable::ext::QueryableInfo, bool) = self.read(&mut *reader)?; + + let complete = imsg::has_flag(ext.value as u8, queryable::ext::flag::C); + let distance = (ext.value >> 8) as u16; + + Ok(( + queryable::ext::QueryableInfoType { complete, distance }, + more, + )) + } +} // DeclareQueryable impl WCodec<&queryable::DeclareQueryable, &mut W> for Zenoh080 @@ -529,7 +568,7 @@ where // Header let mut header = declare::id::D_QUERYABLE; - let mut n_exts = (ext_info != &queryable::ext::QueryableInfo::DEFAULT) as u8; + let mut n_exts = (ext_info != &queryable::ext::QueryableInfoType::DEFAULT) as u8; if n_exts != 0 { header |= subscriber::flag::Z; } @@ -544,9 +583,9 @@ where // Body self.write(&mut *writer, id)?; self.write(&mut *writer, wire_expr)?; - if ext_info != &queryable::ext::QueryableInfo::DEFAULT { + if ext_info != &queryable::ext::QueryableInfoType::DEFAULT { 
n_exts -= 1; - self.write(&mut *writer, (*ext_info, n_exts != 0))?; + self.write(&mut *writer, (ext_info, n_exts != 0))?; } Ok(()) @@ -589,15 +628,15 @@ where }; // Extensions - let mut ext_info = queryable::ext::QueryableInfo::DEFAULT; + let mut ext_info = queryable::ext::QueryableInfoType::DEFAULT; let mut has_ext = imsg::has_flag(self.header, queryable::flag::Z); while has_ext { let ext: u8 = self.codec.read(&mut *reader)?; let eodec = Zenoh080Header::new(ext); match iext::eid(ext) { - queryable::ext::Info::ID => { - let (i, ext): (queryable::ext::QueryableInfo, bool) = + queryable::ext::QueryableInfo::ID => { + let (i, ext): (queryable::ext::QueryableInfoType, bool) = eodec.read(&mut *reader)?; ext_info = i; has_ext = ext; diff --git a/commons/zenoh-codec/src/network/request.rs b/commons/zenoh-codec/src/network/request.rs index 364c1af3d0..6173840d7e 100644 --- a/commons/zenoh-codec/src/network/request.rs +++ b/commons/zenoh-codec/src/network/request.rs @@ -43,8 +43,6 @@ where ext::TargetType::BestMatching => 0, ext::TargetType::All => 1, ext::TargetType::AllComplete => 2, - #[cfg(feature = "complete_n")] - ext::TargetType::Complete(n) => 3 + *n, }; let ext = ext::Target::new(v); self.write(&mut *writer, (&ext, more)) @@ -63,9 +61,6 @@ where 0 => ext::TargetType::BestMatching, 1 => ext::TargetType::All, 2 => ext::TargetType::AllComplete, - #[cfg(feature = "complete_n")] - n => ext::TargetType::Complete(n - 3), - #[cfg(not(feature = "complete_n"))] _ => return Err(DidntRead), }; Ok((rt, more)) diff --git a/commons/zenoh-protocol/Cargo.toml b/commons/zenoh-protocol/Cargo.toml index 93c92ee33f..9d7e35d690 100644 --- a/commons/zenoh-protocol/Cargo.toml +++ b/commons/zenoh-protocol/Cargo.toml @@ -36,7 +36,6 @@ std = [ test = ["rand", "zenoh-buffers/test"] shared-memory = ["std", "zenoh-buffers/shared-memory"] stats = [] -complete_n = [] [dependencies] const_format = { workspace = true } diff --git a/commons/zenoh-protocol/src/network/declare.rs 
b/commons/zenoh-protocol/src/network/declare.rs index 187fa87662..d41d8bf67f 100644 --- a/commons/zenoh-protocol/src/network/declare.rs +++ b/commons/zenoh-protocol/src/network/declare.rs @@ -464,31 +464,35 @@ pub mod queryable { pub struct DeclareQueryable { pub id: QueryableId, pub wire_expr: WireExpr<'static>, - pub ext_info: ext::QueryableInfo, + pub ext_info: ext::QueryableInfoType, } pub mod ext { use super::*; - pub type Info = zextz64!(0x01, false); + pub type QueryableInfo = zextz64!(0x01, false); + pub mod flag { + pub const C: u8 = 1; // 0x01 Complete if C==1 then the queryable is complete + } + /// /// 7 6 5 4 3 2 1 0 /// +-+-+-+-+-+-+-+-+ /// |Z|0_1| ID | /// +-+-+-+---------+ - /// ~ complete_n ~ + /// |x|x|x|x|x|x|x|C| /// +---------------+ - /// ~ distance ~ + /// ~ distance ~ /// +---------------+ #[derive(Debug, Clone, Copy, PartialEq, Eq)] - pub struct QueryableInfo { - pub complete: u8, // Default 0: incomplete // @TODO: maybe a bitflag - pub distance: u32, // Default 0: no distance + pub struct QueryableInfoType { + pub complete: bool, // Default false: incomplete + pub distance: u16, // Default 0: no distance } - impl QueryableInfo { + impl QueryableInfoType { pub const DEFAULT: Self = Self { - complete: 0, + complete: false, distance: 0, }; @@ -496,35 +500,18 @@ pub mod queryable { pub fn rand() -> Self { use rand::Rng; let mut rng = rand::thread_rng(); - let complete: u8 = rng.gen(); - let distance: u32 = rng.gen(); + let complete: bool = rng.gen_bool(0.5); + let distance: u16 = rng.gen(); Self { complete, distance } } } - impl Default for QueryableInfo { + impl Default for QueryableInfoType { fn default() -> Self { Self::DEFAULT } } - - impl From for QueryableInfo { - fn from(ext: Info) -> Self { - let complete = ext.value as u8; - let distance = (ext.value >> 8) as u32; - - Self { complete, distance } - } - } - - impl From for Info { - fn from(ext: QueryableInfo) -> Self { - let mut v: u64 = ext.complete as u64; - v |= (ext.distance as 
u64) << 8; - Info::new(v) - } - } } impl DeclareQueryable { @@ -535,7 +522,7 @@ pub mod queryable { let id: QueryableId = rng.gen(); let wire_expr = WireExpr::rand(); - let ext_info = ext::QueryableInfo::rand(); + let ext_info = ext::QueryableInfoType::rand(); Self { id, @@ -553,7 +540,7 @@ pub mod queryable { /// /// 7 6 5 4 3 2 1 0 /// +-+-+-+-+-+-+-+-+ - /// |Z|X|X| U_QBL | + /// |Z|0_2| U_QBL | /// +---------------+ /// ~ qbls_id:z32 ~ /// +---------------+ diff --git a/commons/zenoh-protocol/src/network/request.rs b/commons/zenoh-protocol/src/network/request.rs index aba6bb057a..ff978744e8 100644 --- a/commons/zenoh-protocol/src/network/request.rs +++ b/commons/zenoh-protocol/src/network/request.rs @@ -93,8 +93,6 @@ pub mod ext { BestMatching, All, AllComplete, - #[cfg(feature = "complete_n")] - Complete(u64), } impl TargetType { @@ -109,8 +107,6 @@ pub mod ext { TargetType::All, TargetType::AllComplete, TargetType::BestMatching, - #[cfg(feature = "complete_n")] - TargetType::Complete(rng.gen()), ] .choose(&mut rng) .unwrap() diff --git a/zenoh/Cargo.toml b/zenoh/Cargo.toml index e6f7a4d9aa..1333ea6a57 100644 --- a/zenoh/Cargo.toml +++ b/zenoh/Cargo.toml @@ -31,7 +31,6 @@ maintenance = { status = "actively-developed" } [features] auth_pubkey = ["zenoh-transport/auth_pubkey"] auth_usrpwd = ["zenoh-transport/auth_usrpwd"] -complete_n = ["zenoh-codec/complete_n"] shared-memory = [ "zenoh-shm", "zenoh-protocol/shared-memory", diff --git a/zenoh/src/lib.rs b/zenoh/src/lib.rs index eb1ba1bcd1..ed2f01f180 100644 --- a/zenoh/src/lib.rs +++ b/zenoh/src/lib.rs @@ -106,7 +106,6 @@ pub const FEATURES: &str = concat_enabled_features!( features = [ "auth_pubkey", "auth_usrpwd", - "complete_n", "shared-memory", "stats", "transport_multilink", diff --git a/zenoh/src/net/routing/dispatcher/queries.rs b/zenoh/src/net/routing/dispatcher/queries.rs index 04262e555d..753a4003e1 100644 --- a/zenoh/src/net/routing/dispatcher/queries.rs +++ 
b/zenoh/src/net/routing/dispatcher/queries.rs @@ -24,7 +24,7 @@ use zenoh_config::WhatAmI; use zenoh_protocol::{ core::{key_expr::keyexpr, Encoding, WireExpr}, network::{ - declare::{ext, queryable::ext::QueryableInfo, QueryableId}, + declare::{ext, queryable::ext::QueryableInfoType, QueryableId}, request::{ext::TargetType, Request, RequestId}, response::{self, ext::ResponderIdType, Response, ResponseFinal}, }, @@ -44,7 +44,7 @@ pub(crate) fn declare_queryable( face: &mut Arc, id: QueryableId, expr: &WireExpr, - qabl_info: &QueryableInfo, + qabl_info: &QueryableInfoType, node_id: NodeId, ) { let rtables = zread!(tables.tables); @@ -287,22 +287,11 @@ fn compute_final_route( .hat_code .egress_filter(tables, src_face, &qabl.direction.0, expr) { - #[cfg(feature = "complete_n")] - { - route.entry(qabl.direction.0.id).or_insert_with(|| { - let mut direction = qabl.direction.clone(); - let qid = insert_pending_query(&mut direction.0, query.clone()); - (direction, qid, *target) - }); - } - #[cfg(not(feature = "complete_n"))] - { - route.entry(qabl.direction.0.id).or_insert_with(|| { - let mut direction = qabl.direction.clone(); - let qid = insert_pending_query(&mut direction.0, query.clone()); - (direction, qid) - }); - } + route.entry(qabl.direction.0.id).or_insert_with(|| { + let mut direction = qabl.direction.clone(); + let qid = insert_pending_query(&mut direction.0, query.clone()); + (direction, qid) + }); } } route @@ -315,46 +304,11 @@ fn compute_final_route( .hat_code .egress_filter(tables, src_face, &qabl.direction.0, expr) { - #[cfg(feature = "complete_n")] - { - route.entry(qabl.direction.0.id).or_insert_with(|| { - let mut direction = qabl.direction.clone(); - let qid = insert_pending_query(&mut direction.0, query.clone()); - (direction, qid, *target) - }); - } - #[cfg(not(feature = "complete_n"))] - { - route.entry(qabl.direction.0.id).or_insert_with(|| { - let mut direction = qabl.direction.clone(); - let qid = insert_pending_query(&mut direction.0, 
query.clone()); - (direction, qid) - }); - } - } - } - route - } - #[cfg(feature = "complete_n")] - TargetType::Complete(n) => { - let mut route = HashMap::new(); - let mut remaining = *n; - for qabl in qabls.iter() { - if qabl.complete > 0 - && tables - .hat_code - .egress_filter(tables, src_face, &qabl.direction.0, expr) - { - let nb = std::cmp::min(qabl.complete, remaining); route.entry(qabl.direction.0.id).or_insert_with(|| { let mut direction = qabl.direction.clone(); let qid = insert_pending_query(&mut direction.0, query.clone()); - (direction, qid, TargetType::Complete(nb)) + (direction, qid) }); - remaining -= nb; - if remaining == 0 { - break; - } } } route @@ -365,18 +319,11 @@ fn compute_final_route( .find(|qabl| qabl.direction.0.id != src_face.id && qabl.complete > 0) { let mut route = HashMap::new(); - #[cfg(feature = "complete_n")] - { - let mut direction = qabl.direction.clone(); - let qid = insert_pending_query(&mut direction.0, query); - route.insert(direction.0.id, (direction, qid, *target)); - } - #[cfg(not(feature = "complete_n"))] - { - let mut direction = qabl.direction.clone(); - let qid = insert_pending_query(&mut direction.0, query); - route.insert(direction.0.id, (direction, qid)); - } + + let mut direction = qabl.direction.clone(); + let qid = insert_pending_query(&mut direction.0, query); + route.insert(direction.0.id, (direction, qid)); + route } else { compute_final_route(tables, qabls, src_face, expr, &TargetType::All, query) @@ -624,78 +571,37 @@ pub fn route_query( expr.full_expr().to_string(), )); } else { - // let timer = tables.timer.clone(); - // let timeout = tables.queries_default_timeout; - #[cfg(feature = "complete_n")] - { - for ((outface, key_expr, context), qid, t) in route.values() { - // timer.add(TimedEvent::once( - // Instant::now() + timeout, - // QueryCleanup { - // tables: tables_ref.clone(), - // face: Arc::downgrade(&outface), - // *qid, - // }, - // )); - #[cfg(feature = "stats")] - if !admin { - 
inc_req_stats!(outface, tx, user, body) - } else { - inc_req_stats!(outface, tx, admin, body) - } - - log::trace!("Propagate query {}:{} to {}", face, qid, outface); - outface.primitives.send_request(RoutingContext::with_expr( - Request { - id: *qid, - wire_expr: key_expr.into(), - ext_qos: ext::QoSType::REQUEST, - ext_tstamp: None, - ext_nodeid: ext::NodeIdType { node_id: *context }, - ext_target: *t, - ext_budget: None, - ext_timeout: None, - payload: body.clone(), - }, - expr.full_expr().to_string(), - )); + for ((outface, key_expr, context), qid) in route.values() { + // timer.add(TimedEvent::once( + // Instant::now() + timeout, + // QueryCleanup { + // tables: tables_ref.clone(), + // face: Arc::downgrade(&outface), + // *qid, + // }, + // )); + #[cfg(feature = "stats")] + if !admin { + inc_req_stats!(outface, tx, user, body) + } else { + inc_req_stats!(outface, tx, admin, body) } - } - #[cfg(not(feature = "complete_n"))] - { - for ((outface, key_expr, context), qid) in route.values() { - // timer.add(TimedEvent::once( - // Instant::now() + timeout, - // QueryCleanup { - // tables: tables_ref.clone(), - // face: Arc::downgrade(&outface), - // *qid, - // }, - // )); - #[cfg(feature = "stats")] - if !admin { - inc_req_stats!(outface, tx, user, body) - } else { - inc_req_stats!(outface, tx, admin, body) - } - - log::trace!("Propagate query {}:{} to {}", face, qid, outface); - outface.primitives.send_request(RoutingContext::with_expr( - Request { - id: *qid, - wire_expr: key_expr.into(), - ext_qos: ext::QoSType::REQUEST, - ext_tstamp: None, - ext_nodeid: ext::NodeIdType { node_id: *context }, - ext_target: target, - ext_budget: None, - ext_timeout: None, - payload: body.clone(), - }, - expr.full_expr().to_string(), - )); - } + log::trace!("Propagate query {}:{} to {}", face, qid, outface); + outface.primitives.send_request(RoutingContext::with_expr( + Request { + id: *qid, + wire_expr: key_expr.into(), + ext_qos: ext::QoSType::REQUEST, + ext_tstamp: None, + 
ext_nodeid: ext::NodeIdType { node_id: *context }, + ext_target: target, + ext_budget: None, + ext_timeout: None, + payload: body.clone(), + }, + expr.full_expr().to_string(), + )); } } } else { diff --git a/zenoh/src/net/routing/dispatcher/resource.rs b/zenoh/src/net/routing/dispatcher/resource.rs index 3e35db14b6..0450dab38a 100644 --- a/zenoh/src/net/routing/dispatcher/resource.rs +++ b/zenoh/src/net/routing/dispatcher/resource.rs @@ -21,14 +21,12 @@ use std::convert::TryInto; use std::hash::{Hash, Hasher}; use std::sync::{Arc, Weak}; use zenoh_config::WhatAmI; -#[cfg(feature = "complete_n")] -use zenoh_protocol::network::request::ext::TargetType; use zenoh_protocol::network::RequestId; use zenoh_protocol::{ core::{key_expr::keyexpr, ExprId, WireExpr}, network::{ declare::{ - ext, queryable::ext::QueryableInfo, subscriber::ext::SubscriberInfo, Declare, + ext, queryable::ext::QueryableInfoType, subscriber::ext::SubscriberInfo, Declare, DeclareBody, DeclareKeyExpr, }, Mapping, @@ -40,9 +38,6 @@ pub(crate) type NodeId = u16; pub(crate) type Direction = (Arc, WireExpr<'static>, NodeId); pub(crate) type Route = HashMap; -#[cfg(feature = "complete_n")] -pub(crate) type QueryRoute = HashMap; -#[cfg(not(feature = "complete_n"))] pub(crate) type QueryRoute = HashMap; pub(crate) struct QueryTargetQabl { pub(crate) direction: Direction, @@ -56,7 +51,7 @@ pub(crate) struct SessionContext { pub(crate) local_expr_id: Option, pub(crate) remote_expr_id: Option, pub(crate) subs: Option, - pub(crate) qabl: Option, + pub(crate) qabl: Option, pub(crate) in_interceptor_cache: Option>, pub(crate) e_interceptor_cache: Option>, } diff --git a/zenoh/src/net/routing/hat/client/mod.rs b/zenoh/src/net/routing/hat/client/mod.rs index a9908f5f58..8b7031152a 100644 --- a/zenoh/src/net/routing/hat/client/mod.rs +++ b/zenoh/src/net/routing/hat/client/mod.rs @@ -42,7 +42,9 @@ use std::{ sync::{atomic::AtomicU32, Arc}, }; use zenoh_config::WhatAmI; -use 
zenoh_protocol::network::declare::{queryable::ext::QueryableInfo, QueryableId, SubscriberId}; +use zenoh_protocol::network::declare::{ + queryable::ext::QueryableInfoType, QueryableId, SubscriberId, +}; use zenoh_protocol::network::Oam; use zenoh_result::ZResult; use zenoh_sync::get_mut_unchecked; @@ -284,7 +286,7 @@ struct HatFace { next_id: AtomicU32, // @TODO: manage rollover and uniqueness local_subs: HashMap, SubscriberId>, remote_subs: HashMap>, - local_qabls: HashMap, (QueryableId, QueryableInfo)>, + local_qabls: HashMap, (QueryableId, QueryableInfoType)>, remote_qabls: HashMap>, } diff --git a/zenoh/src/net/routing/hat/client/queries.rs b/zenoh/src/net/routing/hat/client/queries.rs index 81e5ba52d9..2ac3f1b993 100644 --- a/zenoh/src/net/routing/hat/client/queries.rs +++ b/zenoh/src/net/routing/hat/client/queries.rs @@ -32,29 +32,24 @@ use zenoh_protocol::network::declare::QueryableId; use zenoh_protocol::{ core::{WhatAmI, WireExpr}, network::declare::{ - common::ext::WireExprType, ext, queryable::ext::QueryableInfo, Declare, DeclareBody, + common::ext::WireExprType, ext, queryable::ext::QueryableInfoType, Declare, DeclareBody, DeclareQueryable, UndeclareQueryable, }, }; use zenoh_sync::get_mut_unchecked; -#[cfg(feature = "complete_n")] #[inline] -fn merge_qabl_infos(mut this: QueryableInfo, info: &QueryableInfo) -> QueryableInfo { - this.complete += info.complete; +fn merge_qabl_infos(mut this: QueryableInfoType, info: &QueryableInfoType) -> QueryableInfoType { + this.complete = this.complete || info.complete; this.distance = std::cmp::min(this.distance, info.distance); this } -#[cfg(not(feature = "complete_n"))] -#[inline] -fn merge_qabl_infos(mut this: QueryableInfo, info: &QueryableInfo) -> QueryableInfo { - this.complete = u8::from(this.complete != 0 || info.complete != 0); - this.distance = std::cmp::min(this.distance, info.distance); - this -} - -fn local_qabl_info(_tables: &Tables, res: &Arc, face: &Arc) -> QueryableInfo { +fn local_qabl_info( + 
_tables: &Tables, + res: &Arc, + face: &Arc, +) -> QueryableInfoType { res.session_ctxs .values() .fold(None, |accu, ctx| { @@ -71,10 +66,7 @@ fn local_qabl_info(_tables: &Tables, res: &Arc, face: &Arc) accu } }) - .unwrap_or(QueryableInfo { - complete: 0, - distance: 0, - }) + .unwrap_or(QueryableInfoType::DEFAULT) } fn propagate_simple_queryable( @@ -121,7 +113,7 @@ fn register_client_queryable( face: &mut Arc, id: QueryableId, res: &mut Arc, - qabl_info: &QueryableInfo, + qabl_info: &QueryableInfoType, ) { // Register queryable { @@ -147,7 +139,7 @@ fn declare_client_queryable( face: &mut Arc, id: QueryableId, res: &mut Arc, - qabl_info: &QueryableInfo, + qabl_info: &QueryableInfoType, ) { register_client_queryable(tables, face, id, res, qabl_info); propagate_simple_queryable(tables, res, Some(face)); @@ -263,7 +255,7 @@ impl HatQueriesTrait for HatCode { face: &mut Arc, id: QueryableId, res: &mut Arc, - qabl_info: &QueryableInfo, + qabl_info: &QueryableInfoType, _node_id: NodeId, ) { declare_client_queryable(tables, face, id, res, qabl_info); diff --git a/zenoh/src/net/routing/hat/linkstate_peer/mod.rs b/zenoh/src/net/routing/hat/linkstate_peer/mod.rs index 3c4e2091f0..71c483e7bd 100644 --- a/zenoh/src/net/routing/hat/linkstate_peer/mod.rs +++ b/zenoh/src/net/routing/hat/linkstate_peer/mod.rs @@ -51,7 +51,7 @@ use zenoh_config::{unwrap_or_default, ModeDependent, WhatAmI, WhatAmIMatcher, Ze use zenoh_protocol::{ common::ZExtBody, network::{ - declare::{queryable::ext::QueryableInfo, QueryableId, SubscriberId}, + declare::{queryable::ext::QueryableInfoType, QueryableId, SubscriberId}, oam::id::OAM_LINKSTATE, Oam, }, @@ -449,7 +449,7 @@ impl HatBaseTrait for HatCode { struct HatContext { router_subs: HashSet, peer_subs: HashSet, - peer_qabls: HashMap, + peer_qabls: HashMap, } impl HatContext { @@ -467,7 +467,7 @@ struct HatFace { next_id: AtomicU32, // @TODO: manage rollover and uniqueness local_subs: HashMap, SubscriberId>, remote_subs: HashMap>, - local_qabls: 
HashMap, (QueryableId, QueryableInfo)>, + local_qabls: HashMap, (QueryableId, QueryableInfoType)>, remote_qabls: HashMap>, } diff --git a/zenoh/src/net/routing/hat/linkstate_peer/queries.rs b/zenoh/src/net/routing/hat/linkstate_peer/queries.rs index fa553e5121..9fba744a9c 100644 --- a/zenoh/src/net/routing/hat/linkstate_peer/queries.rs +++ b/zenoh/src/net/routing/hat/linkstate_peer/queries.rs @@ -35,29 +35,20 @@ use zenoh_protocol::network::declare::QueryableId; use zenoh_protocol::{ core::{WhatAmI, WireExpr, ZenohId}, network::declare::{ - common::ext::WireExprType, ext, queryable::ext::QueryableInfo, Declare, DeclareBody, + common::ext::WireExprType, ext, queryable::ext::QueryableInfoType, Declare, DeclareBody, DeclareQueryable, UndeclareQueryable, }, }; use zenoh_sync::get_mut_unchecked; -#[cfg(feature = "complete_n")] #[inline] -fn merge_qabl_infos(mut this: QueryableInfo, info: &QueryableInfo) -> QueryableInfo { - this.complete += info.complete; +fn merge_qabl_infos(mut this: QueryableInfoType, info: &QueryableInfoType) -> QueryableInfoType { + this.complete = this.complete || info.complete; this.distance = std::cmp::min(this.distance, info.distance); this } -#[cfg(not(feature = "complete_n"))] -#[inline] -fn merge_qabl_infos(mut this: QueryableInfo, info: &QueryableInfo) -> QueryableInfo { - this.complete = u8::from(this.complete != 0 || info.complete != 0); - this.distance = std::cmp::min(this.distance, info.distance); - this -} - -fn local_peer_qabl_info(_tables: &Tables, res: &Arc) -> QueryableInfo { +fn local_peer_qabl_info(_tables: &Tables, res: &Arc) -> QueryableInfoType { res.session_ctxs .values() .fold(None, |accu, ctx| { @@ -70,13 +61,14 @@ fn local_peer_qabl_info(_tables: &Tables, res: &Arc) -> QueryableInfo accu } }) - .unwrap_or(QueryableInfo { - complete: 0, - distance: 0, - }) + .unwrap_or(QueryableInfoType::DEFAULT) } -fn local_qabl_info(tables: &Tables, res: &Arc, face: &Arc) -> QueryableInfo { +fn local_qabl_info( + tables: &Tables, + res: 
&Arc, + face: &Arc, +) -> QueryableInfoType { let info = if res.context.is_some() { res_hat!(res) .peer_qabls @@ -112,10 +104,7 @@ fn local_qabl_info(tables: &Tables, res: &Arc, face: &Arc) accu } }) - .unwrap_or(QueryableInfo { - complete: 0, - distance: 0, - }) + .unwrap_or(QueryableInfoType::DEFAULT) } #[inline] @@ -124,7 +113,7 @@ fn send_sourced_queryable_to_net_childs( net: &Network, childs: &[NodeIndex], res: &Arc, - qabl_info: &QueryableInfo, + qabl_info: &QueryableInfoType, src_face: Option<&mut Arc>, routing_context: NodeId, ) { @@ -198,7 +187,7 @@ fn propagate_simple_queryable( fn propagate_sourced_queryable( tables: &Tables, res: &Arc, - qabl_info: &QueryableInfo, + qabl_info: &QueryableInfoType, src_face: Option<&mut Arc>, source: &ZenohId, ) { @@ -236,7 +225,7 @@ fn register_peer_queryable( tables: &mut Tables, mut face: Option<&mut Arc>, res: &mut Arc, - qabl_info: &QueryableInfo, + qabl_info: &QueryableInfoType, peer: ZenohId, ) { let current_info = res_hat!(res).peer_qabls.get(&peer); @@ -261,7 +250,7 @@ fn declare_peer_queryable( tables: &mut Tables, face: &mut Arc, res: &mut Arc, - qabl_info: &QueryableInfo, + qabl_info: &QueryableInfoType, peer: ZenohId, ) { let face = Some(face); @@ -273,7 +262,7 @@ fn register_client_queryable( face: &mut Arc, id: QueryableId, res: &mut Arc, - qabl_info: &QueryableInfo, + qabl_info: &QueryableInfoType, ) { // Register queryable { @@ -299,7 +288,7 @@ fn declare_client_queryable( face: &mut Arc, id: QueryableId, res: &mut Arc, - qabl_info: &QueryableInfo, + qabl_info: &QueryableInfoType, ) { register_client_queryable(tables, face, id, res, qabl_info); let local_details = local_peer_qabl_info(tables, res); @@ -599,7 +588,7 @@ fn insert_target_for_qabls( tables: &Tables, net: &Network, source: NodeId, - qabls: &HashMap, + qabls: &HashMap, complete: bool, ) { if net.trees.len() > source as usize { @@ -645,7 +634,7 @@ impl HatQueriesTrait for HatCode { face: &mut Arc, id: QueryableId, res: &mut Arc, - qabl_info: 
&QueryableInfo, + qabl_info: &QueryableInfoType, node_id: NodeId, ) { if face.whatami != WhatAmI::Client { diff --git a/zenoh/src/net/routing/hat/mod.rs b/zenoh/src/net/routing/hat/mod.rs index d9feb687f2..70e94ac176 100644 --- a/zenoh/src/net/routing/hat/mod.rs +++ b/zenoh/src/net/routing/hat/mod.rs @@ -32,7 +32,7 @@ use zenoh_protocol::{ core::WireExpr, network::{ declare::{ - queryable::ext::QueryableInfo, subscriber::ext::SubscriberInfo, QueryableId, + queryable::ext::QueryableInfoType, subscriber::ext::SubscriberInfo, QueryableId, SubscriberId, }, Oam, @@ -154,7 +154,7 @@ pub(crate) trait HatQueriesTrait { face: &mut Arc, id: QueryableId, res: &mut Arc, - qabl_info: &QueryableInfo, + qabl_info: &QueryableInfoType, node_id: NodeId, ); fn undeclare_queryable( diff --git a/zenoh/src/net/routing/hat/p2p_peer/mod.rs b/zenoh/src/net/routing/hat/p2p_peer/mod.rs index 59b39d4284..1d87c2eb23 100644 --- a/zenoh/src/net/routing/hat/p2p_peer/mod.rs +++ b/zenoh/src/net/routing/hat/p2p_peer/mod.rs @@ -53,7 +53,7 @@ use zenoh_protocol::network::{ }; use zenoh_protocol::{ common::ZExtBody, - network::{declare::queryable::ext::QueryableInfo, oam::id::OAM_LINKSTATE}, + network::{declare::queryable::ext::QueryableInfoType, oam::id::OAM_LINKSTATE}, }; use zenoh_result::ZResult; use zenoh_sync::get_mut_unchecked; @@ -360,7 +360,7 @@ struct HatFace { next_id: AtomicU32, // @TODO: manage rollover and uniqueness local_subs: HashMap, SubscriberId>, remote_subs: HashMap>, - local_qabls: HashMap, (QueryableId, QueryableInfo)>, + local_qabls: HashMap, (QueryableId, QueryableInfoType)>, remote_qabls: HashMap>, } diff --git a/zenoh/src/net/routing/hat/p2p_peer/queries.rs b/zenoh/src/net/routing/hat/p2p_peer/queries.rs index caea6fe6b8..38f77bec45 100644 --- a/zenoh/src/net/routing/hat/p2p_peer/queries.rs +++ b/zenoh/src/net/routing/hat/p2p_peer/queries.rs @@ -32,29 +32,24 @@ use zenoh_protocol::network::declare::QueryableId; use zenoh_protocol::{ core::{WhatAmI, WireExpr}, 
network::declare::{ - common::ext::WireExprType, ext, queryable::ext::QueryableInfo, Declare, DeclareBody, + common::ext::WireExprType, ext, queryable::ext::QueryableInfoType, Declare, DeclareBody, DeclareQueryable, UndeclareQueryable, }, }; use zenoh_sync::get_mut_unchecked; -#[cfg(feature = "complete_n")] #[inline] -fn merge_qabl_infos(mut this: QueryableInfo, info: &QueryableInfo) -> QueryableInfo { - this.complete += info.complete; +fn merge_qabl_infos(mut this: QueryableInfoType, info: &QueryableInfoType) -> QueryableInfoType { + this.complete = this.complete || info.complete; this.distance = std::cmp::min(this.distance, info.distance); this } -#[cfg(not(feature = "complete_n"))] -#[inline] -fn merge_qabl_infos(mut this: QueryableInfo, info: &QueryableInfo) -> QueryableInfo { - this.complete = u8::from(this.complete != 0 || info.complete != 0); - this.distance = std::cmp::min(this.distance, info.distance); - this -} - -fn local_qabl_info(_tables: &Tables, res: &Arc, face: &Arc) -> QueryableInfo { +fn local_qabl_info( + _tables: &Tables, + res: &Arc, + face: &Arc, +) -> QueryableInfoType { res.session_ctxs .values() .fold(None, |accu, ctx| { @@ -71,10 +66,7 @@ fn local_qabl_info(_tables: &Tables, res: &Arc, face: &Arc) accu } }) - .unwrap_or(QueryableInfo { - complete: 0, - distance: 0, - }) + .unwrap_or(QueryableInfoType::DEFAULT) } fn propagate_simple_queryable( @@ -121,7 +113,7 @@ fn register_client_queryable( face: &mut Arc, id: QueryableId, res: &mut Arc, - qabl_info: &QueryableInfo, + qabl_info: &QueryableInfoType, ) { // Register queryable { @@ -147,7 +139,7 @@ fn declare_client_queryable( face: &mut Arc, id: QueryableId, res: &mut Arc, - qabl_info: &QueryableInfo, + qabl_info: &QueryableInfoType, ) { register_client_queryable(tables, face, id, res, qabl_info); propagate_simple_queryable(tables, res, Some(face)); @@ -263,7 +255,7 @@ impl HatQueriesTrait for HatCode { face: &mut Arc, id: QueryableId, res: &mut Arc, - qabl_info: &QueryableInfo, + 
qabl_info: &QueryableInfoType, _node_id: NodeId, ) { declare_client_queryable(tables, face, id, res, qabl_info); diff --git a/zenoh/src/net/routing/hat/router/mod.rs b/zenoh/src/net/routing/hat/router/mod.rs index 47cf02db46..27db136eda 100644 --- a/zenoh/src/net/routing/hat/router/mod.rs +++ b/zenoh/src/net/routing/hat/router/mod.rs @@ -56,7 +56,7 @@ use zenoh_config::{unwrap_or_default, ModeDependent, WhatAmI, WhatAmIMatcher, Ze use zenoh_protocol::{ common::ZExtBody, network::{ - declare::{queryable::ext::QueryableInfo, QueryableId, SubscriberId}, + declare::{queryable::ext::QueryableInfoType, QueryableId, SubscriberId}, oam::id::OAM_LINKSTATE, Oam, }, @@ -748,8 +748,8 @@ impl HatBaseTrait for HatCode { struct HatContext { router_subs: HashSet, peer_subs: HashSet, - router_qabls: HashMap, - peer_qabls: HashMap, + router_qabls: HashMap, + peer_qabls: HashMap, } impl HatContext { @@ -768,7 +768,7 @@ struct HatFace { next_id: AtomicU32, // @TODO: manage rollover and uniqueness local_subs: HashMap, SubscriberId>, remote_subs: HashMap>, - local_qabls: HashMap, (QueryableId, QueryableInfo)>, + local_qabls: HashMap, (QueryableId, QueryableInfoType)>, remote_qabls: HashMap>, } diff --git a/zenoh/src/net/routing/hat/router/queries.rs b/zenoh/src/net/routing/hat/router/queries.rs index aca6f71b3e..61abaa7c55 100644 --- a/zenoh/src/net/routing/hat/router/queries.rs +++ b/zenoh/src/net/routing/hat/router/queries.rs @@ -35,29 +35,20 @@ use zenoh_protocol::network::declare::QueryableId; use zenoh_protocol::{ core::{WhatAmI, WireExpr, ZenohId}, network::declare::{ - common::ext::WireExprType, ext, queryable::ext::QueryableInfo, Declare, DeclareBody, + common::ext::WireExprType, ext, queryable::ext::QueryableInfoType, Declare, DeclareBody, DeclareQueryable, UndeclareQueryable, }, }; use zenoh_sync::get_mut_unchecked; -#[cfg(feature = "complete_n")] #[inline] -fn merge_qabl_infos(mut this: QueryableInfo, info: &QueryableInfo) -> QueryableInfo { - this.complete += info.complete; 
+fn merge_qabl_infos(mut this: QueryableInfoType, info: &QueryableInfoType) -> QueryableInfoType { + this.complete = this.complete || info.complete; this.distance = std::cmp::min(this.distance, info.distance); this } -#[cfg(not(feature = "complete_n"))] -#[inline] -fn merge_qabl_infos(mut this: QueryableInfo, info: &QueryableInfo) -> QueryableInfo { - this.complete = u8::from(this.complete != 0 || info.complete != 0); - this.distance = std::cmp::min(this.distance, info.distance); - this -} - -fn local_router_qabl_info(tables: &Tables, res: &Arc) -> QueryableInfo { +fn local_router_qabl_info(tables: &Tables, res: &Arc) -> QueryableInfoType { let info = if hat!(tables).full_net(WhatAmI::Peer) { res.context.as_ref().and_then(|_| { res_hat!(res) @@ -89,13 +80,10 @@ fn local_router_qabl_info(tables: &Tables, res: &Arc) -> QueryableInfo accu } }) - .unwrap_or(QueryableInfo { - complete: 0, - distance: 0, - }) + .unwrap_or(QueryableInfoType::DEFAULT) } -fn local_peer_qabl_info(tables: &Tables, res: &Arc) -> QueryableInfo { +fn local_peer_qabl_info(tables: &Tables, res: &Arc) -> QueryableInfoType { let info = if res.context.is_some() { res_hat!(res) .router_qabls @@ -125,13 +113,14 @@ fn local_peer_qabl_info(tables: &Tables, res: &Arc) -> QueryableInfo { accu } }) - .unwrap_or(QueryableInfo { - complete: 0, - distance: 0, - }) + .unwrap_or(QueryableInfoType::DEFAULT) } -fn local_qabl_info(tables: &Tables, res: &Arc, face: &Arc) -> QueryableInfo { +fn local_qabl_info( + tables: &Tables, + res: &Arc, + face: &Arc, +) -> QueryableInfoType { let mut info = if res.context.is_some() { res_hat!(res) .router_qabls @@ -183,10 +172,7 @@ fn local_qabl_info(tables: &Tables, res: &Arc, face: &Arc) accu } }) - .unwrap_or(QueryableInfo { - complete: 0, - distance: 0, - }) + .unwrap_or(QueryableInfoType::DEFAULT) } #[inline] @@ -195,7 +181,7 @@ fn send_sourced_queryable_to_net_childs( net: &Network, childs: &[NodeIndex], res: &Arc, - qabl_info: &QueryableInfo, + qabl_info: 
&QueryableInfoType, src_face: Option<&mut Arc>, routing_context: NodeId, ) { @@ -279,7 +265,7 @@ fn propagate_simple_queryable( fn propagate_sourced_queryable( tables: &Tables, res: &Arc, - qabl_info: &QueryableInfo, + qabl_info: &QueryableInfoType, src_face: Option<&mut Arc>, source: &ZenohId, net_type: WhatAmI, @@ -318,7 +304,7 @@ fn register_router_queryable( tables: &mut Tables, mut face: Option<&mut Arc>, res: &mut Arc, - qabl_info: &QueryableInfo, + qabl_info: &QueryableInfoType, router: ZenohId, ) { let current_info = res_hat!(res).router_qabls.get(&router); @@ -356,7 +342,7 @@ fn declare_router_queryable( tables: &mut Tables, face: &mut Arc, res: &mut Arc, - qabl_info: &QueryableInfo, + qabl_info: &QueryableInfoType, router: ZenohId, ) { register_router_queryable(tables, Some(face), res, qabl_info, router); @@ -366,7 +352,7 @@ fn register_peer_queryable( tables: &mut Tables, face: Option<&mut Arc>, res: &mut Arc, - qabl_info: &QueryableInfo, + qabl_info: &QueryableInfoType, peer: ZenohId, ) { let current_info = res_hat!(res).peer_qabls.get(&peer); @@ -386,7 +372,7 @@ fn declare_peer_queryable( tables: &mut Tables, face: &mut Arc, res: &mut Arc, - qabl_info: &QueryableInfo, + qabl_info: &QueryableInfoType, peer: ZenohId, ) { let mut face = Some(face); @@ -401,7 +387,7 @@ fn register_client_queryable( face: &mut Arc, id: QueryableId, res: &mut Arc, - qabl_info: &QueryableInfo, + qabl_info: &QueryableInfoType, ) { // Register queryable { @@ -427,7 +413,7 @@ fn declare_client_queryable( face: &mut Arc, id: QueryableId, res: &mut Arc, - qabl_info: &QueryableInfo, + qabl_info: &QueryableInfoType, ) { register_client_queryable(tables, face, id, res, qabl_info); let local_details = local_router_qabl_info(tables, res); @@ -975,7 +961,7 @@ fn insert_target_for_qabls( tables: &Tables, net: &Network, source: NodeId, - qabls: &HashMap, + qabls: &HashMap, complete: bool, ) { if net.trees.len() > source as usize { @@ -1021,7 +1007,7 @@ impl HatQueriesTrait for HatCode { 
face: &mut Arc, id: QueryableId, res: &mut Arc, - qabl_info: &QueryableInfo, + qabl_info: &QueryableInfoType, node_id: NodeId, ) { match face.whatami { diff --git a/zenoh/src/net/runtime/adminspace.rs b/zenoh/src/net/runtime/adminspace.rs index 29106cb89d..343199e367 100644 --- a/zenoh/src/net/runtime/adminspace.rs +++ b/zenoh/src/net/runtime/adminspace.rs @@ -39,7 +39,7 @@ use zenoh_protocol::{ ExprId, WireExpr, ZenohId, EMPTY_EXPR_ID, }, network::{ - declare::{queryable::ext::QueryableInfo, subscriber::ext::SubscriberInfo}, + declare::{queryable::ext::QueryableInfoType, subscriber::ext::SubscriberInfo}, ext, Declare, DeclareBody, DeclareQueryable, DeclareSubscriber, Push, Request, Response, ResponseFinal, }, @@ -283,10 +283,7 @@ impl AdminSpace { body: DeclareBody::DeclareQueryable(DeclareQueryable { id: runtime.next_id(), wire_expr: [&root_key, "/**"].concat().into(), - ext_info: QueryableInfo { - complete: 0, - distance: 0, - }, + ext_info: QueryableInfoType::DEFAULT, }), }); diff --git a/zenoh/src/session.rs b/zenoh/src/session.rs index 496c6879ce..58d315c848 100644 --- a/zenoh/src/session.rs +++ b/zenoh/src/session.rs @@ -71,7 +71,7 @@ use zenoh_protocol::{ }, network::{ declare::{ - self, common::ext::WireExprType, queryable::ext::QueryableInfo, + self, common::ext::WireExprType, queryable::ext::QueryableInfoType, subscriber::ext::SubscriberInfo, Declare, DeclareBody, DeclareKeyExpr, DeclareQueryable, DeclareSubscriber, UndeclareQueryable, UndeclareSubscriber, }, @@ -1177,8 +1177,8 @@ impl Session { if origin != Locality::SessionLocal { let primitives = state.primitives.as_ref().unwrap().clone(); drop(state); - let qabl_info = QueryableInfo { - complete: if complete { 1 } else { 0 }, + let qabl_info = QueryableInfoType { + complete, distance: 0, }; primitives.send_declare(Declare { diff --git a/zenohd/src/main.rs b/zenohd/src/main.rs index b0d29ea89b..af7ec3bf43 100644 --- a/zenohd/src/main.rs +++ b/zenohd/src/main.rs @@ -371,7 +371,6 @@ fn 
test_default_features() { concat!( " zenoh/auth_pubkey", " zenoh/auth_usrpwd", - // " zenoh/complete_n", // " zenoh/shared-memory", // " zenoh/stats", " zenoh/transport_multilink", @@ -397,7 +396,6 @@ fn test_no_default_features() { concat!( // " zenoh/auth_pubkey", // " zenoh/auth_usrpwd", - // " zenoh/complete_n", // " zenoh/shared-memory", // " zenoh/stats", // " zenoh/transport_multilink", From 8cd60d0afaeec5ab0468e899db300302f65c62e6 Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Wed, 27 Mar 2024 00:47:48 +0100 Subject: [PATCH 052/124] Publication updated --- examples/examples/z_pub.rs | 1 + zenoh/src/publication.rs | 254 +++++++++++++++++++++++++------------ zenoh/src/queryable.rs | 13 +- zenoh/src/sample.rs | 18 ++- zenoh/src/session.rs | 5 +- 5 files changed, 196 insertions(+), 95 deletions(-) diff --git a/examples/examples/z_pub.rs b/examples/examples/z_pub.rs index 7ba17745b5..d22d4d55ee 100644 --- a/examples/examples/z_pub.rs +++ b/examples/examples/z_pub.rs @@ -16,6 +16,7 @@ use clap::Parser; use std::time::Duration; use zenoh::config::Config; use zenoh::prelude::r#async::*; +use zenoh::sample_builder::SampleBuilderTrait; use zenoh_examples::CommonArgs; #[async_std::main] diff --git a/zenoh/src/publication.rs b/zenoh/src/publication.rs index 97f485f1e3..1e1c0cb509 100644 --- a/zenoh/src/publication.rs +++ b/zenoh/src/publication.rs @@ -17,7 +17,7 @@ use crate::net::primitives::Primitives; use crate::prelude::*; #[zenoh_macros::unstable] use crate::sample::Attachment; -use crate::sample::{DataInfo, QoS, Sample, SampleKind}; +use crate::sample::{DataInfo, QoS, Sample, SampleFields, SampleKind}; use crate::sample_builder::{ DeleteSampleBuilderTrait, PutSampleBuilderTrait, QoSBuilderTrait, SampleBuilderTrait, }; @@ -33,8 +33,6 @@ use zenoh_core::{zread, AsyncResolve, Resolvable, Resolve, SyncResolve}; use zenoh_protocol::network::push::ext; use zenoh_protocol::network::Mapping; use zenoh_protocol::network::Push; -#[zenoh_macros::unstable] -use 
zenoh_protocol::zenoh::ext::SourceInfoType; use zenoh_protocol::zenoh::Del; use zenoh_protocol::zenoh::PushBody; use zenoh_protocol::zenoh::Put; @@ -63,7 +61,7 @@ pub struct DeleteBuilder<'a, 'b> { pub(crate) publisher: PublisherBuilder<'a, 'b>, pub(crate) timestamp: Option, #[cfg(feature = "unstable")] - pub(crate) source_info: Option, + pub(crate) source_info: SourceInfo, #[cfg(feature = "unstable")] pub(crate) attachment: Option, } @@ -94,7 +92,7 @@ pub struct PutBuilder<'a, 'b> { pub(crate) encoding: Encoding, pub(crate) timestamp: Option, #[cfg(feature = "unstable")] - pub(crate) source_info: Option, + pub(crate) source_info: SourceInfo, #[cfg(feature = "unstable")] pub(crate) attachment: Option, } @@ -160,7 +158,7 @@ impl SampleBuilderTrait for PutBuilder<'_, '_> { #[cfg(feature = "unstable")] fn with_source_info(self, source_info: SourceInfo) -> Self { Self { - source_info: Some(source_info), + source_info, ..self } } @@ -190,7 +188,7 @@ impl SampleBuilderTrait for DeleteBuilder<'_, '_> { #[cfg(feature = "unstable")] fn with_source_info(self, source_info: SourceInfo) -> Self { Self { - source_info: Some(source_info), + source_info, ..self } } @@ -258,7 +256,7 @@ impl Resolvable for DeleteBuilder<'_, '_> { impl SyncResolve for PutBuilder<'_, '_> { #[inline] fn res_sync(self) -> ::To { - let publisher = self.publisher.one_time_res_sync()?; + let publisher = self.publisher.create_one_shot_publisher()?; resolve_put( &publisher, self.payload, @@ -276,7 +274,7 @@ impl SyncResolve for PutBuilder<'_, '_> { impl SyncResolve for DeleteBuilder<'_, '_> { #[inline] fn res_sync(self) -> ::To { - let publisher = self.publisher.one_time_res_sync()?; + let publisher = self.publisher.create_one_shot_publisher()?; resolve_put( &publisher, Payload::empty(), @@ -467,20 +465,6 @@ impl<'a> Publisher<'a> { std::sync::Arc::new(self) } - fn _write(&self, kind: SampleKind, payload: Payload) -> Publication { - Publication { - publisher: self, - payload, - kind, - encoding: 
Encoding::ZENOH_BYTES, - timestamp: None, - #[cfg(feature = "unstable")] - source_info: None, - #[cfg(feature = "unstable")] - attachment: None, - } - } - /// Put data. /// /// # Examples @@ -494,11 +478,20 @@ impl<'a> Publisher<'a> { /// # }) /// ``` #[inline] - pub fn put(&self, payload: IntoPayload) -> Publication + pub fn put(&self, payload: IntoPayload) -> PutPublication where IntoPayload: Into, { - self._write(SampleKind::Put, payload.into()) + PutPublication { + publisher: self, + payload: payload.into(), + encoding: Encoding::ZENOH_BYTES, + timestamp: None, + #[cfg(feature = "unstable")] + source_info: SourceInfo::empty(), + #[cfg(feature = "unstable")] + attachment: None, + } } /// Delete data. @@ -513,8 +506,15 @@ impl<'a> Publisher<'a> { /// publisher.delete().res().await.unwrap(); /// # }) /// ``` - pub fn delete(&self) -> Publication { - self._write(SampleKind::Delete, Payload::empty()) + pub fn delete(&self) -> DeletePublication { + DeletePublication { + publisher: self, + timestamp: None, + #[cfg(feature = "unstable")] + source_info: SourceInfo::empty(), + #[cfg(feature = "unstable")] + attachment: None, + } } /// Return the [`MatchingStatus`] of the publisher. @@ -737,64 +737,129 @@ impl Drop for Publisher<'_> { } /// A [`Resolvable`] returned by [`Publisher::put()`](Publisher::put), -/// [`Publisher::delete()`](Publisher::delete) and [`Publisher::write()`](Publisher::write). 
#[must_use = "Resolvables do nothing unless you resolve them using the `res` method from either `SyncResolve` or `AsyncResolve`"] -pub struct Publication<'a> { +pub struct PutPublication<'a> { publisher: &'a Publisher<'a>, payload: Payload, - kind: SampleKind, encoding: Encoding, timestamp: Option, #[cfg(feature = "unstable")] - pub(crate) source_info: Option, + pub(crate) source_info: SourceInfo, #[cfg(feature = "unstable")] pub(crate) attachment: Option, } -impl<'a> Publication<'a> { - pub fn with_encoding(mut self, encoding: Encoding) -> Self { - self.encoding = encoding; - self +/// A [`Resolvable`] returned by [`Publisher::delete()`](Publisher::delete) +#[must_use = "Resolvables do nothing unless you resolve them using the `res` method from either `SyncResolve` or `AsyncResolve`"] +pub struct DeletePublication<'a> { + publisher: &'a Publisher<'a>, + timestamp: Option, + #[cfg(feature = "unstable")] + pub(crate) source_info: SourceInfo, + #[cfg(feature = "unstable")] + pub(crate) attachment: Option, +} + +impl SampleBuilderTrait for PutPublication<'_> { + fn with_timestamp_opt(self, timestamp: Option) -> Self { + Self { timestamp, ..self } } - #[zenoh_macros::unstable] - pub fn with_attachment(mut self, attachment: Attachment) -> Self { - self.attachment = Some(attachment); - self + fn with_timestamp(self, timestamp: uhlc::Timestamp) -> Self { + Self { + timestamp: Some(timestamp), + ..self + } } - /// Send data with the given [`SourceInfo`]. 
- /// - /// # Examples - /// ``` - /// # async_std::task::block_on(async { - /// use zenoh::prelude::r#async::*; - /// - /// let session = zenoh::open(config::peer()).res().await.unwrap(); - /// let publisher = session.declare_publisher("key/expression").res().await.unwrap(); - /// publisher.put("Value").with_source_info(SourceInfo { - /// source_id: Some(publisher.id()), - /// source_sn: Some(0), - /// }).res().await.unwrap(); - /// # }) - /// ``` - #[zenoh_macros::unstable] - pub fn with_source_info(mut self, source_info: SourceInfo) -> Self { - self.source_info = Some(source_info); - self + #[cfg(feature = "unstable")] + fn with_source_info(self, source_info: SourceInfo) -> Self { + Self { + source_info, + ..self + } + } + + #[cfg(feature = "unstable")] + fn with_attachment_opt(self, attachment: Option) -> Self { + Self { attachment, ..self } + } + + #[cfg(feature = "unstable")] + fn with_attachment(self, attachment: Attachment) -> Self { + Self { + attachment: Some(attachment), + ..self + } + } +} + +impl PutSampleBuilderTrait for PutPublication<'_> { + fn with_encoding(self, encoding: Encoding) -> Self { + Self { encoding, ..self } } + + fn with_payload(self, payload: IntoPayload) -> Self + where + IntoPayload: Into, + { + Self { + payload: payload.into(), + ..self + } + } +} + +impl SampleBuilderTrait for DeletePublication<'_> { + fn with_timestamp_opt(self, timestamp: Option) -> Self { + Self { timestamp, ..self } + } + + fn with_timestamp(self, timestamp: uhlc::Timestamp) -> Self { + Self { + timestamp: Some(timestamp), + ..self + } + } + + #[cfg(feature = "unstable")] + fn with_source_info(self, source_info: SourceInfo) -> Self { + Self { + source_info, + ..self + } + } + + #[cfg(feature = "unstable")] + fn with_attachment_opt(self, attachment: Option) -> Self { + Self { attachment, ..self } + } + + #[cfg(feature = "unstable")] + fn with_attachment(self, attachment: Attachment) -> Self { + Self { + attachment: Some(attachment), + ..self + } + } +} + +impl 
DeleteSampleBuilderTrait for DeletePublication<'_> {} + +impl Resolvable for PutPublication<'_> { + type To = ZResult<()>; } -impl Resolvable for Publication<'_> { +impl Resolvable for DeletePublication<'_> { type To = ZResult<()>; } -impl SyncResolve for Publication<'_> { +impl SyncResolve for PutPublication<'_> { fn res_sync(self) -> ::To { resolve_put( self.publisher, self.payload, - self.kind, + SampleKind::Put, self.encoding, self.timestamp, #[cfg(feature = "unstable")] @@ -805,7 +870,31 @@ impl SyncResolve for Publication<'_> { } } -impl AsyncResolve for Publication<'_> { +impl SyncResolve for DeletePublication<'_> { + fn res_sync(self) -> ::To { + resolve_put( + self.publisher, + Payload::empty(), + SampleKind::Delete, + Encoding::ZENOH_BYTES, + self.timestamp, + #[cfg(feature = "unstable")] + self.source_info, + #[cfg(feature = "unstable")] + self.attachment, + ) + } +} + +impl AsyncResolve for PutPublication<'_> { + type Future = Ready; + + fn res_async(self) -> Self::Future { + std::future::ready(self.res_sync()) + } +} + +impl AsyncResolve for DeletePublication<'_> { type Future = Ready; fn res_async(self) -> Self::Future { @@ -823,18 +912,25 @@ impl<'a> Sink for Publisher<'a> { #[inline] fn start_send(self: Pin<&mut Self>, item: Sample) -> Result<(), Self::Error> { - Publication { - publisher: &self, - payload: item.payload, - kind: item.kind, - encoding: item.encoding, - timestamp: None, + let SampleFields { + payload, + kind, + encoding, + #[cfg(feature = "unstable")] + attachment, + .. 
+ } = item.into(); + resolve_put( + &self, + payload, + kind, + encoding, + None, #[cfg(feature = "unstable")] - source_info: None, + SourceInfo::empty(), #[cfg(feature = "unstable")] - attachment: item.attachment, - } - .res_sync() + attachment, + ) } #[inline] @@ -928,7 +1024,7 @@ impl<'a, 'b> PublisherBuilder<'a, 'b> { } // internal function for `PutBuilder` and `DeleteBuilder` - fn one_time_res_sync(self) -> ZResult> { + fn create_one_shot_publisher(self) -> ZResult> { Ok(Publisher { session: self.session, #[cfg(feature = "unstable")] @@ -1013,7 +1109,7 @@ fn resolve_put( kind: SampleKind, encoding: Encoding, timestamp: Option, - #[cfg(feature = "unstable")] source_info: Option, + #[cfg(feature = "unstable")] source_info: SourceInfo, #[cfg(feature = "unstable")] attachment: Option, ) -> ZResult<()> { log::trace!("write({:?}, [...])", &publisher.key_expr); @@ -1051,10 +1147,7 @@ fn resolve_put( timestamp, encoding: encoding.clone().into(), #[cfg(feature = "unstable")] - ext_sinfo: source_info.map(|s| SourceInfoType { - id: s.source_id.unwrap_or_default(), - sn: s.source_sn.unwrap_or_default() as u32, - }), + ext_sinfo: source_info.into(), #[cfg(not(feature = "unstable"))] ext_sinfo: None, #[cfg(feature = "shared-memory")] @@ -1076,10 +1169,7 @@ fn resolve_put( PushBody::Del(Del { timestamp, #[cfg(feature = "unstable")] - ext_sinfo: source_info.map(|s| SourceInfoType { - id: s.source_id.unwrap_or_default(), - sn: s.source_sn.unwrap_or_default() as u32, - }), + ext_sinfo: source_info.into(), #[cfg(not(feature = "unstable"))] ext_sinfo: None, ext_attachment, diff --git a/zenoh/src/queryable.rs b/zenoh/src/queryable.rs index 9edb9fb26c..6f71cd7fb7 100644 --- a/zenoh/src/queryable.rs +++ b/zenoh/src/queryable.rs @@ -518,17 +518,10 @@ impl Query { { bail!("Attempted to reply on `{}`, which does not intersect with query `{}`, despite query only allowing replies on matching key expressions", sample.key_expr, self.key_expr()) } - #[allow(unused_mut)] // will be unused if 
feature = "unstable" is not enabled - let mut ext_sinfo = None; + #[cfg(not(feature = "unstable"))] + let ext_sinfo = None; #[cfg(feature = "unstable")] - { - if sample.source_info.source_id.is_some() || sample.source_info.source_sn.is_some() { - ext_sinfo = Some(zenoh::put::ext::SourceInfoType { - id: sample.source_info.source_id.unwrap_or_default(), - sn: sample.source_info.source_sn.unwrap_or_default() as u32, - }) - } - } + let ext_sinfo = sample.source_info.into(); self.inner.primitives.send_response(Response { rid: self.inner.qid, wire_expr: WireExpr { diff --git a/zenoh/src/sample.rs b/zenoh/src/sample.rs index acf8536a0e..1998f3e844 100644 --- a/zenoh/src/sample.rs +++ b/zenoh/src/sample.rs @@ -22,7 +22,7 @@ use crate::Priority; use serde::Serialize; use std::{convert::TryFrom, fmt}; use zenoh_protocol::core::EntityGlobalId; -use zenoh_protocol::{core::CongestionControl, network::push::ext::QoSType}; +use zenoh_protocol::{core::CongestionControl, network::push::ext::QoSType, zenoh}; pub type SourceSn = u64; @@ -163,6 +163,22 @@ impl SourceInfo { source_sn: None, } } + pub(crate) fn is_empty(&self) -> bool { + self.source_id.is_none() && self.source_sn.is_none() + } +} + +impl From for Option { + fn from(source_info: SourceInfo) -> Option { + if source_info.is_empty() { + None + } else { + Some(zenoh::put::ext::SourceInfoType { + id: source_info.source_id.unwrap_or_default(), + sn: source_info.source_sn.unwrap_or_default() as u32, + }) + } + } } #[zenoh_macros::unstable] diff --git a/zenoh/src/session.rs b/zenoh/src/session.rs index db81888018..a2371d1bfa 100644 --- a/zenoh/src/session.rs +++ b/zenoh/src/session.rs @@ -41,6 +41,7 @@ use crate::Priority; use crate::Sample; use crate::SampleKind; use crate::Selector; +use crate::SourceInfo; use crate::Value; use async_std::task; use log::{error, trace, warn}; @@ -711,7 +712,7 @@ impl Session { #[cfg(feature = "unstable")] attachment: None, #[cfg(feature = "unstable")] - source_info: None, + source_info: 
SourceInfo::empty(), } } @@ -745,7 +746,7 @@ impl Session { #[cfg(feature = "unstable")] attachment: None, #[cfg(feature = "unstable")] - source_info: None, + source_info: SourceInfo::empty(), } } /// Query data from the matching queryables in the system. From 00e0a59a71804fa54e4e2cc6d92a35731a079654 Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Wed, 27 Mar 2024 00:59:12 +0100 Subject: [PATCH 053/124] build fix --- examples/examples/z_pub_shm_thr.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/examples/examples/z_pub_shm_thr.rs b/examples/examples/z_pub_shm_thr.rs index 7c6f3cbbd3..5230ea3ce6 100644 --- a/examples/examples/z_pub_shm_thr.rs +++ b/examples/examples/z_pub_shm_thr.rs @@ -15,6 +15,7 @@ use clap::Parser; use zenoh::config::Config; use zenoh::prelude::r#async::*; use zenoh::publication::CongestionControl; +use zenoh::sample_builder::QoSBuilderTrait; use zenoh::shm::SharedMemoryManager; use zenoh_examples::CommonArgs; From e601271c25becb47c0f14fbbf0dccce2dfdb81f5 Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Wed, 27 Mar 2024 02:26:16 +0100 Subject: [PATCH 054/124] reply_sample restored --- plugins/zenoh-plugin-example/src/lib.rs | 13 +----- .../src/replica/storage.rs | 42 +++++++++++-------- zenoh-ext/src/publication_cache.rs | 33 +++------------ zenoh-ext/src/querying_subscriber.rs | 9 +--- zenoh/src/queryable.rs | 38 ++++++++++++----- zenoh/src/sample_builder.rs | 6 +-- 6 files changed, 64 insertions(+), 77 deletions(-) diff --git a/plugins/zenoh-plugin-example/src/lib.rs b/plugins/zenoh-plugin-example/src/lib.rs index 40f8d69488..04f49b4739 100644 --- a/plugins/zenoh-plugin-example/src/lib.rs +++ b/plugins/zenoh-plugin-example/src/lib.rs @@ -24,8 +24,6 @@ use std::sync::{ use zenoh::plugins::{RunningPluginTrait, ZenohPlugin}; use zenoh::prelude::r#async::*; use zenoh::runtime::Runtime; -use zenoh::sample::SampleFields; -use zenoh::sample_builder::SampleBuilderTrait; use zenoh_core::zlock; use zenoh_plugin_trait::{plugin_long_version, 
plugin_version, Plugin, PluginControl}; use zenoh_result::{bail, ZResult}; @@ -176,16 +174,7 @@ async fn run(runtime: Runtime, selector: KeyExpr<'_>, flag: Arc) { info!("Handling query '{}'", query.selector()); for (key_expr, sample) in stored.iter() { if query.selector().key_expr.intersects(unsafe{keyexpr::from_str_unchecked(key_expr)}) { - let SampleFields { key_expr, timestamp, attachment, source_info, payload, kind, .. } = sample.clone().into(); - let reply = query - .reply_sample(key_expr) - .with_timestamp_opt(timestamp) - .with_attachment_opt(attachment) - .with_source_info(source_info); - match kind { - SampleKind::Put => reply.put(payload).res().await.unwrap(), - SampleKind::Delete => reply.delete().res().await.unwrap(), - } + query.reply_sample(sample.clone()).res().await.unwrap(); } } } diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs index 67ce871bb0..aed13bbbf1 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs @@ -21,14 +21,17 @@ use futures::select; use std::collections::{HashMap, HashSet}; use std::str::{self, FromStr}; use std::time::{SystemTime, UNIX_EPOCH}; +use zenoh::buffers::buffer::SplitBuffer; use zenoh::buffers::ZBuf; -use zenoh::prelude::r#async::*; -use zenoh::query::ConsolidationMode; +use zenoh::key_expr::KeyExpr; +use zenoh::query::{ConsolidationMode, QueryTarget}; +use zenoh::sample::{Sample, SampleKind}; use zenoh::sample_builder::{ - PutSampleBuilder, PutSampleBuilderTrait, SampleBuilder, SampleBuilderTrait, + DeleteSampleBuilder, PutSampleBuilder, PutSampleBuilderTrait, SampleBuilder, SampleBuilderTrait, }; use zenoh::time::{new_reception_timestamp, Timestamp, NTP64}; -use zenoh::{Result as ZResult, Session}; +use zenoh::value::Value; +use zenoh::{Result as ZResult, Session, SessionDeclarations}; use zenoh_backend_traits::config::{GarbageCollectionConfig, 
StorageConfig}; use zenoh_backend_traits::{Capability, History, Persistence, StorageInsertionResult, StoredData}; use zenoh_core::{AsyncResolve, SyncResolve}; @@ -235,11 +238,8 @@ impl StorageService { continue; } }; - let sample = if sample.timestamp().is_none() { - SampleBuilder::from(sample).with_timestamp(new_reception_timestamp()).res_sync() - } else { - sample - }; + let timestamp = sample.timestamp().cloned().unwrap_or(new_reception_timestamp()); + let sample = SampleBuilder::from(sample).with_timestamp(timestamp).res_sync(); self.process_sample(sample).await; }, // on query on key_expr @@ -307,21 +307,27 @@ impl StorageService { .ovderriding_wild_update(&k, sample.timestamp().unwrap()) .await { - Some(overriding_update) => { + Some(Update { + kind: SampleKind::Put, + data, + }) => { let Value { payload, encoding, .. - } = overriding_update.data.value; + } = data.value; PutSampleBuilder::new(KeyExpr::from(k.clone()), payload) .with_encoding(encoding) - .with_timestamp(overriding_update.data.timestamp) - .res_sync() - } - None => { - PutSampleBuilder::new(KeyExpr::from(k.clone()), sample.payload().clone()) - .with_encoding(sample.encoding().clone()) - .with_timestamp(*sample.timestamp().unwrap()) + .with_timestamp(data.timestamp) .res_sync() } + Some(Update { + kind: SampleKind::Delete, + data, + }) => DeleteSampleBuilder::new(KeyExpr::from(k.clone())) + .with_timestamp(data.timestamp) + .res_sync(), + None => SampleBuilder::from(sample.clone()) + .keyexpr(k.clone()) + .res_sync(), }; let stripped_key = match self.strip_prefix(sample_to_store.key_expr()) { diff --git a/zenoh-ext/src/publication_cache.rs b/zenoh-ext/src/publication_cache.rs index 8a782a179e..85cb96cce2 100644 --- a/zenoh-ext/src/publication_cache.rs +++ b/zenoh-ext/src/publication_cache.rs @@ -20,8 +20,6 @@ use std::convert::TryInto; use std::future::Ready; use zenoh::prelude::r#async::*; use zenoh::queryable::{Query, Queryable}; -use zenoh::sample::SampleFields; -use 
zenoh::sample_builder::SampleBuilderTrait; use zenoh::subscriber::FlumeSubscriber; use zenoh::SessionRef; use zenoh_core::{AsyncResolve, Resolvable, SyncResolve}; @@ -118,29 +116,6 @@ pub struct PublicationCache<'a> { _stoptx: Sender, } -async fn reply_sample(query: &Query, sample: &Sample) { - let SampleFields { - key_expr, - timestamp, - attachment, - source_info, - payload, - kind, - .. - } = sample.clone().into(); - let reply = query - .reply_sample(key_expr) - .with_timestamp_opt(timestamp) - .with_attachment_opt(attachment) - .with_source_info(source_info); - if let Err(e) = match kind { - SampleKind::Put => reply.put(payload).res_async().await, - SampleKind::Delete => reply.delete().res_async().await, - } { - log::warn!("Error replying to query: {}", e); - } -} - impl<'a> PublicationCache<'a> { fn new(conf: PublicationCacheBuilder<'a, '_, '_>) -> ZResult> { let key_expr = conf.pub_key_expr?; @@ -237,7 +212,9 @@ impl<'a> PublicationCache<'a> { continue; } } - reply_sample(&query, sample).await; + if let Err(e) = query.reply_sample(sample.clone()).res_async().await { + log::warn!("Error replying to query: {}", e); + } } } } else { @@ -249,7 +226,9 @@ impl<'a> PublicationCache<'a> { continue; } } - reply_sample(&query, sample).await; + if let Err(e) = query.reply_sample(sample.clone()).res_async().await { + log::warn!("Error replying to query: {}", e); + } } } } diff --git a/zenoh-ext/src/querying_subscriber.rs b/zenoh-ext/src/querying_subscriber.rs index eb6d6e9516..5c302840b8 100644 --- a/zenoh-ext/src/querying_subscriber.rs +++ b/zenoh-ext/src/querying_subscriber.rs @@ -664,13 +664,8 @@ impl<'a, Receiver> FetchingSubscriber<'a, Receiver> { log::trace!("Sample received while fetch in progress: push it to merge_queue"); // ensure the sample has a timestamp, thus it will always be sorted into the MergeQueue // after any timestamped Sample possibly coming from a fetch reply. 
- let s = if s.timestamp().is_none() { - SampleBuilder::from(s) - .with_timestamp(new_reception_timestamp()) - .res_sync() - } else { - s - }; + let timestamp = s.timestamp().cloned().unwrap_or(new_reception_timestamp()); + let s = SampleBuilder::from(s).with_timestamp(timestamp).res_sync(); state.merge_queue.push(s); } } diff --git a/zenoh/src/queryable.rs b/zenoh/src/queryable.rs index 6f71cd7fb7..14e9d09068 100644 --- a/zenoh/src/queryable.rs +++ b/zenoh/src/queryable.rs @@ -106,20 +106,19 @@ impl Query { self.inner.attachment.as_ref() } - /// Sends a reply or delete reply to this Query + /// Sends a reply in the form of [`Sample`] to this Query. /// - /// This function is useful when resending the samples which can be of [`SampleKind::Put`] or [`SampleKind::Delete`] - /// It allows to build the reply with same common parameters, like timestamp, attachment, source_info, etc. - /// and only on final step to choose the kind of reply by calling [`ReplySampleBuilder::put`] or [`ReplySampleBuilder::delete`] methods. + /// By default, queries only accept replies whose key expression intersects with the query's. + /// Unless the query has enabled disjoint replies (you can check this through [`Query::accepts_replies`]), + /// replying on a disjoint key expression will result in an error when resolving the reply. + /// This api is for internal use only. 
#[inline(always)] - pub fn reply_sample(&self, key_expr: IntoKeyExpr) -> ReplySampleBuilder - where - IntoKeyExpr: Into>, - { - let sample_builder = SampleBuilder::new(key_expr); + #[cfg(feature = "unstable")] + #[doc(hidden)] + pub fn reply_sample(&self, sample: Sample) -> ReplySampleBuilder<'_> { ReplySampleBuilder { query: self, - sample_builder, + sample_builder: sample.into(), } } @@ -302,6 +301,25 @@ impl QoSBuilderTrait for ReplySampleBuilder<'_> { } } +impl Resolvable for ReplySampleBuilder<'_> { + type To = ZResult<()>; +} + +impl SyncResolve for ReplySampleBuilder<'_> { + fn res_sync(self) -> ::To { + let sample = self.sample_builder.res_sync(); + self.query._reply_sample(sample) + } +} + +impl AsyncResolve for ReplySampleBuilder<'_> { + type Future = Ready; + + fn res_async(self) -> Self::Future { + std::future::ready(self.res_sync()) + } +} + /// A builder returned by [`Query::reply()`](Query::reply) #[must_use = "Resolvables do nothing unless you resolve them using the `res` method from either `SyncResolve` or `AsyncResolve`"] #[derive(Debug)] diff --git a/zenoh/src/sample_builder.rs b/zenoh/src/sample_builder.rs index 1710cbc85b..7e38e84afd 100644 --- a/zenoh/src/sample_builder.rs +++ b/zenoh/src/sample_builder.rs @@ -87,7 +87,7 @@ impl SampleBuilder { }) } /// Allows to change keyexpr of [`Sample`] - pub fn with_keyexpr(self, key_expr: IntoKeyExpr) -> Self + pub fn keyexpr(self, key_expr: IntoKeyExpr) -> Self where IntoKeyExpr: Into>, { @@ -189,7 +189,7 @@ impl PutSampleBuilder { where IntoKeyExpr: Into>, { - Self(self.0.with_keyexpr(key_expr)) + Self(self.0.keyexpr(key_expr)) } // It's convenient to set QoS as a whole for internal usage. For user API there are `congestion_control`, `priority` and `express` methods. 
pub(crate) fn with_qos(self, qos: QoS) -> Self { @@ -283,7 +283,7 @@ impl DeleteSampleBuilder { where IntoKeyExpr: Into>, { - Self(self.0.with_keyexpr(key_expr)) + Self(self.0.keyexpr(key_expr)) } // It's convenient to set QoS as a whole for internal usage. For user API there are `congestion_control`, `priority` and `express` methods. pub(crate) fn with_qos(self, qos: QoS) -> Self { From ea4020ddd3bba3402bba7a4fe172cc2518333066 Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Wed, 27 Mar 2024 11:29:05 +0100 Subject: [PATCH 055/124] build fixes --- zenoh/src/publication.rs | 2 ++ zenoh/src/session.rs | 1 + 2 files changed, 3 insertions(+) diff --git a/zenoh/src/publication.rs b/zenoh/src/publication.rs index 1e1c0cb509..8772319593 100644 --- a/zenoh/src/publication.rs +++ b/zenoh/src/publication.rs @@ -73,6 +73,7 @@ pub struct DeleteBuilder<'a, 'b> { /// # async_std::task::block_on(async { /// use zenoh::prelude::r#async::*; /// use zenoh::publication::CongestionControl; +/// use zenoh::sample_builder::{PutSampleBuilderTrait, QoSBuilderTrait}; /// /// let session = zenoh::open(config::peer()).res().await.unwrap(); /// session @@ -951,6 +952,7 @@ impl<'a> Sink for Publisher<'a> { /// # async_std::task::block_on(async { /// use zenoh::prelude::r#async::*; /// use zenoh::publication::CongestionControl; +/// use zenoh::sample_builder::{PutSampleBuilderTrait, QoSBuilderTrait}; /// /// let session = zenoh::open(config::peer()).res().await.unwrap(); /// let publisher = session diff --git a/zenoh/src/session.rs b/zenoh/src/session.rs index a2371d1bfa..ffe7036050 100644 --- a/zenoh/src/session.rs +++ b/zenoh/src/session.rs @@ -683,6 +683,7 @@ impl Session { /// ``` /// # async_std::task::block_on(async { /// use zenoh::prelude::r#async::*; + /// use zenoh::sample_builder::PutSampleBuilderTrait; /// /// let session = zenoh::open(config::peer()).res().await.unwrap(); /// session From 5f0b531041ace6a303533ac0fbc56227ba121617 Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: 
Wed, 27 Mar 2024 12:37:22 +0100 Subject: [PATCH 056/124] clippy warning fix --- commons/zenoh-macros/build.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/commons/zenoh-macros/build.rs b/commons/zenoh-macros/build.rs index 557593d00e..d3c6b4e55b 100644 --- a/commons/zenoh-macros/build.rs +++ b/commons/zenoh-macros/build.rs @@ -24,6 +24,7 @@ fn main() { let mut version_rs = OpenOptions::new() .create(true) .write(true) + .truncate(true) .open(version_rs) .unwrap(); version_rs.write_all(&output.stdout).unwrap(); From 5a9bf0aacbba65295489110f6c6d645b9c50811b Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Wed, 27 Mar 2024 12:44:48 +0100 Subject: [PATCH 057/124] Payload API (#866) * Remove Deref/DerefMut to ZBuf from Payload. * Use reader in payload deserializer * Remove payload writer * Replace deserialize::() with deserialize::Cow() * Fix cargo clippy * Remove blank lifetime --- commons/zenoh-macros/build.rs | 1 + plugins/zenoh-plugin-example/src/lib.rs | 3 +- plugins/zenoh-plugin-rest/src/lib.rs | 22 +++- .../src/replica/storage.rs | 20 ++- zenoh-ext/src/group.rs | 7 +- zenoh/src/payload.rs | 121 ++++++++++-------- zenoh/tests/attachments.rs | 16 +-- 7 files changed, 108 insertions(+), 82 deletions(-) diff --git a/commons/zenoh-macros/build.rs b/commons/zenoh-macros/build.rs index 557593d00e..d5ce6632dc 100644 --- a/commons/zenoh-macros/build.rs +++ b/commons/zenoh-macros/build.rs @@ -23,6 +23,7 @@ fn main() { let version_rs = std::path::PathBuf::from(env::var_os("OUT_DIR").unwrap()).join("version.rs"); let mut version_rs = OpenOptions::new() .create(true) + .truncate(true) .write(true) .open(version_rs) .unwrap(); diff --git a/plugins/zenoh-plugin-example/src/lib.rs b/plugins/zenoh-plugin-example/src/lib.rs index 04f49b4739..ad254278e3 100644 --- a/plugins/zenoh-plugin-example/src/lib.rs +++ b/plugins/zenoh-plugin-example/src/lib.rs @@ -15,6 +15,7 @@ use futures::select; use log::{debug, info}; +use std::borrow::Cow; use std::collections::HashMap; use 
std::convert::TryFrom; use std::sync::{ @@ -164,7 +165,7 @@ async fn run(runtime: Runtime, selector: KeyExpr<'_>, flag: Arc) { // on sample received by the Subscriber sample = sub.recv_async() => { let sample = sample.unwrap(); - let payload = sample.payload().deserialize::().unwrap_or_else(|e| format!("{}", e)); + let payload = sample.payload().deserialize::>().unwrap_or_else(|e| Cow::from(e.to_string())); info!("Received data ('{}': '{}')", sample.key_expr(), payload); stored.insert(sample.key_expr().to_string(), sample); }, diff --git a/plugins/zenoh-plugin-rest/src/lib.rs b/plugins/zenoh-plugin-rest/src/lib.rs index e2718f6579..12c0dd6405 100644 --- a/plugins/zenoh-plugin-rest/src/lib.rs +++ b/plugins/zenoh-plugin-rest/src/lib.rs @@ -70,9 +70,11 @@ fn payload_to_json(payload: &Payload, encoding: &Encoding) -> serde_json::Value match encoding { // If it is a JSON try to deserialize as json, if it fails fallback to base64 &Encoding::APPLICATION_JSON | &Encoding::TEXT_JSON | &Encoding::TEXT_JSON5 => { - serde_json::from_slice::(&payload.contiguous()).unwrap_or( - serde_json::Value::String(StringOrBase64::from(payload).into_string()), - ) + payload + .deserialize::() + .unwrap_or_else(|_| { + serde_json::Value::String(StringOrBase64::from(payload).into_string()) + }) } // otherwise convert to JSON string _ => serde_json::Value::String(StringOrBase64::from(payload).into_string()), @@ -124,7 +126,10 @@ fn sample_to_html(sample: Sample) -> String { format!( "
{}
\n
{}
\n", sample.key_expr().as_str(), - String::from_utf8_lossy(&sample.payload().contiguous()) + sample + .payload() + .deserialize::>() + .unwrap_or_default() ) } @@ -134,7 +139,7 @@ fn result_to_html(sample: Result) -> String { Err(err) => { format!( "
ERROR
\n
{}
\n", - String::from_utf8_lossy(&err.payload.contiguous()) + err.payload.deserialize::>().unwrap_or_default() ) } } @@ -160,12 +165,15 @@ async fn to_raw_response(results: flume::Receiver) -> Response { Ok(sample) => response( StatusCode::Ok, Cow::from(sample.encoding()).as_ref(), - String::from_utf8_lossy(&sample.payload().contiguous()).as_ref(), + &sample + .payload() + .deserialize::>() + .unwrap_or_default(), ), Err(value) => response( StatusCode::Ok, Cow::from(&value.encoding).as_ref(), - String::from_utf8_lossy(&value.payload.contiguous()).as_ref(), + &value.payload.deserialize::>().unwrap_or_default(), ), }, Err(_) => response(StatusCode::Ok, "", ""), diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs index 35134dfe43..108beaabb2 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs @@ -665,11 +665,23 @@ impl StorageService { } fn serialize_update(update: &Update) -> String { + let Update { + kind, + data: + StoredData { + value: Value { + payload, encoding, .. 
+ }, + timestamp, + }, + } = update; + let zbuf: ZBuf = payload.into(); + let result = ( - update.kind.to_string(), - update.data.timestamp.to_string(), - update.data.value.encoding.to_string(), - update.data.value.payload.slices().collect::>(), + kind.to_string(), + timestamp.to_string(), + encoding.to_string(), + zbuf.slices().collect::>(), ); serde_json::to_string_pretty(&result).unwrap() } diff --git a/zenoh-ext/src/group.rs b/zenoh-ext/src/group.rs index 41007d8b87..ec96a8b373 100644 --- a/zenoh-ext/src/group.rs +++ b/zenoh-ext/src/group.rs @@ -25,6 +25,7 @@ use std::convert::TryInto; use std::ops::Add; use std::sync::Arc; use std::time::{Duration, Instant}; +use zenoh::payload::PayloadReader; use zenoh::prelude::r#async::*; use zenoh::publication::Publisher; use zenoh::query::ConsolidationMode; @@ -248,7 +249,7 @@ async fn net_event_handler(z: Arc, state: Arc) { .await .unwrap(); while let Ok(s) = sub.recv_async().await { - match bincode::deserialize::(&(s.payload().contiguous())) { + match bincode::deserialize_from::(s.payload().reader()) { Ok(evt) => match evt { GroupNetEvent::Join(je) => { log::debug!("Member join: {:?}", &je.member); @@ -307,8 +308,8 @@ async fn net_event_handler(z: Arc, state: Arc) { while let Ok(reply) = receiver.recv_async().await { match reply.sample { Ok(sample) => { - match bincode::deserialize::( - &sample.payload().contiguous(), + match bincode::deserialize_from::( + sample.payload().reader(), ) { Ok(m) => { let mut expiry = Instant::now(); diff --git a/zenoh/src/payload.rs b/zenoh/src/payload.rs index aab8235249..ed2a58145c 100644 --- a/zenoh/src/payload.rs +++ b/zenoh/src/payload.rs @@ -15,14 +15,12 @@ //! Payload primitives. 
use crate::buffers::ZBuf; use std::{ - borrow::Cow, - convert::Infallible, - fmt::Debug, - ops::{Deref, DerefMut}, - string::FromUtf8Error, - sync::Arc, + borrow::Cow, convert::Infallible, fmt::Debug, ops::Deref, string::FromUtf8Error, sync::Arc, +}; +use zenoh_buffers::buffer::Buffer; +use zenoh_buffers::{ + buffer::SplitBuffer, reader::HasReader, writer::HasWriter, ZBufReader, ZSlice, }; -use zenoh_buffers::{buffer::SplitBuffer, reader::HasReader, writer::HasWriter, ZSlice}; use zenoh_result::ZResult; #[cfg(feature = "shared-memory")] use zenoh_shm::SharedMemoryBuf; @@ -44,19 +42,29 @@ impl Payload { { Self(t.into()) } -} -impl Deref for Payload { - type Target = ZBuf; + /// Returns wether the payload is empty or not. + pub fn is_empty(&self) -> bool { + self.0.is_empty() + } + + /// Returns the length of the payload. + pub fn len(&self) -> usize { + self.0.len() + } - fn deref(&self) -> &Self::Target { - &self.0 + /// Get a [`PayloadReader`] implementing [`std::io::Read`] trait. + pub fn reader(&self) -> PayloadReader<'_> { + PayloadReader(self.0.reader()) } } -impl DerefMut for Payload { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 +/// A reader that implements [`std::io::Read`] trait to read from a [`Payload`]. +pub struct PayloadReader<'a>(ZBufReader<'a>); + +impl std::io::Read for PayloadReader<'_> { + fn read(&mut self, buf: &mut [u8]) -> std::io::Result { + self.0.read(buf) } } @@ -81,10 +89,10 @@ impl Payload { /// Decode an object of type `T` from a [`Value`] using the [`ZSerde`]. /// See [encode](Value::encode) for an example. 
- pub fn deserialize(&self) -> ZResult + pub fn deserialize<'a, T>(&'a self) -> ZResult where - ZSerde: Deserialize, - >::Error: Debug, + ZSerde: Deserialize<'a, T>, + >::Error: Debug, { let t: T = ZSerde.deserialize(self).map_err(|e| zerror!("{:?}", e))?; Ok(t) @@ -99,11 +107,11 @@ pub trait Serialize { fn serialize(self, t: T) -> Self::Output; } -pub trait Deserialize { +pub trait Deserialize<'a, T> { type Error; /// The implementer should take care of deserializing the type `T` based on the [`Encoding`] information. - fn deserialize(self, t: &Payload) -> Result; + fn deserialize(self, t: &'a Payload) -> Result; } /// The default serializer for Zenoh payload. It supports primitives types, such as: vec, int, uint, float, string, bool. @@ -129,7 +137,7 @@ impl From for ZBuf { } } -impl Deserialize for ZSerde { +impl Deserialize<'_, ZBuf> for ZSerde { type Error = Infallible; fn deserialize(self, v: &Payload) -> Result { @@ -159,18 +167,17 @@ impl Serialize<&[u8]> for ZSerde { } } -impl Deserialize> for ZSerde { +impl Deserialize<'_, Vec> for ZSerde { type Error = Infallible; fn deserialize(self, v: &Payload) -> Result, Self::Error> { - let v: ZBuf = v.into(); - Ok(v.contiguous().to_vec()) + Ok(Vec::from(v)) } } impl From<&Payload> for Vec { fn from(value: &Payload) -> Self { - value.contiguous().to_vec() + Cow::from(value).to_vec() } } @@ -182,18 +189,17 @@ impl<'a> Serialize> for ZSerde { } } -impl<'a> Deserialize> for ZSerde { +impl<'a> Deserialize<'a, Cow<'a, [u8]>> for ZSerde { type Error = Infallible; - fn deserialize(self, v: &Payload) -> Result, Self::Error> { - let v: Vec = Self.deserialize(v)?; - Ok(Cow::Owned(v)) + fn deserialize(self, v: &'a Payload) -> Result, Self::Error> { + Ok(Cow::from(v)) } } impl<'a> From<&'a Payload> for Cow<'a, [u8]> { fn from(value: &'a Payload) -> Self { - value.contiguous() + value.0.contiguous() } } @@ -214,11 +220,11 @@ impl Serialize<&str> for ZSerde { } } -impl Deserialize for ZSerde { +impl Deserialize<'_, String> for 
ZSerde { type Error = FromUtf8Error; fn deserialize(self, v: &Payload) -> Result { - String::from_utf8(v.contiguous().to_vec()) + String::from_utf8(Vec::from(v)) } } @@ -246,7 +252,7 @@ impl<'a> Serialize> for ZSerde { } } -impl<'a> Deserialize> for ZSerde { +impl<'a> Deserialize<'a, Cow<'a, str>> for ZSerde { type Error = FromUtf8Error; fn deserialize(self, v: &Payload) -> Result, Self::Error> { @@ -255,10 +261,10 @@ impl<'a> Deserialize> for ZSerde { } } -impl TryFrom<&Payload> for Cow<'_, str> { +impl<'a> TryFrom<&'a Payload> for Cow<'a, str> { type Error = FromUtf8Error; - fn try_from(value: &Payload) -> Result { + fn try_from(value: &'a Payload) -> Result { ZSerde.deserialize(value) } } @@ -295,16 +301,19 @@ macro_rules! impl_int { } } - impl Deserialize<$t> for ZSerde { + impl<'a> Deserialize<'a, $t> for ZSerde { type Error = ZDeserializeError; fn deserialize(self, v: &Payload) -> Result<$t, Self::Error> { - let p = v.contiguous(); + use std::io::Read; + + let mut r = v.reader(); let mut bs = (0 as $t).to_le_bytes(); - if p.len() > bs.len() { + if v.len() > bs.len() { return Err(ZDeserializeError); } - bs[..p.len()].copy_from_slice(&p); + r.read_exact(&mut bs[..v.len()]) + .map_err(|_| ZDeserializeError)?; let t = <$t>::from_le_bytes(bs); Ok(t) } @@ -349,15 +358,12 @@ impl Serialize for ZSerde { } } -impl Deserialize for ZSerde { +impl Deserialize<'_, bool> for ZSerde { type Error = ZDeserializeError; fn deserialize(self, v: &Payload) -> Result { - let p = v.contiguous(); - if p.len() != 1 { - return Err(ZDeserializeError); - } - match p[0] { + let p = v.deserialize::().map_err(|_| ZDeserializeError)?; + match p { 0 => Ok(false), 1 => Ok(true), _ => Err(ZDeserializeError), @@ -380,7 +386,7 @@ impl Serialize<&serde_json::Value> for ZSerde { fn serialize(self, t: &serde_json::Value) -> Self::Output { let mut payload = Payload::empty(); - serde_json::to_writer(payload.writer(), t)?; + serde_json::to_writer(payload.0.writer(), t)?; Ok(payload) } } @@ -393,7 
+399,7 @@ impl Serialize for ZSerde { } } -impl Deserialize for ZSerde { +impl Deserialize<'_, serde_json::Value> for ZSerde { type Error = serde_json::Error; fn deserialize(self, v: &Payload) -> Result { @@ -415,7 +421,7 @@ impl Serialize<&serde_yaml::Value> for ZSerde { fn serialize(self, t: &serde_yaml::Value) -> Self::Output { let mut payload = Payload::empty(); - serde_yaml::to_writer(payload.writer(), t)?; + serde_yaml::to_writer(payload.0.writer(), t)?; Ok(payload) } } @@ -428,7 +434,7 @@ impl Serialize for ZSerde { } } -impl Deserialize for ZSerde { +impl Deserialize<'_, serde_yaml::Value> for ZSerde { type Error = serde_yaml::Error; fn deserialize(self, v: &Payload) -> Result { @@ -450,7 +456,7 @@ impl Serialize<&serde_cbor::Value> for ZSerde { fn serialize(self, t: &serde_cbor::Value) -> Self::Output { let mut payload = Payload::empty(); - serde_cbor::to_writer(payload.writer(), t)?; + serde_cbor::to_writer(payload.0.writer(), t)?; Ok(payload) } } @@ -463,7 +469,7 @@ impl Serialize for ZSerde { } } -impl Deserialize for ZSerde { +impl Deserialize<'_, serde_cbor::Value> for ZSerde { type Error = serde_cbor::Error; fn deserialize(self, v: &Payload) -> Result { @@ -486,7 +492,7 @@ impl Serialize<&serde_pickle::Value> for ZSerde { fn serialize(self, t: &serde_pickle::Value) -> Self::Output { let mut payload = Payload::empty(); serde_pickle::value_to_writer( - &mut payload.writer(), + &mut payload.0.writer(), t, serde_pickle::SerOptions::default(), )?; @@ -502,7 +508,7 @@ impl Serialize for ZSerde { } } -impl Deserialize for ZSerde { +impl Deserialize<'_, serde_pickle::Value> for ZSerde { type Error = serde_pickle::Error; fn deserialize(self, v: &Payload) -> Result { @@ -590,9 +596,12 @@ impl std::fmt::Display for StringOrBase64 { impl From<&Payload> for StringOrBase64 { fn from(v: &Payload) -> Self { use base64::{engine::general_purpose::STANDARD as b64_std_engine, Engine}; - match v.deserialize::() { - Ok(s) => StringOrBase64::String(s), - Err(_) => 
StringOrBase64::Base64(b64_std_engine.encode(v.contiguous())), + match v.deserialize::>() { + Ok(s) => StringOrBase64::String(s.into_owned()), + Err(_) => { + let cow: Cow<'_, [u8]> = Cow::from(v); + StringOrBase64::Base64(b64_std_engine.encode(cow)) + } } } } diff --git a/zenoh/tests/attachments.rs b/zenoh/tests/attachments.rs index 603939bc0e..e6a3356559 100644 --- a/zenoh/tests/attachments.rs +++ b/zenoh/tests/attachments.rs @@ -20,10 +20,7 @@ fn pubsub() { let _sub = zenoh .declare_subscriber("test/attachment") .callback(|sample| { - println!( - "{}", - std::str::from_utf8(&sample.payload().contiguous()).unwrap() - ); + println!("{}", sample.payload().deserialize::().unwrap()); for (k, v) in sample.attachment().unwrap() { assert!(k.iter().rev().zip(v.as_slice()).all(|(k, v)| k == v)) } @@ -72,13 +69,10 @@ fn queries() { .callback(|query| { println!( "{}", - std::str::from_utf8( - &query - .value() - .map(|q| q.payload.contiguous()) - .unwrap_or_default() - ) - .unwrap() + query + .value() + .map(|q| q.payload.deserialize::().unwrap()) + .unwrap_or_default() ); let mut attachment = Attachment::new(); for (k, v) in query.attachment().unwrap() { From 2be4fa90ada9eff64827ef24da3ded1de919f7fc Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Wed, 27 Mar 2024 12:45:42 +0100 Subject: [PATCH 058/124] clippy warning fix (#867) --- commons/zenoh-macros/build.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/commons/zenoh-macros/build.rs b/commons/zenoh-macros/build.rs index d5ce6632dc..be5abe870b 100644 --- a/commons/zenoh-macros/build.rs +++ b/commons/zenoh-macros/build.rs @@ -25,6 +25,7 @@ fn main() { .create(true) .truncate(true) .write(true) + .truncate(true) .open(version_rs) .unwrap(); version_rs.write_all(&output.stdout).unwrap(); From 448535495d2b901c673e9d839908f646700f9719 Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Wed, 27 Mar 2024 13:13:34 +0100 Subject: [PATCH 059/124] removed extra truncate appeared from different PRs --- 
commons/zenoh-macros/build.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/commons/zenoh-macros/build.rs b/commons/zenoh-macros/build.rs index be5abe870b..d5ce6632dc 100644 --- a/commons/zenoh-macros/build.rs +++ b/commons/zenoh-macros/build.rs @@ -25,7 +25,6 @@ fn main() { .create(true) .truncate(true) .write(true) - .truncate(true) .open(version_rs) .unwrap(); version_rs.write_all(&output.stdout).unwrap(); From ce5b6108537599424f5ab0d6da9887b05f966e59 Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Thu, 28 Mar 2024 11:20:29 +0100 Subject: [PATCH 060/124] sample api for GetBuilder --- examples/examples/z_ping.rs | 2 +- examples/examples/z_pong.rs | 2 +- examples/examples/z_pub_thr.rs | 2 +- plugins/zenoh-plugin-rest/src/lib.rs | 2 +- .../src/replica/align_queryable.rs | 4 +- .../src/replica/aligner.rs | 2 +- .../src/replica/storage.rs | 4 +- zenoh-ext/src/querying_subscriber.rs | 2 +- zenoh/src/liveliness.rs | 5 +- zenoh/src/net/runtime/adminspace.rs | 4 +- zenoh/src/publication.rs | 38 ++++--- zenoh/src/query.rs | 100 ++++++++++++++---- zenoh/src/queryable.rs | 32 +++--- zenoh/src/sample.rs | 75 +++++++++---- zenoh/src/sample_builder.rs | 62 ++++++----- zenoh/src/session.rs | 9 +- zenoh/src/value.rs | 26 +++-- 17 files changed, 249 insertions(+), 122 deletions(-) diff --git a/examples/examples/z_ping.rs b/examples/examples/z_ping.rs index b40afc1f53..79a1e16514 100644 --- a/examples/examples/z_ping.rs +++ b/examples/examples/z_ping.rs @@ -36,7 +36,7 @@ fn main() { let publisher = session .declare_publisher(key_expr_ping) .congestion_control(CongestionControl::Block) - .express(express) + .is_express(express) .res() .unwrap(); diff --git a/examples/examples/z_pong.rs b/examples/examples/z_pong.rs index 0003958b5d..a629cce3cf 100644 --- a/examples/examples/z_pong.rs +++ b/examples/examples/z_pong.rs @@ -35,7 +35,7 @@ fn main() { let publisher = session .declare_publisher(key_expr_pong) .congestion_control(CongestionControl::Block) - .express(express) + 
.is_express(express) .res() .unwrap(); diff --git a/examples/examples/z_pub_thr.rs b/examples/examples/z_pub_thr.rs index 7e7c1ac9b5..c9b9fe64f3 100644 --- a/examples/examples/z_pub_thr.rs +++ b/examples/examples/z_pub_thr.rs @@ -42,7 +42,7 @@ fn main() { .declare_publisher("test/thr") .congestion_control(CongestionControl::Block) .priority(prio) - .express(args.express) + .is_express(args.express) .res() .unwrap(); diff --git a/plugins/zenoh-plugin-rest/src/lib.rs b/plugins/zenoh-plugin-rest/src/lib.rs index 8a85f14caa..74da23679f 100644 --- a/plugins/zenoh-plugin-rest/src/lib.rs +++ b/plugins/zenoh-plugin-rest/src/lib.rs @@ -34,7 +34,7 @@ use zenoh::plugins::{RunningPluginTrait, ZenohPlugin}; use zenoh::prelude::r#async::*; use zenoh::query::{QueryConsolidation, Reply}; use zenoh::runtime::Runtime; -use zenoh::sample_builder::PutSampleBuilderTrait; +use zenoh::sample_builder::ValueBuilderTrait; use zenoh::selector::TIME_RANGE_KEY; use zenoh::Session; use zenoh_plugin_trait::{plugin_long_version, plugin_version, Plugin, PluginControl}; diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/align_queryable.rs b/plugins/zenoh-plugin-storage-manager/src/replica/align_queryable.rs index 691fabd7a7..e5c4840666 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/align_queryable.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/align_queryable.rs @@ -20,8 +20,8 @@ use std::str; use std::str::FromStr; use zenoh::payload::StringOrBase64; use zenoh::prelude::r#async::*; -use zenoh::sample_builder::PutSampleBuilderTrait; -use zenoh::sample_builder::SampleBuilderTrait; +use zenoh::sample_builder::TimestampBuilderTrait; +use zenoh::sample_builder::ValueBuilderTrait; use zenoh::time::Timestamp; use zenoh::Session; diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs b/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs index a899196e7e..4119a941e5 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs +++ 
b/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs @@ -21,7 +21,7 @@ use std::str; use zenoh::key_expr::{KeyExpr, OwnedKeyExpr}; use zenoh::payload::StringOrBase64; use zenoh::prelude::r#async::*; -use zenoh::sample_builder::{PutSampleBuilder, PutSampleBuilderTrait, SampleBuilderTrait}; +use zenoh::sample_builder::{PutSampleBuilder, TimestampBuilderTrait, ValueBuilderTrait}; use zenoh::time::Timestamp; use zenoh::Session; use zenoh_core::{AsyncResolve, SyncResolve}; diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs index 04a707bfda..69c973de39 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs @@ -27,7 +27,7 @@ use zenoh::key_expr::KeyExpr; use zenoh::query::{ConsolidationMode, QueryTarget}; use zenoh::sample::{Sample, SampleKind}; use zenoh::sample_builder::{ - DeleteSampleBuilder, PutSampleBuilder, PutSampleBuilderTrait, SampleBuilder, SampleBuilderTrait, + DeleteSampleBuilder, PutSampleBuilder, SampleBuilder, TimestampBuilderTrait, ValueBuilderTrait, }; use zenoh::time::{new_reception_timestamp, Timestamp, NTP64}; use zenoh::value::Value; @@ -719,7 +719,7 @@ fn construct_update(data: String) -> Update { for slice in result.3 { payload.push_zslice(slice.to_vec().into()); } - let value = Value::new(payload).with_encoding(result.2); + let value = Value::new(payload).with_encoding(result.2.into()); let data = StoredData { value, timestamp: Timestamp::from_str(&result.1).unwrap(), // @TODO: remove the unwrap() diff --git a/zenoh-ext/src/querying_subscriber.rs b/zenoh-ext/src/querying_subscriber.rs index 5c302840b8..e6b269cfbd 100644 --- a/zenoh-ext/src/querying_subscriber.rs +++ b/zenoh-ext/src/querying_subscriber.rs @@ -20,7 +20,7 @@ use std::time::Duration; use zenoh::handlers::{locked, DefaultHandler}; use zenoh::prelude::r#async::*; use zenoh::query::{QueryConsolidation, 
QueryTarget, ReplyKeyExpr}; -use zenoh::sample_builder::{SampleBuilder, SampleBuilderTrait}; +use zenoh::sample_builder::{SampleBuilder, TimestampBuilderTrait}; use zenoh::subscriber::{Reliability, Subscriber}; use zenoh::time::{new_reception_timestamp, Timestamp}; use zenoh::Result as ZResult; diff --git a/zenoh/src/liveliness.rs b/zenoh/src/liveliness.rs index 425aa62592..6aac3d3908 100644 --- a/zenoh/src/liveliness.rs +++ b/zenoh/src/liveliness.rs @@ -15,6 +15,8 @@ //! Liveliness primitives. //! //! see [`Liveliness`] +use zenoh_protocol::network::request; + use crate::{query::Reply, Id}; #[zenoh_macros::unstable] @@ -740,18 +742,19 @@ where { fn res_sync(self) -> ::To { let (callback, receiver) = self.handler.into_handler(); - self.session .query( &self.key_expr?.into(), &Some(KeyExpr::from(*KE_PREFIX_LIVELINESS)), QueryTarget::DEFAULT, QueryConsolidation::DEFAULT, + request::ext::QoSType::REQUEST.into(), Locality::default(), self.timeout, None, #[cfg(feature = "unstable")] None, + SourceInfo::empty(), callback, ) .map(|_| receiver) diff --git a/zenoh/src/net/runtime/adminspace.rs b/zenoh/src/net/runtime/adminspace.rs index 2a2b318cde..9047e8b112 100644 --- a/zenoh/src/net/runtime/adminspace.rs +++ b/zenoh/src/net/runtime/adminspace.rs @@ -20,7 +20,7 @@ use crate::plugins::sealed::{self as plugins}; use crate::prelude::sync::SyncResolve; use crate::queryable::Query; use crate::queryable::QueryInner; -use crate::sample_builder::PutSampleBuilderTrait; +use crate::sample_builder::ValueBuilderTrait; use crate::value::Value; use async_std::task; use log::{error, trace}; @@ -426,7 +426,7 @@ impl Primitives for AdminSpace { parameters, value: query .ext_body - .map(|b| Value::from(b.payload).with_encoding(b.encoding)), + .map(|b| Value::from(b.payload).with_encoding(b.encoding.into())), qid: msg.id, zid, primitives, diff --git a/zenoh/src/publication.rs b/zenoh/src/publication.rs index 8772319593..81a12133ed 100644 --- a/zenoh/src/publication.rs +++ 
b/zenoh/src/publication.rs @@ -19,7 +19,7 @@ use crate::prelude::*; use crate::sample::Attachment; use crate::sample::{DataInfo, QoS, Sample, SampleFields, SampleKind}; use crate::sample_builder::{ - DeleteSampleBuilderTrait, PutSampleBuilderTrait, QoSBuilderTrait, SampleBuilderTrait, + QoSBuilderTrait, SampleBuilderTrait, TimestampBuilderTrait, ValueBuilderTrait, }; use crate::SessionRef; use crate::Undeclarable; @@ -114,9 +114,9 @@ impl QoSBuilderTrait for PutBuilder<'_, '_> { } } #[inline] - fn express(self, is_express: bool) -> Self { + fn is_express(self, is_express: bool) -> Self { Self { - publisher: self.publisher.express(is_express), + publisher: self.publisher.is_express(is_express), ..self } } @@ -138,15 +138,15 @@ impl QoSBuilderTrait for DeleteBuilder<'_, '_> { } } #[inline] - fn express(self, is_express: bool) -> Self { + fn is_express(self, is_express: bool) -> Self { Self { - publisher: self.publisher.express(is_express), + publisher: self.publisher.is_express(is_express), ..self } } } -impl SampleBuilderTrait for PutBuilder<'_, '_> { +impl TimestampBuilderTrait for PutBuilder<'_, '_> { fn with_timestamp_opt(self, timestamp: Option) -> Self { Self { timestamp, ..self } } @@ -156,6 +156,9 @@ impl SampleBuilderTrait for PutBuilder<'_, '_> { ..self } } +} + +impl SampleBuilderTrait for PutBuilder<'_, '_> { #[cfg(feature = "unstable")] fn with_source_info(self, source_info: SourceInfo) -> Self { Self { @@ -176,7 +179,7 @@ impl SampleBuilderTrait for PutBuilder<'_, '_> { } } -impl SampleBuilderTrait for DeleteBuilder<'_, '_> { +impl TimestampBuilderTrait for DeleteBuilder<'_, '_> { fn with_timestamp_opt(self, timestamp: Option) -> Self { Self { timestamp, ..self } } @@ -186,6 +189,9 @@ impl SampleBuilderTrait for DeleteBuilder<'_, '_> { ..self } } +} + +impl SampleBuilderTrait for DeleteBuilder<'_, '_> { #[cfg(feature = "unstable")] fn with_source_info(self, source_info: SourceInfo) -> Self { Self { @@ -206,7 +212,7 @@ impl SampleBuilderTrait for 
DeleteBuilder<'_, '_> { } } -impl PutSampleBuilderTrait for PutBuilder<'_, '_> { +impl ValueBuilderTrait for PutBuilder<'_, '_> { fn with_encoding(self, encoding: Encoding) -> Self { Self { encoding, ..self } } @@ -222,8 +228,6 @@ impl PutSampleBuilderTrait for PutBuilder<'_, '_> { } } -impl DeleteSampleBuilderTrait for DeleteBuilder<'_, '_> {} - impl PutBuilder<'_, '_> { /// Restrict the matching subscribers that will receive the published data /// to the ones that have the given [`Locality`](crate::prelude::Locality). @@ -761,7 +765,7 @@ pub struct DeletePublication<'a> { pub(crate) attachment: Option, } -impl SampleBuilderTrait for PutPublication<'_> { +impl TimestampBuilderTrait for PutPublication<'_> { fn with_timestamp_opt(self, timestamp: Option) -> Self { Self { timestamp, ..self } } @@ -772,7 +776,9 @@ impl SampleBuilderTrait for PutPublication<'_> { ..self } } +} +impl SampleBuilderTrait for PutPublication<'_> { #[cfg(feature = "unstable")] fn with_source_info(self, source_info: SourceInfo) -> Self { Self { @@ -795,7 +801,7 @@ impl SampleBuilderTrait for PutPublication<'_> { } } -impl PutSampleBuilderTrait for PutPublication<'_> { +impl ValueBuilderTrait for PutPublication<'_> { fn with_encoding(self, encoding: Encoding) -> Self { Self { encoding, ..self } } @@ -811,7 +817,7 @@ impl PutSampleBuilderTrait for PutPublication<'_> { } } -impl SampleBuilderTrait for DeletePublication<'_> { +impl TimestampBuilderTrait for DeletePublication<'_> { fn with_timestamp_opt(self, timestamp: Option) -> Self { Self { timestamp, ..self } } @@ -822,7 +828,9 @@ impl SampleBuilderTrait for DeletePublication<'_> { ..self } } +} +impl SampleBuilderTrait for DeletePublication<'_> { #[cfg(feature = "unstable")] fn with_source_info(self, source_info: SourceInfo) -> Self { Self { @@ -845,8 +853,6 @@ impl SampleBuilderTrait for DeletePublication<'_> { } } -impl DeleteSampleBuilderTrait for DeletePublication<'_> {} - impl Resolvable for PutPublication<'_> { type To = ZResult<()>; } 
@@ -1010,7 +1016,7 @@ impl QoSBuilderTrait for PublisherBuilder<'_, '_> { /// When express is set to `true`, then the message will not be batched. /// This usually has a positive impact on latency but negative impact on throughput. #[inline] - fn express(self, is_express: bool) -> Self { + fn is_express(self, is_express: bool) -> Self { Self { is_express, ..self } } } diff --git a/zenoh/src/query.rs b/zenoh/src/query.rs index fe48748ad4..6a0c4b1933 100644 --- a/zenoh/src/query.rs +++ b/zenoh/src/query.rs @@ -17,6 +17,8 @@ use crate::handlers::{locked, Callback, DefaultHandler}; use crate::prelude::*; #[zenoh_macros::unstable] use crate::sample::Attachment; +use crate::sample::QoSBuilder; +use crate::sample_builder::{QoSBuilderTrait, SampleBuilderTrait, ValueBuilderTrait}; use crate::Session; use std::collections::HashMap; use std::future::Ready; @@ -120,12 +122,70 @@ pub struct GetBuilder<'a, 'b, Handler> { pub(crate) scope: ZResult>>, pub(crate) target: QueryTarget, pub(crate) consolidation: QueryConsolidation, + pub(crate) qos: QoSBuilder, pub(crate) destination: Locality, pub(crate) timeout: Duration, pub(crate) handler: Handler, pub(crate) value: Option, #[cfg(feature = "unstable")] pub(crate) attachment: Option, + #[cfg(feature = "unstable")] + pub(crate) source_info: SourceInfo, +} + +impl SampleBuilderTrait for GetBuilder<'_, '_, Handler> { + #[cfg(feature = "unstable")] + fn with_source_info(self, source_info: SourceInfo) -> Self { + Self { + source_info, + ..self + } + } + + #[cfg(feature = "unstable")] + fn with_attachment_opt(self, attachment: Option) -> Self { + Self { attachment, ..self } + } + + #[cfg(feature = "unstable")] + fn with_attachment(self, attachment: Attachment) -> Self { + Self { + attachment: Some(attachment), + ..self + } + } +} + +impl QoSBuilderTrait for GetBuilder<'_, '_, DefaultHandler> { + fn congestion_control(self, congestion_control: CongestionControl) -> Self { + let qos = self.qos.congestion_control(congestion_control); + Self 
{ qos, ..self } + } + + fn priority(self, priority: Priority) -> Self { + let qos = self.qos.priority(priority); + Self { qos, ..self } + } + + fn is_express(self, is_express: bool) -> Self { + let qos = self.qos.is_express(is_express); + Self { qos, ..self } + } +} + +impl ValueBuilderTrait for GetBuilder<'_, '_, Handler> { + fn with_encoding(self, encoding: Encoding) -> Self { + let value = Some(self.value.unwrap_or_default().with_encoding(encoding)); + Self { value, ..self } + } + + fn with_payload(self, payload: IntoPayload) -> Self + where + IntoPayload: Into, + { + let value = Some(self.value.unwrap_or_default().with_payload(payload)); + Self { value, ..self } + } } impl<'a, 'b> GetBuilder<'a, 'b, DefaultHandler> { @@ -156,11 +216,14 @@ impl<'a, 'b> GetBuilder<'a, 'b, DefaultHandler> { scope, target, consolidation, + qos, destination, timeout, value, #[cfg(feature = "unstable")] attachment, + #[cfg(feature = "unstable")] + source_info, handler: _, } = self; GetBuilder { @@ -169,11 +232,14 @@ impl<'a, 'b> GetBuilder<'a, 'b, DefaultHandler> { scope, target, consolidation, + qos, destination, timeout, value, #[cfg(feature = "unstable")] attachment, + #[cfg(feature = "unstable")] + source_info, handler: callback, } } @@ -239,11 +305,14 @@ impl<'a, 'b> GetBuilder<'a, 'b, DefaultHandler> { scope, target, consolidation, + qos, destination, timeout, value, #[cfg(feature = "unstable")] attachment, + #[cfg(feature = "unstable")] + source_info, handler: _, } = self; GetBuilder { @@ -252,11 +321,14 @@ impl<'a, 'b> GetBuilder<'a, 'b, DefaultHandler> { scope, target, consolidation, + qos, destination, timeout, value, #[cfg(feature = "unstable")] attachment, + #[cfg(feature = "unstable")] + source_info, handler, } } @@ -315,29 +387,11 @@ impl<'a, 'b, Handler> GetBuilder<'a, 'b, Handler> { /// expressions that don't intersect with the query's. 
#[zenoh_macros::unstable] pub fn accept_replies(self, accept: ReplyKeyExpr) -> Self { - let Self { - session, - selector, - scope, - target, - consolidation, - destination, - timeout, - value, - attachment, - handler, - } = self; Self { - session, - selector: selector.and_then(|s| s.accept_any_keyexpr(accept == ReplyKeyExpr::Any)), - scope, - target, - consolidation, - destination, - timeout, - value, - attachment, - handler, + selector: self + .selector + .and_then(|s| s.accept_any_keyexpr(accept == ReplyKeyExpr::Any)), + ..self } } } @@ -382,11 +436,13 @@ where &self.scope?, self.target, self.consolidation, + self.qos.into(), self.destination, self.timeout, self.value, #[cfg(feature = "unstable")] self.attachment, + self.source_info, callback, ) .map(|_| receiver) diff --git a/zenoh/src/queryable.rs b/zenoh/src/queryable.rs index 14e9d09068..a9b469a340 100644 --- a/zenoh/src/queryable.rs +++ b/zenoh/src/queryable.rs @@ -20,8 +20,8 @@ use crate::net::primitives::Primitives; use crate::prelude::*; use crate::sample::SourceInfo; use crate::sample_builder::{ - DeleteSampleBuilder, DeleteSampleBuilderTrait, PutSampleBuilder, PutSampleBuilderTrait, - QoSBuilderTrait, SampleBuilder, SampleBuilderTrait, + DeleteSampleBuilder, PutSampleBuilder, QoSBuilderTrait, SampleBuilder, SampleBuilderTrait, + TimestampBuilderTrait, ValueBuilderTrait, }; use crate::Id; use crate::SessionRef; @@ -238,7 +238,7 @@ impl<'a> ReplySampleBuilder<'a> { } } -impl SampleBuilderTrait for ReplySampleBuilder<'_> { +impl TimestampBuilderTrait for ReplySampleBuilder<'_> { fn with_timestamp_opt(self, timestamp: Option) -> Self { Self { sample_builder: self.sample_builder.with_timestamp_opt(timestamp), @@ -252,7 +252,9 @@ impl SampleBuilderTrait for ReplySampleBuilder<'_> { ..self } } +} +impl SampleBuilderTrait for ReplySampleBuilder<'_> { #[cfg(feature = "unstable")] fn with_source_info(self, source_info: SourceInfo) -> Self { Self { @@ -293,9 +295,9 @@ impl QoSBuilderTrait for 
ReplySampleBuilder<'_> { } } - fn express(self, is_express: bool) -> Self { + fn is_express(self, is_express: bool) -> Self { Self { - sample_builder: self.sample_builder.express(is_express), + sample_builder: self.sample_builder.is_express(is_express), ..self } } @@ -328,7 +330,7 @@ pub struct ReplyBuilder<'a> { sample_builder: PutSampleBuilder, } -impl SampleBuilderTrait for ReplyBuilder<'_> { +impl TimestampBuilderTrait for ReplyBuilder<'_> { fn with_timestamp_opt(self, timestamp: Option) -> Self { Self { sample_builder: self.sample_builder.with_timestamp_opt(timestamp), @@ -342,7 +344,9 @@ impl SampleBuilderTrait for ReplyBuilder<'_> { ..self } } +} +impl SampleBuilderTrait for ReplyBuilder<'_> { #[cfg(feature = "unstable")] fn with_source_info(self, source_info: SourceInfo) -> Self { Self { @@ -383,15 +387,15 @@ impl QoSBuilderTrait for ReplyBuilder<'_> { } } - fn express(self, is_express: bool) -> Self { + fn is_express(self, is_express: bool) -> Self { Self { - sample_builder: self.sample_builder.express(is_express), + sample_builder: self.sample_builder.is_express(is_express), ..self } } } -impl PutSampleBuilderTrait for ReplyBuilder<'_> { +impl ValueBuilderTrait for ReplyBuilder<'_> { fn with_encoding(self, encoding: Encoding) -> Self { Self { sample_builder: self.sample_builder.with_encoding(encoding), @@ -418,7 +422,7 @@ pub struct ReplyDelBuilder<'a> { sample_builder: DeleteSampleBuilder, } -impl SampleBuilderTrait for ReplyDelBuilder<'_> { +impl TimestampBuilderTrait for ReplyDelBuilder<'_> { fn with_timestamp_opt(self, timestamp: Option) -> Self { Self { sample_builder: self.sample_builder.with_timestamp_opt(timestamp), @@ -432,7 +436,9 @@ impl SampleBuilderTrait for ReplyDelBuilder<'_> { ..self } } +} +impl SampleBuilderTrait for ReplyDelBuilder<'_> { #[cfg(feature = "unstable")] fn with_source_info(self, source_info: SourceInfo) -> Self { Self { @@ -473,16 +479,14 @@ impl QoSBuilderTrait for ReplyDelBuilder<'_> { } } - fn express(self, is_express: 
bool) -> Self { + fn is_express(self, is_express: bool) -> Self { Self { - sample_builder: self.sample_builder.express(is_express), + sample_builder: self.sample_builder.is_express(is_express), ..self } } } -impl DeleteSampleBuilderTrait for ReplyDelBuilder<'_> {} - /// A builder returned by [`Query::reply_err()`](Query::reply_err). #[must_use = "Resolvables do nothing unless you resolve them using the `res` method from either `SyncResolve` or `AsyncResolve`"] #[derive(Debug)] diff --git a/zenoh/src/sample.rs b/zenoh/src/sample.rs index 1998f3e844..2dbeebe717 100644 --- a/zenoh/src/sample.rs +++ b/zenoh/src/sample.rs @@ -16,13 +16,16 @@ use crate::encoding::Encoding; use crate::payload::Payload; use crate::prelude::{KeyExpr, Value}; +use crate::sample_builder::{QoSBuilderTrait, ValueBuilderTrait}; use crate::time::Timestamp; use crate::Priority; #[zenoh_macros::unstable] use serde::Serialize; use std::{convert::TryFrom, fmt}; +use zenoh_core::{AsyncResolve, Resolvable, SyncResolve}; use zenoh_protocol::core::EntityGlobalId; -use zenoh_protocol::{core::CongestionControl, network::push::ext::QoSType, zenoh}; +use zenoh_protocol::network::declare::ext::QoSType; +use zenoh_protocol::{core::CongestionControl, zenoh}; pub type SourceSn = u64; @@ -566,6 +569,58 @@ pub struct QoS { inner: QoSType, } +#[derive(Debug)] +pub struct QoSBuilder(QoS); + +impl From for QoSBuilder { + fn from(qos: QoS) -> Self { + QoSBuilder(qos) + } +} + +impl From for QoS { + fn from(builder: QoSBuilder) -> Self { + builder.0 + } +} + +impl Resolvable for QoSBuilder { + type To = QoS; +} + +impl SyncResolve for QoSBuilder { + fn res_sync(self) -> ::To { + self.0 + } +} + +impl AsyncResolve for QoSBuilder { + type Future = futures::future::Ready; + fn res_async(self) -> Self::Future { + futures::future::ready(self.0) + } +} + +impl QoSBuilderTrait for QoSBuilder { + fn congestion_control(self, congestion_control: CongestionControl) -> Self { + let mut inner = self.0.inner; + 
inner.set_congestion_control(congestion_control); + Self(QoS { inner }) + } + + fn priority(self, priority: Priority) -> Self { + let mut inner = self.0.inner; + inner.set_priority(priority.into()); + Self(QoS { inner }) + } + + fn is_express(self, is_express: bool) -> Self { + let mut inner = self.0.inner; + inner.set_is_express(is_express); + Self(QoS { inner }) + } +} + impl QoS { /// Gets priority of the message. pub fn priority(&self) -> Priority { @@ -590,24 +645,6 @@ impl QoS { pub fn express(&self) -> bool { self.inner.is_express() } - - /// Sets priority value. - pub fn with_priority(mut self, priority: Priority) -> Self { - self.inner.set_priority(priority.into()); - self - } - - /// Sets congestion control value. - pub fn with_congestion_control(mut self, congestion_control: CongestionControl) -> Self { - self.inner.set_congestion_control(congestion_control); - self - } - - /// Sets express flag vlaue. - pub fn with_express(mut self, is_express: bool) -> Self { - self.inner.set_is_express(is_express); - self - } } impl From for QoS { diff --git a/zenoh/src/sample_builder.rs b/zenoh/src/sample_builder.rs index 7e38e84afd..5aca7ff1da 100644 --- a/zenoh/src/sample_builder.rs +++ b/zenoh/src/sample_builder.rs @@ -14,6 +14,7 @@ use crate::sample::Attachment; use crate::sample::QoS; +use crate::sample::QoSBuilder; use crate::sample::SourceInfo; use crate::Encoding; use crate::KeyExpr; @@ -36,14 +37,17 @@ pub trait QoSBuilderTrait { /// Change the `express` policy to apply when routing the data. /// When express is set to `true`, then the message will not be batched. /// This usually has a positive impact on latency but negative impact on throughput. 
- fn express(self, is_express: bool) -> Self; + fn is_express(self, is_express: bool) -> Self; } -pub trait SampleBuilderTrait { +pub trait TimestampBuilderTrait { /// Sets of clears timestamp fn with_timestamp_opt(self, timestamp: Option) -> Self; /// Sets timestamp fn with_timestamp(self, timestamp: Timestamp) -> Self; +} + +pub trait SampleBuilderTrait { /// Attach source information #[zenoh_macros::unstable] fn with_source_info(self, source_info: SourceInfo) -> Self; @@ -55,16 +59,15 @@ pub trait SampleBuilderTrait { fn with_attachment(self, attachment: Attachment) -> Self; } -pub trait PutSampleBuilderTrait: SampleBuilderTrait { +pub trait ValueBuilderTrait { /// Set the [`Encoding`] fn with_encoding(self, encoding: Encoding) -> Self; + /// Sets the payload fn with_payload(self, payload: IntoPayload) -> Self where IntoPayload: Into; } -pub trait DeleteSampleBuilderTrait: SampleBuilderTrait {} - #[derive(Debug)] pub struct SampleBuilder(Sample); @@ -98,7 +101,7 @@ impl SampleBuilder { } } -impl SampleBuilderTrait for SampleBuilder { +impl TimestampBuilderTrait for SampleBuilder { fn with_timestamp_opt(self, timestamp: Option) -> Self { Self(Sample { timestamp, @@ -109,7 +112,9 @@ impl SampleBuilderTrait for SampleBuilder { fn with_timestamp(self, timestamp: Timestamp) -> Self { self.with_timestamp_opt(Some(timestamp)) } +} +impl SampleBuilderTrait for SampleBuilder { #[zenoh_macros::unstable] fn with_source_info(self, source_info: SourceInfo) -> Self { Self(Sample { @@ -134,22 +139,19 @@ impl SampleBuilderTrait for SampleBuilder { impl QoSBuilderTrait for SampleBuilder { fn congestion_control(self, congestion_control: CongestionControl) -> Self { - Self(Sample { - qos: self.0.qos.with_congestion_control(congestion_control), - ..self.0 - }) + let qos: QoSBuilder = self.0.qos.into(); + let qos = qos.congestion_control(congestion_control).res_sync(); + Self(Sample { qos, ..self.0 }) } fn priority(self, priority: Priority) -> Self { - Self(Sample { - qos: 
self.0.qos.with_priority(priority), - ..self.0 - }) + let qos: QoSBuilder = self.0.qos.into(); + let qos = qos.priority(priority).res_sync(); + Self(Sample { qos, ..self.0 }) } - fn express(self, is_express: bool) -> Self { - Self(Sample { - qos: self.0.qos.with_express(is_express), - ..self.0 - }) + fn is_express(self, is_express: bool) -> Self { + let qos: QoSBuilder = self.0.qos.into(); + let qos = qos.is_express(is_express).res_sync(); + Self(Sample { qos, ..self.0 }) } } @@ -197,13 +199,16 @@ impl PutSampleBuilder { } } -impl SampleBuilderTrait for PutSampleBuilder { +impl TimestampBuilderTrait for PutSampleBuilder { fn with_timestamp(self, timestamp: Timestamp) -> Self { Self(self.0.with_timestamp(timestamp)) } fn with_timestamp_opt(self, timestamp: Option) -> Self { Self(self.0.with_timestamp_opt(timestamp)) } +} + +impl SampleBuilderTrait for PutSampleBuilder { #[zenoh_macros::unstable] fn with_source_info(self, source_info: SourceInfo) -> Self { Self(self.0.with_source_info(source_info)) @@ -225,12 +230,12 @@ impl QoSBuilderTrait for PutSampleBuilder { fn priority(self, priority: Priority) -> Self { Self(self.0.priority(priority)) } - fn express(self, is_express: bool) -> Self { - Self(self.0.express(is_express)) + fn is_express(self, is_express: bool) -> Self { + Self(self.0.is_express(is_express)) } } -impl PutSampleBuilderTrait for PutSampleBuilder { +impl ValueBuilderTrait for PutSampleBuilder { fn with_encoding(self, encoding: Encoding) -> Self { Self(SampleBuilder(Sample { encoding, @@ -291,13 +296,16 @@ impl DeleteSampleBuilder { } } -impl SampleBuilderTrait for DeleteSampleBuilder { +impl TimestampBuilderTrait for DeleteSampleBuilder { fn with_timestamp(self, timestamp: Timestamp) -> Self { Self(self.0.with_timestamp(timestamp)) } fn with_timestamp_opt(self, timestamp: Option) -> Self { Self(self.0.with_timestamp_opt(timestamp)) } +} + +impl SampleBuilderTrait for DeleteSampleBuilder { #[zenoh_macros::unstable] fn with_source_info(self, 
source_info: SourceInfo) -> Self { Self(self.0.with_source_info(source_info)) @@ -319,13 +327,11 @@ impl QoSBuilderTrait for DeleteSampleBuilder { fn priority(self, priority: Priority) -> Self { Self(self.0.priority(priority)) } - fn express(self, is_express: bool) -> Self { - Self(self.0.express(is_express)) + fn is_express(self, is_express: bool) -> Self { + Self(self.0.is_express(is_express)) } } -impl DeleteSampleBuilderTrait for DeleteSampleBuilder {} - impl From for SampleBuilder { fn from(sample: Sample) -> Self { SampleBuilder(sample) diff --git a/zenoh/src/session.rs b/zenoh/src/session.rs index ffe7036050..5b80adb0e5 100644 --- a/zenoh/src/session.rs +++ b/zenoh/src/session.rs @@ -784,18 +784,21 @@ impl Session { let conf = self.runtime.config().lock(); Duration::from_millis(unwrap_or_default!(conf.queries_default_timeout())) }; + let qos: QoS = request::ext::QoSType::REQUEST.into(); GetBuilder { session: self, selector, scope: Ok(None), target: QueryTarget::DEFAULT, consolidation: QueryConsolidation::DEFAULT, + qos: qos.into(), destination: Locality::default(), timeout, value: None, #[cfg(feature = "unstable")] attachment: None, handler: DefaultHandler, + source_info: SourceInfo::empty(), } } } @@ -1567,10 +1570,12 @@ impl Session { scope: &Option>, target: QueryTarget, consolidation: QueryConsolidation, + qos: QoS, destination: Locality, timeout: Duration, value: Option, #[cfg(feature = "unstable")] attachment: Option, + #[cfg(feature = "unstable")] source: SourceInfo, callback: Callback<'static, Reply>, ) -> ZResult<()> { log::trace!("get({}, {:?}, {:?})", selector, target, consolidation); @@ -1649,7 +1654,7 @@ impl Session { primitives.send_request(Request { id: qid, wire_expr: wexpr.clone(), - ext_qos: request::ext::QoSType::REQUEST, + ext_qos: qos.into(), ext_tstamp: None, ext_nodeid: request::ext::NodeIdType::DEFAULT, ext_target: target, @@ -1658,7 +1663,7 @@ impl Session { payload: RequestBody::Query(zenoh_protocol::zenoh::Query { consolidation, 
parameters: selector.parameters().to_string(), - ext_sinfo: None, + ext_sinfo: source.into(), ext_body: value.as_ref().map(|v| query::ext::QueryBodyType { #[cfg(feature = "shared-memory")] ext_shm: None, diff --git a/zenoh/src/value.rs b/zenoh/src/value.rs index 128f0ff605..2d98cbf398 100644 --- a/zenoh/src/value.rs +++ b/zenoh/src/value.rs @@ -13,7 +13,7 @@ // //! Value primitives. -use crate::{encoding::Encoding, payload::Payload}; +use crate::{encoding::Encoding, payload::Payload, sample_builder::ValueBuilderTrait}; /// A zenoh [`Value`] contains a `payload` and an [`Encoding`] that indicates how the [`Payload`] should be interpreted. #[non_exhaustive] @@ -36,7 +36,6 @@ impl Value { encoding: Encoding::default(), } } - /// Creates an empty [`Value`]. pub const fn empty() -> Self { Value { @@ -44,15 +43,20 @@ impl Value { encoding: Encoding::default(), } } +} - /// Sets the encoding of this [`Value`]`. - #[inline(always)] - pub fn with_encoding(mut self, encoding: IntoEncoding) -> Self +impl ValueBuilderTrait for Value { + fn with_encoding(self, encoding: Encoding) -> Self { + Self { encoding, ..self } + } + fn with_payload(self, payload: IntoPayload) -> Self where - IntoEncoding: Into, + IntoPayload: Into, { - self.encoding = encoding.into(); - self + Self { + payload: payload.into(), + ..self + } } } @@ -67,3 +71,9 @@ where } } } + +impl Default for Value { + fn default() -> Self { + Value::empty() + } +} From 0bce160e13947dacb6ed110b571578a93b70b8ae Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Thu, 28 Mar 2024 11:23:52 +0100 Subject: [PATCH 061/124] restored "express" name --- examples/examples/z_ping.rs | 2 +- examples/examples/z_pong.rs | 2 +- examples/examples/z_pub_thr.rs | 2 +- zenoh/src/publication.rs | 10 +++++----- zenoh/src/query.rs | 4 ++-- zenoh/src/queryable.rs | 12 ++++++------ zenoh/src/sample.rs | 2 +- zenoh/src/sample_builder.rs | 14 +++++++------- 8 files changed, 24 insertions(+), 24 deletions(-) diff --git a/examples/examples/z_ping.rs 
b/examples/examples/z_ping.rs index 79a1e16514..b40afc1f53 100644 --- a/examples/examples/z_ping.rs +++ b/examples/examples/z_ping.rs @@ -36,7 +36,7 @@ fn main() { let publisher = session .declare_publisher(key_expr_ping) .congestion_control(CongestionControl::Block) - .is_express(express) + .express(express) .res() .unwrap(); diff --git a/examples/examples/z_pong.rs b/examples/examples/z_pong.rs index a629cce3cf..0003958b5d 100644 --- a/examples/examples/z_pong.rs +++ b/examples/examples/z_pong.rs @@ -35,7 +35,7 @@ fn main() { let publisher = session .declare_publisher(key_expr_pong) .congestion_control(CongestionControl::Block) - .is_express(express) + .express(express) .res() .unwrap(); diff --git a/examples/examples/z_pub_thr.rs b/examples/examples/z_pub_thr.rs index c9b9fe64f3..7e7c1ac9b5 100644 --- a/examples/examples/z_pub_thr.rs +++ b/examples/examples/z_pub_thr.rs @@ -42,7 +42,7 @@ fn main() { .declare_publisher("test/thr") .congestion_control(CongestionControl::Block) .priority(prio) - .is_express(args.express) + .express(args.express) .res() .unwrap(); diff --git a/zenoh/src/publication.rs b/zenoh/src/publication.rs index 81a12133ed..e60e40d295 100644 --- a/zenoh/src/publication.rs +++ b/zenoh/src/publication.rs @@ -114,9 +114,9 @@ impl QoSBuilderTrait for PutBuilder<'_, '_> { } } #[inline] - fn is_express(self, is_express: bool) -> Self { + fn express(self, is_express: bool) -> Self { Self { - publisher: self.publisher.is_express(is_express), + publisher: self.publisher.express(is_express), ..self } } @@ -138,9 +138,9 @@ impl QoSBuilderTrait for DeleteBuilder<'_, '_> { } } #[inline] - fn is_express(self, is_express: bool) -> Self { + fn express(self, is_express: bool) -> Self { Self { - publisher: self.publisher.is_express(is_express), + publisher: self.publisher.express(is_express), ..self } } @@ -1016,7 +1016,7 @@ impl QoSBuilderTrait for PublisherBuilder<'_, '_> { /// When express is set to `true`, then the message will not be batched. 
/// This usually has a positive impact on latency but negative impact on throughput. #[inline] - fn is_express(self, is_express: bool) -> Self { + fn express(self, is_express: bool) -> Self { Self { is_express, ..self } } } diff --git a/zenoh/src/query.rs b/zenoh/src/query.rs index 6a0c4b1933..db17715a89 100644 --- a/zenoh/src/query.rs +++ b/zenoh/src/query.rs @@ -167,8 +167,8 @@ impl QoSBuilderTrait for GetBuilder<'_, '_, DefaultHandler> { Self { qos, ..self } } - fn is_express(self, is_express: bool) -> Self { - let qos = self.qos.is_express(is_express); + fn express(self, is_express: bool) -> Self { + let qos = self.qos.express(is_express); Self { qos, ..self } } } diff --git a/zenoh/src/queryable.rs b/zenoh/src/queryable.rs index a9b469a340..d9327415f5 100644 --- a/zenoh/src/queryable.rs +++ b/zenoh/src/queryable.rs @@ -295,9 +295,9 @@ impl QoSBuilderTrait for ReplySampleBuilder<'_> { } } - fn is_express(self, is_express: bool) -> Self { + fn express(self, is_express: bool) -> Self { Self { - sample_builder: self.sample_builder.is_express(is_express), + sample_builder: self.sample_builder.express(is_express), ..self } } @@ -387,9 +387,9 @@ impl QoSBuilderTrait for ReplyBuilder<'_> { } } - fn is_express(self, is_express: bool) -> Self { + fn express(self, is_express: bool) -> Self { Self { - sample_builder: self.sample_builder.is_express(is_express), + sample_builder: self.sample_builder.express(is_express), ..self } } @@ -479,9 +479,9 @@ impl QoSBuilderTrait for ReplyDelBuilder<'_> { } } - fn is_express(self, is_express: bool) -> Self { + fn express(self, is_express: bool) -> Self { Self { - sample_builder: self.sample_builder.is_express(is_express), + sample_builder: self.sample_builder.express(is_express), ..self } } diff --git a/zenoh/src/sample.rs b/zenoh/src/sample.rs index 2dbeebe717..d774e5e007 100644 --- a/zenoh/src/sample.rs +++ b/zenoh/src/sample.rs @@ -614,7 +614,7 @@ impl QoSBuilderTrait for QoSBuilder { Self(QoS { inner }) } - fn is_express(self, 
is_express: bool) -> Self { + fn express(self, is_express: bool) -> Self { let mut inner = self.0.inner; inner.set_is_express(is_express); Self(QoS { inner }) diff --git a/zenoh/src/sample_builder.rs b/zenoh/src/sample_builder.rs index 5aca7ff1da..b13bfce346 100644 --- a/zenoh/src/sample_builder.rs +++ b/zenoh/src/sample_builder.rs @@ -37,7 +37,7 @@ pub trait QoSBuilderTrait { /// Change the `express` policy to apply when routing the data. /// When express is set to `true`, then the message will not be batched. /// This usually has a positive impact on latency but negative impact on throughput. - fn is_express(self, is_express: bool) -> Self; + fn express(self, is_express: bool) -> Self; } pub trait TimestampBuilderTrait { @@ -148,9 +148,9 @@ impl QoSBuilderTrait for SampleBuilder { let qos = qos.priority(priority).res_sync(); Self(Sample { qos, ..self.0 }) } - fn is_express(self, is_express: bool) -> Self { + fn express(self, is_express: bool) -> Self { let qos: QoSBuilder = self.0.qos.into(); - let qos = qos.is_express(is_express).res_sync(); + let qos = qos.express(is_express).res_sync(); Self(Sample { qos, ..self.0 }) } } @@ -230,8 +230,8 @@ impl QoSBuilderTrait for PutSampleBuilder { fn priority(self, priority: Priority) -> Self { Self(self.0.priority(priority)) } - fn is_express(self, is_express: bool) -> Self { - Self(self.0.is_express(is_express)) + fn express(self, is_express: bool) -> Self { + Self(self.0.express(is_express)) } } @@ -327,8 +327,8 @@ impl QoSBuilderTrait for DeleteSampleBuilder { fn priority(self, priority: Priority) -> Self { Self(self.0.priority(priority)) } - fn is_express(self, is_express: bool) -> Self { - Self(self.0.is_express(is_express)) + fn express(self, is_express: bool) -> Self { + Self(self.0.express(is_express)) } } From 3620c3a7d057c312ff8354bffef40f79424aee80 Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Thu, 28 Mar 2024 11:32:11 +0100 Subject: [PATCH 062/124] removed 'timestamp_opt' --- zenoh/src/publication.rs | 34 
++++------------------------------ zenoh/src/queryable.rs | 27 +++------------------------ zenoh/src/sample_builder.rs | 20 ++++---------------- 3 files changed, 11 insertions(+), 70 deletions(-) diff --git a/zenoh/src/publication.rs b/zenoh/src/publication.rs index e60e40d295..f8a42077b9 100644 --- a/zenoh/src/publication.rs +++ b/zenoh/src/publication.rs @@ -147,15 +147,9 @@ impl QoSBuilderTrait for DeleteBuilder<'_, '_> { } impl TimestampBuilderTrait for PutBuilder<'_, '_> { - fn with_timestamp_opt(self, timestamp: Option) -> Self { + fn with_timestamp(self, timestamp: Option) -> Self { Self { timestamp, ..self } } - fn with_timestamp(self, timestamp: uhlc::Timestamp) -> Self { - Self { - timestamp: Some(timestamp), - ..self - } - } } impl SampleBuilderTrait for PutBuilder<'_, '_> { @@ -180,15 +174,9 @@ impl SampleBuilderTrait for PutBuilder<'_, '_> { } impl TimestampBuilderTrait for DeleteBuilder<'_, '_> { - fn with_timestamp_opt(self, timestamp: Option) -> Self { + fn with_timestamp(self, timestamp: Option) -> Self { Self { timestamp, ..self } } - fn with_timestamp(self, timestamp: uhlc::Timestamp) -> Self { - Self { - timestamp: Some(timestamp), - ..self - } - } } impl SampleBuilderTrait for DeleteBuilder<'_, '_> { @@ -766,16 +754,9 @@ pub struct DeletePublication<'a> { } impl TimestampBuilderTrait for PutPublication<'_> { - fn with_timestamp_opt(self, timestamp: Option) -> Self { + fn with_timestamp(self, timestamp: Option) -> Self { Self { timestamp, ..self } } - - fn with_timestamp(self, timestamp: uhlc::Timestamp) -> Self { - Self { - timestamp: Some(timestamp), - ..self - } - } } impl SampleBuilderTrait for PutPublication<'_> { @@ -818,16 +799,9 @@ impl ValueBuilderTrait for PutPublication<'_> { } impl TimestampBuilderTrait for DeletePublication<'_> { - fn with_timestamp_opt(self, timestamp: Option) -> Self { + fn with_timestamp(self, timestamp: Option) -> Self { Self { timestamp, ..self } } - - fn with_timestamp(self, timestamp: uhlc::Timestamp) -> Self 
{ - Self { - timestamp: Some(timestamp), - ..self - } - } } impl SampleBuilderTrait for DeletePublication<'_> { diff --git a/zenoh/src/queryable.rs b/zenoh/src/queryable.rs index d9327415f5..625ae6f25f 100644 --- a/zenoh/src/queryable.rs +++ b/zenoh/src/queryable.rs @@ -239,14 +239,7 @@ impl<'a> ReplySampleBuilder<'a> { } impl TimestampBuilderTrait for ReplySampleBuilder<'_> { - fn with_timestamp_opt(self, timestamp: Option) -> Self { - Self { - sample_builder: self.sample_builder.with_timestamp_opt(timestamp), - ..self - } - } - - fn with_timestamp(self, timestamp: Timestamp) -> Self { + fn with_timestamp(self, timestamp: Option) -> Self { Self { sample_builder: self.sample_builder.with_timestamp(timestamp), ..self @@ -331,14 +324,7 @@ pub struct ReplyBuilder<'a> { } impl TimestampBuilderTrait for ReplyBuilder<'_> { - fn with_timestamp_opt(self, timestamp: Option) -> Self { - Self { - sample_builder: self.sample_builder.with_timestamp_opt(timestamp), - ..self - } - } - - fn with_timestamp(self, timestamp: Timestamp) -> Self { + fn with_timestamp(self, timestamp: Option) -> Self { Self { sample_builder: self.sample_builder.with_timestamp(timestamp), ..self @@ -423,14 +409,7 @@ pub struct ReplyDelBuilder<'a> { } impl TimestampBuilderTrait for ReplyDelBuilder<'_> { - fn with_timestamp_opt(self, timestamp: Option) -> Self { - Self { - sample_builder: self.sample_builder.with_timestamp_opt(timestamp), - ..self - } - } - - fn with_timestamp(self, timestamp: Timestamp) -> Self { + fn with_timestamp(self, timestamp: Option) -> Self { Self { sample_builder: self.sample_builder.with_timestamp(timestamp), ..self diff --git a/zenoh/src/sample_builder.rs b/zenoh/src/sample_builder.rs index b13bfce346..990586ca0f 100644 --- a/zenoh/src/sample_builder.rs +++ b/zenoh/src/sample_builder.rs @@ -42,9 +42,7 @@ pub trait QoSBuilderTrait { pub trait TimestampBuilderTrait { /// Sets of clears timestamp - fn with_timestamp_opt(self, timestamp: Option) -> Self; - /// Sets timestamp - fn 
with_timestamp(self, timestamp: Timestamp) -> Self; + fn with_timestamp(self, timestamp: Option) -> Self; } pub trait SampleBuilderTrait { @@ -102,16 +100,12 @@ impl SampleBuilder { } impl TimestampBuilderTrait for SampleBuilder { - fn with_timestamp_opt(self, timestamp: Option) -> Self { + fn with_timestamp(self, timestamp: Option) -> Self { Self(Sample { timestamp, ..self.0 }) } - - fn with_timestamp(self, timestamp: Timestamp) -> Self { - self.with_timestamp_opt(Some(timestamp)) - } } impl SampleBuilderTrait for SampleBuilder { @@ -200,12 +194,9 @@ impl PutSampleBuilder { } impl TimestampBuilderTrait for PutSampleBuilder { - fn with_timestamp(self, timestamp: Timestamp) -> Self { + fn with_timestamp(self, timestamp: Option) -> Self { Self(self.0.with_timestamp(timestamp)) } - fn with_timestamp_opt(self, timestamp: Option) -> Self { - Self(self.0.with_timestamp_opt(timestamp)) - } } impl SampleBuilderTrait for PutSampleBuilder { @@ -297,12 +288,9 @@ impl DeleteSampleBuilder { } impl TimestampBuilderTrait for DeleteSampleBuilder { - fn with_timestamp(self, timestamp: Timestamp) -> Self { + fn with_timestamp(self, timestamp: Option) -> Self { Self(self.0.with_timestamp(timestamp)) } - fn with_timestamp_opt(self, timestamp: Option) -> Self { - Self(self.0.with_timestamp_opt(timestamp)) - } } impl SampleBuilderTrait for DeleteSampleBuilder { From aafd2a4761b8b4df5089d19ef74f71bfe28aa644 Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Thu, 28 Mar 2024 13:10:32 +0100 Subject: [PATCH 063/124] with removed, into> added --- examples/examples/z_pub.rs | 2 +- .../src/replica/align_queryable.rs | 2 +- .../src/replica/aligner.rs | 2 +- .../src/replica/storage.rs | 10 +-- zenoh-ext/src/querying_subscriber.rs | 2 +- zenoh/src/publication.rs | 70 +++++++++---------- zenoh/src/query.rs | 11 +-- zenoh/src/queryable.rs | 60 +++++----------- zenoh/src/sample.rs | 11 +++ zenoh/src/sample_builder.rs | 58 ++++++--------- zenoh/tests/attachments.rs | 10 +-- 11 files changed, 99 
insertions(+), 139 deletions(-) diff --git a/examples/examples/z_pub.rs b/examples/examples/z_pub.rs index d22d4d55ee..416ff31f46 100644 --- a/examples/examples/z_pub.rs +++ b/examples/examples/z_pub.rs @@ -39,7 +39,7 @@ async fn main() { println!("Putting Data ('{}': '{}')...", &key_expr, buf); let mut put = publisher.put(buf); if let Some(attachment) = &attachment { - put = put.with_attachment( + put = put.attachment( attachment .split('&') .map(|pair| split_once(pair, '=')) diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/align_queryable.rs b/plugins/zenoh-plugin-storage-manager/src/replica/align_queryable.rs index e5c4840666..973fb89abe 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/align_queryable.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/align_queryable.rs @@ -130,7 +130,7 @@ impl AlignQueryable { query .reply(k, v.payload) .with_encoding(v.encoding) - .with_timestamp(ts) + .timestamp(ts) .res() .await .unwrap(); diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs b/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs index 4119a941e5..9d5257e53f 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs @@ -112,7 +112,7 @@ impl Aligner { } = value; let sample = PutSampleBuilder::new(key, payload) .with_encoding(encoding) - .with_timestamp(ts) + .timestamp(ts) .res_sync(); log::debug!("[ALIGNER] Adding {:?} to storage", sample); self.tx_sample.send_async(sample).await.unwrap_or_else(|e| { diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs index 69c973de39..de76ade51d 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs @@ -239,7 +239,7 @@ impl StorageService { } }; let timestamp = 
sample.timestamp().cloned().unwrap_or(new_reception_timestamp()); - let sample = SampleBuilder::from(sample).with_timestamp(timestamp).res_sync(); + let sample = SampleBuilder::from(sample).timestamp(timestamp).res_sync(); self.process_sample(sample).await; }, // on query on key_expr @@ -316,14 +316,14 @@ impl StorageService { } = data.value; PutSampleBuilder::new(KeyExpr::from(k.clone()), payload) .with_encoding(encoding) - .with_timestamp(data.timestamp) + .timestamp(data.timestamp) .res_sync() } Some(Update { kind: SampleKind::Delete, data, }) => DeleteSampleBuilder::new(KeyExpr::from(k.clone())) - .with_timestamp(data.timestamp) + .timestamp(data.timestamp) .res_sync(), None => SampleBuilder::from(sample.clone()) .keyexpr(k.clone()) @@ -533,7 +533,7 @@ impl StorageService { if let Err(e) = q .reply(key.clone(), payload) .with_encoding(encoding) - .with_timestamp(entry.timestamp) + .timestamp(entry.timestamp) .res_async() .await { @@ -568,7 +568,7 @@ impl StorageService { if let Err(e) = q .reply(q.key_expr().clone(), payload) .with_encoding(encoding) - .with_timestamp(entry.timestamp) + .timestamp(entry.timestamp) .res_async() .await { diff --git a/zenoh-ext/src/querying_subscriber.rs b/zenoh-ext/src/querying_subscriber.rs index e6b269cfbd..52a4263396 100644 --- a/zenoh-ext/src/querying_subscriber.rs +++ b/zenoh-ext/src/querying_subscriber.rs @@ -665,7 +665,7 @@ impl<'a, Receiver> FetchingSubscriber<'a, Receiver> { // ensure the sample has a timestamp, thus it will always be sorted into the MergeQueue // after any timestamped Sample possibly coming from a fetch reply. 
let timestamp = s.timestamp().cloned().unwrap_or(new_reception_timestamp()); - let s = SampleBuilder::from(s).with_timestamp(timestamp).res_sync(); + let s = SampleBuilder::from(s).timestamp(timestamp).res_sync(); state.merge_queue.push(s); } } diff --git a/zenoh/src/publication.rs b/zenoh/src/publication.rs index f8a42077b9..cd68530bf7 100644 --- a/zenoh/src/publication.rs +++ b/zenoh/src/publication.rs @@ -147,54 +147,52 @@ impl QoSBuilderTrait for DeleteBuilder<'_, '_> { } impl TimestampBuilderTrait for PutBuilder<'_, '_> { - fn with_timestamp(self, timestamp: Option) -> Self { - Self { timestamp, ..self } + fn timestamp>>(self, timestamp: T) -> Self { + Self { + timestamp: timestamp.into(), + ..self + } } } impl SampleBuilderTrait for PutBuilder<'_, '_> { #[cfg(feature = "unstable")] - fn with_source_info(self, source_info: SourceInfo) -> Self { + fn source_info(self, source_info: SourceInfo) -> Self { Self { source_info, ..self } } #[cfg(feature = "unstable")] - fn with_attachment_opt(self, attachment: Option) -> Self { - Self { attachment, ..self } - } - #[cfg(feature = "unstable")] - fn with_attachment(self, attachment: Attachment) -> Self { + fn attachment>>(self, attachment: T) -> Self { Self { - attachment: Some(attachment), + attachment: attachment.into(), ..self } } } impl TimestampBuilderTrait for DeleteBuilder<'_, '_> { - fn with_timestamp(self, timestamp: Option) -> Self { - Self { timestamp, ..self } + fn timestamp>>(self, timestamp: T) -> Self { + Self { + timestamp: timestamp.into(), + ..self + } } } impl SampleBuilderTrait for DeleteBuilder<'_, '_> { #[cfg(feature = "unstable")] - fn with_source_info(self, source_info: SourceInfo) -> Self { + fn source_info(self, source_info: SourceInfo) -> Self { Self { source_info, ..self } } #[cfg(feature = "unstable")] - fn with_attachment_opt(self, attachment: Option) -> Self { - Self { attachment, ..self } - } - #[cfg(feature = "unstable")] - fn with_attachment(self, attachment: Attachment) -> Self { + fn 
attachment>>(self, attachment: T) -> Self { Self { - attachment: Some(attachment), + attachment: attachment.into(), ..self } } @@ -754,14 +752,17 @@ pub struct DeletePublication<'a> { } impl TimestampBuilderTrait for PutPublication<'_> { - fn with_timestamp(self, timestamp: Option) -> Self { - Self { timestamp, ..self } + fn timestamp>>(self, timestamp: T) -> Self { + Self { + timestamp: timestamp.into(), + ..self + } } } impl SampleBuilderTrait for PutPublication<'_> { #[cfg(feature = "unstable")] - fn with_source_info(self, source_info: SourceInfo) -> Self { + fn source_info(self, source_info: SourceInfo) -> Self { Self { source_info, ..self @@ -769,14 +770,9 @@ impl SampleBuilderTrait for PutPublication<'_> { } #[cfg(feature = "unstable")] - fn with_attachment_opt(self, attachment: Option) -> Self { - Self { attachment, ..self } - } - - #[cfg(feature = "unstable")] - fn with_attachment(self, attachment: Attachment) -> Self { + fn attachment>>(self, attachment: T) -> Self { Self { - attachment: Some(attachment), + attachment: attachment.into(), ..self } } @@ -799,14 +795,17 @@ impl ValueBuilderTrait for PutPublication<'_> { } impl TimestampBuilderTrait for DeletePublication<'_> { - fn with_timestamp(self, timestamp: Option) -> Self { - Self { timestamp, ..self } + fn timestamp>>(self, timestamp: T) -> Self { + Self { + timestamp: timestamp.into(), + ..self + } } } impl SampleBuilderTrait for DeletePublication<'_> { #[cfg(feature = "unstable")] - fn with_source_info(self, source_info: SourceInfo) -> Self { + fn source_info(self, source_info: SourceInfo) -> Self { Self { source_info, ..self @@ -814,14 +813,9 @@ impl SampleBuilderTrait for DeletePublication<'_> { } #[cfg(feature = "unstable")] - fn with_attachment_opt(self, attachment: Option) -> Self { - Self { attachment, ..self } - } - - #[cfg(feature = "unstable")] - fn with_attachment(self, attachment: Attachment) -> Self { + fn attachment>>(self, attachment: T) -> Self { Self { - attachment: Some(attachment), 
+ attachment: attachment.into(), ..self } } diff --git a/zenoh/src/query.rs b/zenoh/src/query.rs index db17715a89..2d4e5e1ee3 100644 --- a/zenoh/src/query.rs +++ b/zenoh/src/query.rs @@ -135,7 +135,7 @@ pub struct GetBuilder<'a, 'b, Handler> { impl SampleBuilderTrait for GetBuilder<'_, '_, Handler> { #[cfg(feature = "unstable")] - fn with_source_info(self, source_info: SourceInfo) -> Self { + fn source_info(self, source_info: SourceInfo) -> Self { Self { source_info, ..self @@ -143,14 +143,9 @@ impl SampleBuilderTrait for GetBuilder<'_, '_, Handler> { } #[cfg(feature = "unstable")] - fn with_attachment_opt(self, attachment: Option) -> Self { - Self { attachment, ..self } - } - - #[cfg(feature = "unstable")] - fn with_attachment(self, attachment: Attachment) -> Self { + fn attachment>>(self, attachment: T) -> Self { Self { - attachment: Some(attachment), + attachment: attachment.into(), ..self } } diff --git a/zenoh/src/queryable.rs b/zenoh/src/queryable.rs index 625ae6f25f..66cb34459b 100644 --- a/zenoh/src/queryable.rs +++ b/zenoh/src/queryable.rs @@ -239,9 +239,9 @@ impl<'a> ReplySampleBuilder<'a> { } impl TimestampBuilderTrait for ReplySampleBuilder<'_> { - fn with_timestamp(self, timestamp: Option) -> Self { + fn timestamp>>(self, timestamp: T) -> Self { Self { - sample_builder: self.sample_builder.with_timestamp(timestamp), + sample_builder: self.sample_builder.timestamp(timestamp), ..self } } @@ -249,25 +249,17 @@ impl TimestampBuilderTrait for ReplySampleBuilder<'_> { impl SampleBuilderTrait for ReplySampleBuilder<'_> { #[cfg(feature = "unstable")] - fn with_source_info(self, source_info: SourceInfo) -> Self { + fn source_info(self, source_info: SourceInfo) -> Self { Self { - sample_builder: self.sample_builder.with_source_info(source_info), + sample_builder: self.sample_builder.source_info(source_info), ..self } } #[cfg(feature = "unstable")] - fn with_attachment_opt(self, attachment: Option) -> Self { + fn attachment>>(self, attachment: T) -> Self { Self { 
- sample_builder: self.sample_builder.with_attachment_opt(attachment), - ..self - } - } - - #[cfg(feature = "unstable")] - fn with_attachment(self, attachment: Attachment) -> Self { - Self { - sample_builder: self.sample_builder.with_attachment(attachment), + sample_builder: self.sample_builder.attachment(attachment), ..self } } @@ -324,9 +316,9 @@ pub struct ReplyBuilder<'a> { } impl TimestampBuilderTrait for ReplyBuilder<'_> { - fn with_timestamp(self, timestamp: Option) -> Self { + fn timestamp>>(self, timestamp: T) -> Self { Self { - sample_builder: self.sample_builder.with_timestamp(timestamp), + sample_builder: self.sample_builder.timestamp(timestamp), ..self } } @@ -334,25 +326,17 @@ impl TimestampBuilderTrait for ReplyBuilder<'_> { impl SampleBuilderTrait for ReplyBuilder<'_> { #[cfg(feature = "unstable")] - fn with_source_info(self, source_info: SourceInfo) -> Self { - Self { - sample_builder: self.sample_builder.with_source_info(source_info), - ..self - } - } - - #[cfg(feature = "unstable")] - fn with_attachment_opt(self, attachment: Option) -> Self { + fn source_info(self, source_info: SourceInfo) -> Self { Self { - sample_builder: self.sample_builder.with_attachment_opt(attachment), + sample_builder: self.sample_builder.source_info(source_info), ..self } } #[cfg(feature = "unstable")] - fn with_attachment(self, attachment: Attachment) -> Self { + fn attachment>>(self, attachment: T) -> Self { Self { - sample_builder: self.sample_builder.with_attachment(attachment), + sample_builder: self.sample_builder.attachment(attachment), ..self } } @@ -409,9 +393,9 @@ pub struct ReplyDelBuilder<'a> { } impl TimestampBuilderTrait for ReplyDelBuilder<'_> { - fn with_timestamp(self, timestamp: Option) -> Self { + fn timestamp>>(self, timestamp: T) -> Self { Self { - sample_builder: self.sample_builder.with_timestamp(timestamp), + sample_builder: self.sample_builder.timestamp(timestamp), ..self } } @@ -419,25 +403,17 @@ impl TimestampBuilderTrait for 
ReplyDelBuilder<'_> { impl SampleBuilderTrait for ReplyDelBuilder<'_> { #[cfg(feature = "unstable")] - fn with_source_info(self, source_info: SourceInfo) -> Self { - Self { - sample_builder: self.sample_builder.with_source_info(source_info), - ..self - } - } - - #[cfg(feature = "unstable")] - fn with_attachment_opt(self, attachment: Option) -> Self { + fn source_info(self, source_info: SourceInfo) -> Self { Self { - sample_builder: self.sample_builder.with_attachment_opt(attachment), + sample_builder: self.sample_builder.source_info(source_info), ..self } } #[cfg(feature = "unstable")] - fn with_attachment(self, attachment: Attachment) -> Self { + fn attachment>>(self, attachment: T) -> Self { Self { - sample_builder: self.sample_builder.with_attachment(attachment), + sample_builder: self.sample_builder.attachment(attachment), ..self } } diff --git a/zenoh/src/sample.rs b/zenoh/src/sample.rs index d774e5e007..163ae2090a 100644 --- a/zenoh/src/sample.rs +++ b/zenoh/src/sample.rs @@ -263,6 +263,17 @@ mod attachment { } } } + #[zenoh_macros::unstable] + impl From for Option { + fn from(value: AttachmentBuilder) -> Self { + if value.inner.is_empty() { + None + } else { + Some(value.into()) + } + } + } + #[zenoh_macros::unstable] #[derive(Clone)] pub struct Attachment { diff --git a/zenoh/src/sample_builder.rs b/zenoh/src/sample_builder.rs index 990586ca0f..2d7277506d 100644 --- a/zenoh/src/sample_builder.rs +++ b/zenoh/src/sample_builder.rs @@ -41,20 +41,17 @@ pub trait QoSBuilderTrait { } pub trait TimestampBuilderTrait { - /// Sets of clears timestamp - fn with_timestamp(self, timestamp: Option) -> Self; + /// Sets of clears timestamp + fn timestamp>>(self, timestamp: T) -> Self; } pub trait SampleBuilderTrait { /// Attach source information #[zenoh_macros::unstable] - fn with_source_info(self, source_info: SourceInfo) -> Self; - /// Attach or remove user-provided data in key-value format - #[zenoh_macros::unstable] - fn with_attachment_opt(self, attachment: Option) 
-> Self; + fn source_info(self, source_info: SourceInfo) -> Self; /// Attach user-provided data in key-value format #[zenoh_macros::unstable] - fn with_attachment(self, attachment: Attachment) -> Self; + fn attachment>>(self, attachment: T) -> Self; } pub trait ValueBuilderTrait { @@ -100,9 +97,9 @@ impl SampleBuilder { } impl TimestampBuilderTrait for SampleBuilder { - fn with_timestamp(self, timestamp: Option) -> Self { + fn timestamp>>(self, timestamp: T) -> Self { Self(Sample { - timestamp, + timestamp: timestamp.into(), ..self.0 }) } @@ -110,7 +107,7 @@ impl TimestampBuilderTrait for SampleBuilder { impl SampleBuilderTrait for SampleBuilder { #[zenoh_macros::unstable] - fn with_source_info(self, source_info: SourceInfo) -> Self { + fn source_info(self, source_info: SourceInfo) -> Self { Self(Sample { source_info, ..self.0 @@ -118,17 +115,12 @@ impl SampleBuilderTrait for SampleBuilder { } #[zenoh_macros::unstable] - fn with_attachment_opt(self, attachment: Option) -> Self { + fn attachment>>(self, attachment: T) -> Self { Self(Sample { - attachment, + attachment: attachment.into(), ..self.0 }) } - - #[zenoh_macros::unstable] - fn with_attachment(self, attachment: Attachment) -> Self { - self.with_attachment_opt(Some(attachment)) - } } impl QoSBuilderTrait for SampleBuilder { @@ -194,23 +186,19 @@ impl PutSampleBuilder { } impl TimestampBuilderTrait for PutSampleBuilder { - fn with_timestamp(self, timestamp: Option) -> Self { - Self(self.0.with_timestamp(timestamp)) + fn timestamp>>(self, timestamp: T) -> Self { + Self(self.0.timestamp(timestamp)) } } impl SampleBuilderTrait for PutSampleBuilder { #[zenoh_macros::unstable] - fn with_source_info(self, source_info: SourceInfo) -> Self { - Self(self.0.with_source_info(source_info)) + fn source_info(self, source_info: SourceInfo) -> Self { + Self(self.0.source_info(source_info)) } #[zenoh_macros::unstable] - fn with_attachment(self, attachment: Attachment) -> Self { - Self(self.0.with_attachment(attachment)) - } - 
#[zenoh_macros::unstable] - fn with_attachment_opt(self, attachment: Option) -> Self { - Self(self.0.with_attachment_opt(attachment)) + fn attachment>>(self, attachment: T) -> Self { + Self(self.0.attachment(attachment)) } } @@ -288,23 +276,19 @@ impl DeleteSampleBuilder { } impl TimestampBuilderTrait for DeleteSampleBuilder { - fn with_timestamp(self, timestamp: Option) -> Self { - Self(self.0.with_timestamp(timestamp)) + fn timestamp>>(self, timestamp: T) -> Self { + Self(self.0.timestamp(timestamp)) } } impl SampleBuilderTrait for DeleteSampleBuilder { #[zenoh_macros::unstable] - fn with_source_info(self, source_info: SourceInfo) -> Self { - Self(self.0.with_source_info(source_info)) - } - #[zenoh_macros::unstable] - fn with_attachment(self, attachment: Attachment) -> Self { - Self(self.0.with_attachment(attachment)) + fn source_info(self, source_info: SourceInfo) -> Self { + Self(self.0.source_info(source_info)) } #[zenoh_macros::unstable] - fn with_attachment_opt(self, attachment: Option) -> Self { - Self(self.0.with_attachment_opt(attachment)) + fn attachment>>(self, attachment: T) -> Self { + Self(self.0.attachment(attachment)) } } diff --git a/zenoh/tests/attachments.rs b/zenoh/tests/attachments.rs index ba4c8a7d7c..e87fc5243b 100644 --- a/zenoh/tests/attachments.rs +++ b/zenoh/tests/attachments.rs @@ -38,22 +38,22 @@ fn pubsub() { } zenoh .put("test/attachment", "put") - .with_attachment( + .attachment(Some( backer .iter() .map(|b| (b.0.as_slice(), b.1.as_slice())) .collect(), - ) + )) .res() .unwrap(); publisher .put("publisher") - .with_attachment( + .attachment(Some( backer .iter() .map(|b| (b.0.as_slice(), b.1.as_slice())) .collect(), - ) + )) .res() .unwrap(); } @@ -84,7 +84,7 @@ fn queries() { query.key_expr().clone(), query.value().unwrap().payload.clone(), ) - .with_attachment(attachment) + .attachment(attachment) .res() .unwrap(); }) From fb6509df61afccf4cd983e460553e9f07ce77d25 Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Thu, 28 Mar 2024 
13:21:24 +0100 Subject: [PATCH 064/124] into to encoding returned --- .../src/replica/storage.rs | 2 +- zenoh/src/net/runtime/adminspace.rs | 2 +- zenoh/src/publication.rs | 14 ++++++++++---- zenoh/src/query.rs | 7 ++----- zenoh/src/queryable.rs | 7 ++----- zenoh/src/sample_builder.rs | 17 ++++++----------- zenoh/src/value.rs | 12 ++++++------ 7 files changed, 28 insertions(+), 33 deletions(-) diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs index de76ade51d..6d31c9710a 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs @@ -719,7 +719,7 @@ fn construct_update(data: String) -> Update { for slice in result.3 { payload.push_zslice(slice.to_vec().into()); } - let value = Value::new(payload).with_encoding(result.2.into()); + let value = Value::new(payload).with_encoding(result.2); let data = StoredData { value, timestamp: Timestamp::from_str(&result.1).unwrap(), // @TODO: remove the unwrap() diff --git a/zenoh/src/net/runtime/adminspace.rs b/zenoh/src/net/runtime/adminspace.rs index 9047e8b112..caeeb5c89b 100644 --- a/zenoh/src/net/runtime/adminspace.rs +++ b/zenoh/src/net/runtime/adminspace.rs @@ -426,7 +426,7 @@ impl Primitives for AdminSpace { parameters, value: query .ext_body - .map(|b| Value::from(b.payload).with_encoding(b.encoding.into())), + .map(|b| Value::from(b.payload).with_encoding(b.encoding)), qid: msg.id, zid, primitives, diff --git a/zenoh/src/publication.rs b/zenoh/src/publication.rs index cd68530bf7..0e93350222 100644 --- a/zenoh/src/publication.rs +++ b/zenoh/src/publication.rs @@ -199,8 +199,11 @@ impl SampleBuilderTrait for DeleteBuilder<'_, '_> { } impl ValueBuilderTrait for PutBuilder<'_, '_> { - fn with_encoding(self, encoding: Encoding) -> Self { - Self { encoding, ..self } + fn with_encoding>(self, encoding: T) -> Self { + Self { + encoding: encoding.into(), + ..self + } 
} fn with_payload(self, payload: IntoPayload) -> Self @@ -779,8 +782,11 @@ impl SampleBuilderTrait for PutPublication<'_> { } impl ValueBuilderTrait for PutPublication<'_> { - fn with_encoding(self, encoding: Encoding) -> Self { - Self { encoding, ..self } + fn with_encoding>(self, encoding: T) -> Self { + Self { + encoding: encoding.into(), + ..self + } } fn with_payload(self, payload: IntoPayload) -> Self diff --git a/zenoh/src/query.rs b/zenoh/src/query.rs index 2d4e5e1ee3..05f9a3557f 100644 --- a/zenoh/src/query.rs +++ b/zenoh/src/query.rs @@ -169,15 +169,12 @@ impl QoSBuilderTrait for GetBuilder<'_, '_, DefaultHandler> { } impl ValueBuilderTrait for GetBuilder<'_, '_, Handler> { - fn with_encoding(self, encoding: Encoding) -> Self { + fn with_encoding>(self, encoding: T) -> Self { let value = Some(self.value.unwrap_or_default().with_encoding(encoding)); Self { value, ..self } } - fn with_payload(self, payload: IntoPayload) -> Self - where - IntoPayload: Into, - { + fn with_payload>(self, payload: T) -> Self { let value = Some(self.value.unwrap_or_default().with_payload(payload)); Self { value, ..self } } diff --git a/zenoh/src/queryable.rs b/zenoh/src/queryable.rs index 66cb34459b..4f478e1ce7 100644 --- a/zenoh/src/queryable.rs +++ b/zenoh/src/queryable.rs @@ -366,17 +366,14 @@ impl QoSBuilderTrait for ReplyBuilder<'_> { } impl ValueBuilderTrait for ReplyBuilder<'_> { - fn with_encoding(self, encoding: Encoding) -> Self { + fn with_encoding>(self, encoding: T) -> Self { Self { sample_builder: self.sample_builder.with_encoding(encoding), ..self } } - fn with_payload(self, payload: IntoPayload) -> Self - where - IntoPayload: Into, - { + fn with_payload>(self, payload: T) -> Self { Self { sample_builder: self.sample_builder.with_payload(payload), ..self diff --git a/zenoh/src/sample_builder.rs b/zenoh/src/sample_builder.rs index 2d7277506d..a113a9c953 100644 --- a/zenoh/src/sample_builder.rs +++ b/zenoh/src/sample_builder.rs @@ -41,7 +41,7 @@ pub trait 
QoSBuilderTrait { } pub trait TimestampBuilderTrait { - /// Sets of clears timestamp + /// Sets of clears timestamp fn timestamp>>(self, timestamp: T) -> Self; } @@ -56,11 +56,9 @@ pub trait SampleBuilderTrait { pub trait ValueBuilderTrait { /// Set the [`Encoding`] - fn with_encoding(self, encoding: Encoding) -> Self; + fn with_encoding>(self, encoding: T) -> Self; /// Sets the payload - fn with_payload(self, payload: IntoPayload) -> Self - where - IntoPayload: Into; + fn with_payload>(self, payload: T) -> Self; } #[derive(Debug)] @@ -215,16 +213,13 @@ impl QoSBuilderTrait for PutSampleBuilder { } impl ValueBuilderTrait for PutSampleBuilder { - fn with_encoding(self, encoding: Encoding) -> Self { + fn with_encoding>(self, encoding: T) -> Self { Self(SampleBuilder(Sample { - encoding, + encoding: encoding.into(), ..self.0 .0 })) } - fn with_payload(self, payload: IntoPayload) -> Self - where - IntoPayload: Into, - { + fn with_payload>(self, payload: T) -> Self { Self(SampleBuilder(Sample { payload: payload.into(), ..self.0 .0 diff --git a/zenoh/src/value.rs b/zenoh/src/value.rs index 2d98cbf398..2e288c64ad 100644 --- a/zenoh/src/value.rs +++ b/zenoh/src/value.rs @@ -46,13 +46,13 @@ impl Value { } impl ValueBuilderTrait for Value { - fn with_encoding(self, encoding: Encoding) -> Self { - Self { encoding, ..self } + fn with_encoding>(self, encoding: T) -> Self { + Self { + encoding: encoding.into(), + ..self + } } - fn with_payload(self, payload: IntoPayload) -> Self - where - IntoPayload: Into, - { + fn with_payload>(self, payload: T) -> Self { Self { payload: payload.into(), ..self From 2ff6bc22f79f5ab373e1073ae5be1744b646ab49 Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Thu, 28 Mar 2024 13:23:17 +0100 Subject: [PATCH 065/124] example build fix --- examples/examples/z_pub.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/examples/z_pub.rs b/examples/examples/z_pub.rs index 416ff31f46..7166981e72 100644 --- 
a/examples/examples/z_pub.rs +++ b/examples/examples/z_pub.rs @@ -39,12 +39,12 @@ async fn main() { println!("Putting Data ('{}': '{}')...", &key_expr, buf); let mut put = publisher.put(buf); if let Some(attachment) = &attachment { - put = put.attachment( + put = put.attachment(Some( attachment .split('&') .map(|pair| split_once(pair, '=')) .collect(), - ) + )) } put.res().await.unwrap(); } From 5bbef9c7d4643259a23cde58a20ea08f4a8a464f Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Thu, 28 Mar 2024 13:24:50 +0100 Subject: [PATCH 066/124] with removed --- plugins/zenoh-plugin-rest/src/lib.rs | 10 ++-------- .../src/replica/align_queryable.rs | 2 +- .../src/replica/aligner.rs | 2 +- .../src/replica/storage.rs | 10 +++++----- zenoh/src/net/runtime/adminspace.rs | 4 ++-- zenoh/src/publication.rs | 8 ++++---- zenoh/src/query.rs | 8 ++++---- zenoh/src/queryable.rs | 10 +++++----- zenoh/src/sample.rs | 2 +- zenoh/src/sample_builder.rs | 8 ++++---- zenoh/src/value.rs | 4 ++-- 11 files changed, 31 insertions(+), 37 deletions(-) diff --git a/plugins/zenoh-plugin-rest/src/lib.rs b/plugins/zenoh-plugin-rest/src/lib.rs index 74da23679f..c90bbe5ac1 100644 --- a/plugins/zenoh-plugin-rest/src/lib.rs +++ b/plugins/zenoh-plugin-rest/src/lib.rs @@ -420,7 +420,7 @@ async fn query(mut req: Request<(Arc, String)>) -> tide::Result { @@ -464,13 +464,7 @@ async fn write(mut req: Request<(Arc, String)>) -> tide::Result { - session - .put(&key_expr, bytes) - .with_encoding(encoding) - .res() - .await - } + SampleKind::Put => session.put(&key_expr, bytes).encoding(encoding).res().await, SampleKind::Delete => session.delete(&key_expr).res().await, }; match res { diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/align_queryable.rs b/plugins/zenoh-plugin-storage-manager/src/replica/align_queryable.rs index 973fb89abe..b2d2bdc399 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/align_queryable.rs +++ 
b/plugins/zenoh-plugin-storage-manager/src/replica/align_queryable.rs @@ -129,7 +129,7 @@ impl AlignQueryable { AlignData::Data(k, (v, ts)) => { query .reply(k, v.payload) - .with_encoding(v.encoding) + .encoding(v.encoding) .timestamp(ts) .res() .await diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs b/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs index 9d5257e53f..6527d54c66 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs @@ -111,7 +111,7 @@ impl Aligner { payload, encoding, .. } = value; let sample = PutSampleBuilder::new(key, payload) - .with_encoding(encoding) + .encoding(encoding) .timestamp(ts) .res_sync(); log::debug!("[ALIGNER] Adding {:?} to storage", sample); diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs index 6d31c9710a..8e60ee320e 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs @@ -315,7 +315,7 @@ impl StorageService { payload, encoding, .. 
} = data.value; PutSampleBuilder::new(KeyExpr::from(k.clone()), payload) - .with_encoding(encoding) + .encoding(encoding) .timestamp(data.timestamp) .res_sync() } @@ -344,7 +344,7 @@ impl StorageService { .put( stripped_key, Value::new(sample_to_store.payload().clone()) - .with_encoding(sample_to_store.encoding().clone()), + .encoding(sample_to_store.encoding().clone()), *sample_to_store.timestamp().unwrap(), ) .await @@ -532,7 +532,7 @@ impl StorageService { } = entry.value; if let Err(e) = q .reply(key.clone(), payload) - .with_encoding(encoding) + .encoding(encoding) .timestamp(entry.timestamp) .res_async() .await @@ -567,7 +567,7 @@ impl StorageService { } = entry.value; if let Err(e) = q .reply(q.key_expr().clone(), payload) - .with_encoding(encoding) + .encoding(encoding) .timestamp(entry.timestamp) .res_async() .await @@ -719,7 +719,7 @@ fn construct_update(data: String) -> Update { for slice in result.3 { payload.push_zslice(slice.to_vec().into()); } - let value = Value::new(payload).with_encoding(result.2); + let value = Value::new(payload).encoding(result.2); let data = StoredData { value, timestamp: Timestamp::from_str(&result.1).unwrap(), // @TODO: remove the unwrap() diff --git a/zenoh/src/net/runtime/adminspace.rs b/zenoh/src/net/runtime/adminspace.rs index caeeb5c89b..070b3bcd3a 100644 --- a/zenoh/src/net/runtime/adminspace.rs +++ b/zenoh/src/net/runtime/adminspace.rs @@ -426,7 +426,7 @@ impl Primitives for AdminSpace { parameters, value: query .ext_body - .map(|b| Value::from(b.payload).with_encoding(b.encoding)), + .map(|b| Value::from(b.payload).encoding(b.encoding)), qid: msg.id, zid, primitives, @@ -578,7 +578,7 @@ fn router_data(context: &AdminContext, query: Query) { }; if let Err(e) = query .reply(reply_key, payload) - .with_encoding(Encoding::APPLICATION_JSON) + .encoding(Encoding::APPLICATION_JSON) .res_sync() { log::error!("Error sending AdminSpace reply: {:?}", e); diff --git a/zenoh/src/publication.rs b/zenoh/src/publication.rs index 
0e93350222..8f52d5e4fa 100644 --- a/zenoh/src/publication.rs +++ b/zenoh/src/publication.rs @@ -199,14 +199,14 @@ impl SampleBuilderTrait for DeleteBuilder<'_, '_> { } impl ValueBuilderTrait for PutBuilder<'_, '_> { - fn with_encoding>(self, encoding: T) -> Self { + fn encoding>(self, encoding: T) -> Self { Self { encoding: encoding.into(), ..self } } - fn with_payload(self, payload: IntoPayload) -> Self + fn payload(self, payload: IntoPayload) -> Self where IntoPayload: Into, { @@ -782,14 +782,14 @@ impl SampleBuilderTrait for PutPublication<'_> { } impl ValueBuilderTrait for PutPublication<'_> { - fn with_encoding>(self, encoding: T) -> Self { + fn encoding>(self, encoding: T) -> Self { Self { encoding: encoding.into(), ..self } } - fn with_payload(self, payload: IntoPayload) -> Self + fn payload(self, payload: IntoPayload) -> Self where IntoPayload: Into, { diff --git a/zenoh/src/query.rs b/zenoh/src/query.rs index 05f9a3557f..837ed69f22 100644 --- a/zenoh/src/query.rs +++ b/zenoh/src/query.rs @@ -169,13 +169,13 @@ impl QoSBuilderTrait for GetBuilder<'_, '_, DefaultHandler> { } impl ValueBuilderTrait for GetBuilder<'_, '_, Handler> { - fn with_encoding>(self, encoding: T) -> Self { - let value = Some(self.value.unwrap_or_default().with_encoding(encoding)); + fn encoding>(self, encoding: T) -> Self { + let value = Some(self.value.unwrap_or_default().encoding(encoding)); Self { value, ..self } } - fn with_payload>(self, payload: T) -> Self { - let value = Some(self.value.unwrap_or_default().with_payload(payload)); + fn payload>(self, payload: T) -> Self { + let value = Some(self.value.unwrap_or_default().payload(payload)); Self { value, ..self } } } diff --git a/zenoh/src/queryable.rs b/zenoh/src/queryable.rs index 4f478e1ce7..37f914d0e0 100644 --- a/zenoh/src/queryable.rs +++ b/zenoh/src/queryable.rs @@ -228,7 +228,7 @@ impl<'a> ReplySampleBuilder<'a> { query: self.query, sample_builder: self.sample_builder.into(), }; - builder.with_payload(payload) + 
builder.payload(payload) } pub fn delete(self) -> ReplyDelBuilder<'a> { ReplyDelBuilder { @@ -366,16 +366,16 @@ impl QoSBuilderTrait for ReplyBuilder<'_> { } impl ValueBuilderTrait for ReplyBuilder<'_> { - fn with_encoding>(self, encoding: T) -> Self { + fn encoding>(self, encoding: T) -> Self { Self { - sample_builder: self.sample_builder.with_encoding(encoding), + sample_builder: self.sample_builder.encoding(encoding), ..self } } - fn with_payload>(self, payload: T) -> Self { + fn payload>(self, payload: T) -> Self { Self { - sample_builder: self.sample_builder.with_payload(payload), + sample_builder: self.sample_builder.payload(payload), ..self } } diff --git a/zenoh/src/sample.rs b/zenoh/src/sample.rs index 163ae2090a..813bc1c63e 100644 --- a/zenoh/src/sample.rs +++ b/zenoh/src/sample.rs @@ -570,7 +570,7 @@ impl Sample { impl From for Value { fn from(sample: Sample) -> Self { - Value::new(sample.payload).with_encoding(sample.encoding) + Value::new(sample.payload).encoding(sample.encoding) } } diff --git a/zenoh/src/sample_builder.rs b/zenoh/src/sample_builder.rs index a113a9c953..0996f17cf9 100644 --- a/zenoh/src/sample_builder.rs +++ b/zenoh/src/sample_builder.rs @@ -56,9 +56,9 @@ pub trait SampleBuilderTrait { pub trait ValueBuilderTrait { /// Set the [`Encoding`] - fn with_encoding>(self, encoding: T) -> Self; + fn encoding>(self, encoding: T) -> Self; /// Sets the payload - fn with_payload>(self, payload: T) -> Self; + fn payload>(self, payload: T) -> Self; } #[derive(Debug)] @@ -213,13 +213,13 @@ impl QoSBuilderTrait for PutSampleBuilder { } impl ValueBuilderTrait for PutSampleBuilder { - fn with_encoding>(self, encoding: T) -> Self { + fn encoding>(self, encoding: T) -> Self { Self(SampleBuilder(Sample { encoding: encoding.into(), ..self.0 .0 })) } - fn with_payload>(self, payload: T) -> Self { + fn payload>(self, payload: T) -> Self { Self(SampleBuilder(Sample { payload: payload.into(), ..self.0 .0 diff --git a/zenoh/src/value.rs b/zenoh/src/value.rs 
index 2e288c64ad..6d4de1366c 100644 --- a/zenoh/src/value.rs +++ b/zenoh/src/value.rs @@ -46,13 +46,13 @@ impl Value { } impl ValueBuilderTrait for Value { - fn with_encoding>(self, encoding: T) -> Self { + fn encoding>(self, encoding: T) -> Self { Self { encoding: encoding.into(), ..self } } - fn with_payload>(self, payload: T) -> Self { + fn payload>(self, payload: T) -> Self { Self { payload: payload.into(), ..self From 9809799b36a6210a9e1f2bbb5e5314540ddb0589 Mon Sep 17 00:00:00 2001 From: Alexander Date: Thu, 28 Mar 2024 13:25:01 +0100 Subject: [PATCH 067/124] Add protocol version to error message (#871) --- io/zenoh-transport/src/unicast/establishment/accept.rs | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/io/zenoh-transport/src/unicast/establishment/accept.rs b/io/zenoh-transport/src/unicast/establishment/accept.rs index 72e676f6ec..7648f16e7d 100644 --- a/io/zenoh-transport/src/unicast/establishment/accept.rs +++ b/io/zenoh-transport/src/unicast/establishment/accept.rs @@ -167,9 +167,11 @@ impl<'a, 'b: 'a> AcceptFsm for &'a mut AcceptLink<'b> { // Check if the version is supported if init_syn.version != input.mine_version { let e = zerror!( - "Rejecting InitSyn on {} because of unsupported Zenoh version from peer: {}", + "Rejecting InitSyn on {} because of unsupported Zenoh protocol version (expected: {}, received: {}) from: {}", self.link, - init_syn.zid + input.mine_version, + init_syn.version, + init_syn.zid, ); return Err((e.into(), Some(close::reason::INVALID))); } From c427ac732861fd775f1b275ca7948719f16fbad5 Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Thu, 28 Mar 2024 13:42:52 +0100 Subject: [PATCH 068/124] resolvable removed from simple builders --- .../src/replica/aligner.rs | 5 +- .../src/replica/storage.rs | 29 ++++----- zenoh-ext/src/querying_subscriber.rs | 5 +- zenoh/src/queryable.rs | 9 +-- zenoh/src/sample.rs | 18 ------ zenoh/src/sample_builder.rs | 60 ++++--------------- 6 files changed, 32 insertions(+), 
94 deletions(-) diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs b/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs index 6527d54c66..3a6cc0444d 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs @@ -24,7 +24,6 @@ use zenoh::prelude::r#async::*; use zenoh::sample_builder::{PutSampleBuilder, TimestampBuilderTrait, ValueBuilderTrait}; use zenoh::time::Timestamp; use zenoh::Session; -use zenoh_core::{AsyncResolve, SyncResolve}; pub struct Aligner { session: Arc, @@ -113,7 +112,7 @@ impl Aligner { let sample = PutSampleBuilder::new(key, payload) .encoding(encoding) .timestamp(ts) - .res_sync(); + .into(); log::debug!("[ALIGNER] Adding {:?} to storage", sample); self.tx_sample.send_async(sample).await.unwrap_or_else(|e| { log::error!("[ALIGNER] Error adding sample to storage: {}", e) @@ -331,7 +330,7 @@ impl Aligner { .get(&selector) .consolidation(zenoh::query::ConsolidationMode::None) .accept_replies(zenoh::query::ReplyKeyExpr::Any) - .res_async() + .res() .await { Ok(replies) => { diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs index 8e60ee320e..9e9f8914d0 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs @@ -24,6 +24,7 @@ use std::time::{SystemTime, UNIX_EPOCH}; use zenoh::buffers::buffer::SplitBuffer; use zenoh::buffers::ZBuf; use zenoh::key_expr::KeyExpr; +use zenoh::prelude::r#async::*; use zenoh::query::{ConsolidationMode, QueryTarget}; use zenoh::sample::{Sample, SampleKind}; use zenoh::sample_builder::{ @@ -34,7 +35,6 @@ use zenoh::value::Value; use zenoh::{Result as ZResult, Session, SessionDeclarations}; use zenoh_backend_traits::config::{GarbageCollectionConfig, StorageConfig}; use zenoh_backend_traits::{Capability, History, Persistence, StorageInsertionResult, 
StoredData}; -use zenoh_core::{AsyncResolve, SyncResolve}; use zenoh_keyexpr::key_expr::OwnedKeyExpr; use zenoh_keyexpr::keyexpr_tree::impls::KeyedSetProvider; use zenoh_keyexpr::keyexpr_tree::{support::NonWild, support::UnknownWildness, KeBoxTree}; @@ -144,12 +144,7 @@ impl StorageService { t.add_async(gc).await; // subscribe on key_expr - let storage_sub = match self - .session - .declare_subscriber(&self.key_expr) - .res_async() - .await - { + let storage_sub = match self.session.declare_subscriber(&self.key_expr).res().await { Ok(storage_sub) => storage_sub, Err(e) => { log::error!("Error starting storage '{}': {}", self.name, e); @@ -162,7 +157,7 @@ impl StorageService { .session .declare_queryable(&self.key_expr) .complete(self.complete) - .res_async() + .res() .await { Ok(storage_queryable) => storage_queryable, @@ -239,7 +234,7 @@ impl StorageService { } }; let timestamp = sample.timestamp().cloned().unwrap_or(new_reception_timestamp()); - let sample = SampleBuilder::from(sample).timestamp(timestamp).res_sync(); + let sample = SampleBuilder::from(sample).timestamp(timestamp).into(); self.process_sample(sample).await; }, // on query on key_expr @@ -303,7 +298,7 @@ impl StorageService { ); // there might be the case that the actual update was outdated due to a wild card update, but not stored yet in the storage. 
// get the relevant wild card entry and use that value and timestamp to update the storage - let sample_to_store = match self + let sample_to_store: Sample = match self .ovderriding_wild_update(&k, sample.timestamp().unwrap()) .await { @@ -317,17 +312,17 @@ impl StorageService { PutSampleBuilder::new(KeyExpr::from(k.clone()), payload) .encoding(encoding) .timestamp(data.timestamp) - .res_sync() + .into() } Some(Update { kind: SampleKind::Delete, data, }) => DeleteSampleBuilder::new(KeyExpr::from(k.clone())) .timestamp(data.timestamp) - .res_sync(), + .into(), None => SampleBuilder::from(sample.clone()) .keyexpr(k.clone()) - .res_sync(), + .into(), }; let stripped_key = match self.strip_prefix(sample_to_store.key_expr()) { @@ -534,7 +529,7 @@ impl StorageService { .reply(key.clone(), payload) .encoding(encoding) .timestamp(entry.timestamp) - .res_async() + .res() .await { log::warn!( @@ -569,7 +564,7 @@ impl StorageService { .reply(q.key_expr().clone(), payload) .encoding(encoding) .timestamp(entry.timestamp) - .res_async() + .res() .await { log::warn!( @@ -584,7 +579,7 @@ impl StorageService { let err_message = format!("Storage '{}' raised an error on query: {}", self.name, e); log::warn!("{}", err_message); - if let Err(e) = q.reply_err(err_message).res_async().await { + if let Err(e) = q.reply_err(err_message).res().await { log::warn!( "Storage '{}' raised an error replying a query: {}", self.name, @@ -666,7 +661,7 @@ impl StorageService { .get(KeyExpr::from(&self.key_expr).with_parameters("_time=[..]")) .target(QueryTarget::All) .consolidation(ConsolidationMode::None) - .res_async() + .res() .await { Ok(replies) => replies, diff --git a/zenoh-ext/src/querying_subscriber.rs b/zenoh-ext/src/querying_subscriber.rs index 52a4263396..728e9cfa51 100644 --- a/zenoh-ext/src/querying_subscriber.rs +++ b/zenoh-ext/src/querying_subscriber.rs @@ -665,8 +665,9 @@ impl<'a, Receiver> FetchingSubscriber<'a, Receiver> { // ensure the sample has a timestamp, thus it will always 
be sorted into the MergeQueue // after any timestamped Sample possibly coming from a fetch reply. let timestamp = s.timestamp().cloned().unwrap_or(new_reception_timestamp()); - let s = SampleBuilder::from(s).timestamp(timestamp).res_sync(); - state.merge_queue.push(s); + state + .merge_queue + .push(SampleBuilder::from(s).timestamp(timestamp).into()); } } }; diff --git a/zenoh/src/queryable.rs b/zenoh/src/queryable.rs index 37f914d0e0..a52c96c871 100644 --- a/zenoh/src/queryable.rs +++ b/zenoh/src/queryable.rs @@ -294,8 +294,7 @@ impl Resolvable for ReplySampleBuilder<'_> { impl SyncResolve for ReplySampleBuilder<'_> { fn res_sync(self) -> ::To { - let sample = self.sample_builder.res_sync(); - self.query._reply_sample(sample) + self.query._reply_sample(self.sample_builder.into()) } } @@ -453,8 +452,7 @@ impl<'a> Resolvable for ReplyBuilder<'a> { impl SyncResolve for ReplyBuilder<'_> { fn res_sync(self) -> ::To { - let sample = self.sample_builder.res_sync(); - self.query._reply_sample(sample) + self.query._reply_sample(self.sample_builder.into()) } } @@ -464,8 +462,7 @@ impl<'a> Resolvable for ReplyDelBuilder<'a> { impl SyncResolve for ReplyDelBuilder<'_> { fn res_sync(self) -> ::To { - let sample = self.sample_builder.res_sync(); - self.query._reply_sample(sample) + self.query._reply_sample(self.sample_builder.into()) } } diff --git a/zenoh/src/sample.rs b/zenoh/src/sample.rs index 813bc1c63e..870b25768e 100644 --- a/zenoh/src/sample.rs +++ b/zenoh/src/sample.rs @@ -22,7 +22,6 @@ use crate::Priority; #[zenoh_macros::unstable] use serde::Serialize; use std::{convert::TryFrom, fmt}; -use zenoh_core::{AsyncResolve, Resolvable, SyncResolve}; use zenoh_protocol::core::EntityGlobalId; use zenoh_protocol::network::declare::ext::QoSType; use zenoh_protocol::{core::CongestionControl, zenoh}; @@ -595,23 +594,6 @@ impl From for QoS { } } -impl Resolvable for QoSBuilder { - type To = QoS; -} - -impl SyncResolve for QoSBuilder { - fn res_sync(self) -> ::To { - self.0 - } -} - 
-impl AsyncResolve for QoSBuilder { - type Future = futures::future::Ready; - fn res_async(self) -> Self::Future { - futures::future::ready(self.0) - } -} - impl QoSBuilderTrait for QoSBuilder { fn congestion_control(self, congestion_control: CongestionControl) -> Self { let mut inner = self.0.inner; diff --git a/zenoh/src/sample_builder.rs b/zenoh/src/sample_builder.rs index 0996f17cf9..f74afdf2b3 100644 --- a/zenoh/src/sample_builder.rs +++ b/zenoh/src/sample_builder.rs @@ -24,9 +24,6 @@ use crate::Sample; use crate::SampleKind; use uhlc::Timestamp; use zenoh_core::zresult; -use zenoh_core::AsyncResolve; -use zenoh_core::Resolvable; -use zenoh_core::SyncResolve; use zenoh_protocol::core::CongestionControl; pub trait QoSBuilderTrait { @@ -124,17 +121,17 @@ impl SampleBuilderTrait for SampleBuilder { impl QoSBuilderTrait for SampleBuilder { fn congestion_control(self, congestion_control: CongestionControl) -> Self { let qos: QoSBuilder = self.0.qos.into(); - let qos = qos.congestion_control(congestion_control).res_sync(); + let qos = qos.congestion_control(congestion_control).into(); Self(Sample { qos, ..self.0 }) } fn priority(self, priority: Priority) -> Self { let qos: QoSBuilder = self.0.qos.into(); - let qos = qos.priority(priority).res_sync(); + let qos = qos.priority(priority).into(); Self(Sample { qos, ..self.0 }) } fn express(self, is_express: bool) -> Self { let qos: QoSBuilder = self.0.qos.into(); - let qos = qos.express(is_express).res_sync(); + let qos = qos.express(is_express).into(); Self(Sample { qos, ..self.0 }) } } @@ -325,53 +322,20 @@ impl TryFrom for DeleteSampleBuilder { } } -impl Resolvable for SampleBuilder { - type To = Sample; -} - -impl Resolvable for PutSampleBuilder { - type To = Sample; -} - -impl Resolvable for DeleteSampleBuilder { - type To = Sample; -} - -impl SyncResolve for SampleBuilder { - fn res_sync(self) -> Self::To { - self.0 - } -} - -impl SyncResolve for PutSampleBuilder { - fn res_sync(self) -> Self::To { - 
self.0.res_sync() - } -} - -impl SyncResolve for DeleteSampleBuilder { - fn res_sync(self) -> Self::To { - self.0.res_sync() - } -} - -impl AsyncResolve for SampleBuilder { - type Future = futures::future::Ready; - fn res_async(self) -> Self::Future { - futures::future::ready(self.0) +impl From for Sample { + fn from(sample_builder: SampleBuilder) -> Self { + sample_builder.0 } } -impl AsyncResolve for PutSampleBuilder { - type Future = futures::future::Ready; - fn res_async(self) -> Self::Future { - self.0.res_async() +impl From for Sample { + fn from(put_sample_builder: PutSampleBuilder) -> Self { + put_sample_builder.0 .0 } } -impl AsyncResolve for DeleteSampleBuilder { - type Future = futures::future::Ready; - fn res_async(self) -> Self::Future { - self.0.res_async() +impl From for Sample { + fn from(delete_sample_builder: DeleteSampleBuilder) -> Self { + delete_sample_builder.0 .0 } } From 6c6050b477a9f69040bd0f67748e15b7eeca242a Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Thu, 28 Mar 2024 15:15:26 +0100 Subject: [PATCH 069/124] Fix cargo clippy --- zenoh/tests/connection_retry.rs | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/zenoh/tests/connection_retry.rs b/zenoh/tests/connection_retry.rs index db84d7bd5d..4f789e6db1 100644 --- a/zenoh/tests/connection_retry.rs +++ b/zenoh/tests/connection_retry.rs @@ -36,9 +36,7 @@ fn retry_config_overriding() { .insert_json5("listen/exit_on_failure", "false") .unwrap(); - let expected = vec![ - // global value - ConnectionRetryConf { + let expected = [ConnectionRetryConf { period_init_ms: 3000, period_max_ms: 6000, period_increase_factor: 1.5, @@ -57,8 +55,7 @@ fn retry_config_overriding() { period_max_ms: 60000, period_increase_factor: 15., exit_on_failure: true, - }, - ]; + }]; for (i, endpoint) in config.listen().endpoints().iter().enumerate() { let retry_config = zenoh_config::get_retry_config(&config, Some(endpoint), true); From 1a2ba1a75358d3703265dccbb3707680988a2647 Mon Sep 17 
00:00:00 2001 From: Luca Cominardi Date: Thu, 28 Mar 2024 15:21:43 +0100 Subject: [PATCH 070/124] Fix code format --- zenoh/tests/connection_retry.rs | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/zenoh/tests/connection_retry.rs b/zenoh/tests/connection_retry.rs index 4f789e6db1..fcb071b489 100644 --- a/zenoh/tests/connection_retry.rs +++ b/zenoh/tests/connection_retry.rs @@ -36,7 +36,8 @@ fn retry_config_overriding() { .insert_json5("listen/exit_on_failure", "false") .unwrap(); - let expected = [ConnectionRetryConf { + let expected = [ + ConnectionRetryConf { period_init_ms: 3000, period_max_ms: 6000, period_increase_factor: 1.5, @@ -55,7 +56,8 @@ fn retry_config_overriding() { period_max_ms: 60000, period_increase_factor: 15., exit_on_failure: true, - }]; + }, + ]; for (i, endpoint) in config.listen().endpoints().iter().enumerate() { let retry_config = zenoh_config::get_retry_config(&config, Some(endpoint), true); From 7162ff13f34a27ff7455b447536522adc23bf7a5 Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Thu, 28 Mar 2024 15:43:35 +0100 Subject: [PATCH 071/124] Fix cargo clippy --- zenoh/src/sample.rs | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/zenoh/src/sample.rs b/zenoh/src/sample.rs index 5f0234f723..2af8fb7106 100644 --- a/zenoh/src/sample.rs +++ b/zenoh/src/sample.rs @@ -69,8 +69,12 @@ pub struct SourceInfo { #[test] #[cfg(feature = "unstable")] -#[cfg(not(all(target_os = "macos", target_arch = "aarch64")))] fn source_info_stack_size() { + use crate::{ + sample::{SourceInfo, SourceSn}, + ZenohId, + }; + assert_eq!(std::mem::size_of::(), 16); assert_eq!(std::mem::size_of::>(), 17); assert_eq!(std::mem::size_of::>(), 16); From 10baf8c9cf6050dc6c7f682a3d444710fdb93aea Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Thu, 28 Mar 2024 16:04:11 +0100 Subject: [PATCH 072/124] doctests fixed --- zenoh/src/publication.rs | 6 +++--- zenoh/src/session.rs | 5 +++-- 2 files changed, 6 insertions(+), 5 
deletions(-) diff --git a/zenoh/src/publication.rs b/zenoh/src/publication.rs index 8f52d5e4fa..f8f15eca56 100644 --- a/zenoh/src/publication.rs +++ b/zenoh/src/publication.rs @@ -73,12 +73,12 @@ pub struct DeleteBuilder<'a, 'b> { /// # async_std::task::block_on(async { /// use zenoh::prelude::r#async::*; /// use zenoh::publication::CongestionControl; -/// use zenoh::sample_builder::{PutSampleBuilderTrait, QoSBuilderTrait}; +/// use zenoh::sample_builder::{ValueBuilderTrait, QoSBuilderTrait}; /// /// let session = zenoh::open(config::peer()).res().await.unwrap(); /// session /// .put("key/expression", "payload") -/// .with_encoding(Encoding::TEXT_PLAIN) +/// .encoding(Encoding::TEXT_PLAIN) /// .congestion_control(CongestionControl::Block) /// .res() /// .await @@ -932,7 +932,7 @@ impl<'a> Sink for Publisher<'a> { /// # async_std::task::block_on(async { /// use zenoh::prelude::r#async::*; /// use zenoh::publication::CongestionControl; -/// use zenoh::sample_builder::{PutSampleBuilderTrait, QoSBuilderTrait}; +/// use zenoh::sample_builder::QoSBuilderTrait; /// /// let session = zenoh::open(config::peer()).res().await.unwrap(); /// let publisher = session diff --git a/zenoh/src/session.rs b/zenoh/src/session.rs index 5b80adb0e5..cc30e12293 100644 --- a/zenoh/src/session.rs +++ b/zenoh/src/session.rs @@ -683,12 +683,13 @@ impl Session { /// ``` /// # async_std::task::block_on(async { /// use zenoh::prelude::r#async::*; - /// use zenoh::sample_builder::PutSampleBuilderTrait; + /// use zenoh::sample_builder::SampleBuilderTrait; + /// use zenoh::sample_builder::ValueBuilderTrait; /// /// let session = zenoh::open(config::peer()).res().await.unwrap(); /// session /// .put("key/expression", "payload") - /// .with_encoding(Encoding::TEXT_PLAIN) + /// .encoding(Encoding::TEXT_PLAIN) /// .res() /// .await /// .unwrap(); From 48cb96ba7ab43c13a212fe4bb5943edb38089b9b Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Thu, 28 Mar 2024 16:41:55 +0100 Subject: [PATCH 073/124] 
sample bulider in separarte module --- zenoh/src/{sample_builder.rs => sample/builder.rs} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename zenoh/src/{sample_builder.rs => sample/builder.rs} (100%) diff --git a/zenoh/src/sample_builder.rs b/zenoh/src/sample/builder.rs similarity index 100% rename from zenoh/src/sample_builder.rs rename to zenoh/src/sample/builder.rs From ddb93a2364bbe4db227d54b1107539b717fa0d83 Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Thu, 28 Mar 2024 16:42:05 +0100 Subject: [PATCH 074/124] separate module --- examples/examples/z_ping.rs | 2 +- examples/examples/z_pong.rs | 2 +- examples/examples/z_pub.rs | 2 +- examples/examples/z_pub_shm_thr.rs | 2 +- examples/examples/z_pub_thr.rs | 2 +- plugins/zenoh-plugin-rest/examples/z_serve_sse.rs | 2 +- plugins/zenoh-plugin-rest/src/lib.rs | 2 +- .../src/replica/align_queryable.rs | 4 ++-- .../src/replica/aligner.rs | 2 +- .../src/replica/storage.rs | 4 ++-- zenoh-ext/src/group.rs | 2 +- zenoh-ext/src/querying_subscriber.rs | 2 +- zenoh/src/lib.rs | 1 - zenoh/src/net/runtime/adminspace.rs | 2 +- zenoh/src/prelude.rs | 4 ++++ zenoh/src/publication.rs | 10 +++++----- zenoh/src/query.rs | 2 +- zenoh/src/queryable.rs | 4 ++-- zenoh/src/sample.rs | 4 +++- zenoh/src/session.rs | 3 +-- zenoh/src/value.rs | 2 +- zenoh/tests/attachments.rs | 4 ++-- zenoh/tests/qos.rs | 2 +- zenoh/tests/routing.rs | 2 +- zenoh/tests/session.rs | 2 +- zenoh/tests/unicity.rs | 2 +- 26 files changed, 38 insertions(+), 34 deletions(-) diff --git a/examples/examples/z_ping.rs b/examples/examples/z_ping.rs index b40afc1f53..59bcaddadc 100644 --- a/examples/examples/z_ping.rs +++ b/examples/examples/z_ping.rs @@ -16,7 +16,7 @@ use std::time::{Duration, Instant}; use zenoh::config::Config; use zenoh::prelude::sync::*; use zenoh::publication::CongestionControl; -use zenoh::sample_builder::QoSBuilderTrait; +use zenoh::sample::builder::QoSBuilderTrait; use zenoh_examples::CommonArgs; fn main() { diff --git 
a/examples/examples/z_pong.rs b/examples/examples/z_pong.rs index 0003958b5d..e0fa079629 100644 --- a/examples/examples/z_pong.rs +++ b/examples/examples/z_pong.rs @@ -15,7 +15,7 @@ use clap::Parser; use zenoh::config::Config; use zenoh::prelude::sync::*; use zenoh::publication::CongestionControl; -use zenoh::sample_builder::QoSBuilderTrait; +use zenoh::sample::builder::QoSBuilderTrait; use zenoh_examples::CommonArgs; fn main() { diff --git a/examples/examples/z_pub.rs b/examples/examples/z_pub.rs index 7166981e72..c4c592b47c 100644 --- a/examples/examples/z_pub.rs +++ b/examples/examples/z_pub.rs @@ -16,7 +16,7 @@ use clap::Parser; use std::time::Duration; use zenoh::config::Config; use zenoh::prelude::r#async::*; -use zenoh::sample_builder::SampleBuilderTrait; +use zenoh::sample::builder::SampleBuilderTrait; use zenoh_examples::CommonArgs; #[async_std::main] diff --git a/examples/examples/z_pub_shm_thr.rs b/examples/examples/z_pub_shm_thr.rs index 5230ea3ce6..a784429906 100644 --- a/examples/examples/z_pub_shm_thr.rs +++ b/examples/examples/z_pub_shm_thr.rs @@ -15,7 +15,7 @@ use clap::Parser; use zenoh::config::Config; use zenoh::prelude::r#async::*; use zenoh::publication::CongestionControl; -use zenoh::sample_builder::QoSBuilderTrait; +use zenoh::sample::builder::QoSBuilderTrait; use zenoh::shm::SharedMemoryManager; use zenoh_examples::CommonArgs; diff --git a/examples/examples/z_pub_thr.rs b/examples/examples/z_pub_thr.rs index 7e7c1ac9b5..78d54111a8 100644 --- a/examples/examples/z_pub_thr.rs +++ b/examples/examples/z_pub_thr.rs @@ -16,7 +16,7 @@ use clap::Parser; use std::convert::TryInto; use zenoh::prelude::sync::*; use zenoh::publication::CongestionControl; -use zenoh::sample_builder::QoSBuilderTrait; +use zenoh::sample::builder::QoSBuilderTrait; use zenoh_examples::CommonArgs; fn main() { diff --git a/plugins/zenoh-plugin-rest/examples/z_serve_sse.rs b/plugins/zenoh-plugin-rest/examples/z_serve_sse.rs index 48f152e488..c353826fab 100644 --- 
a/plugins/zenoh-plugin-rest/examples/z_serve_sse.rs +++ b/plugins/zenoh-plugin-rest/examples/z_serve_sse.rs @@ -15,7 +15,7 @@ use clap::{arg, Command}; use std::time::Duration; use zenoh::prelude::r#async::*; use zenoh::publication::CongestionControl; -use zenoh::sample_builder::QoSBuilderTrait; +use zenoh::sample::builder::QoSBuilderTrait; use zenoh::{config::Config, key_expr::keyexpr}; const HTML: &str = r#" diff --git a/plugins/zenoh-plugin-rest/src/lib.rs b/plugins/zenoh-plugin-rest/src/lib.rs index c90bbe5ac1..94796c518d 100644 --- a/plugins/zenoh-plugin-rest/src/lib.rs +++ b/plugins/zenoh-plugin-rest/src/lib.rs @@ -34,7 +34,7 @@ use zenoh::plugins::{RunningPluginTrait, ZenohPlugin}; use zenoh::prelude::r#async::*; use zenoh::query::{QueryConsolidation, Reply}; use zenoh::runtime::Runtime; -use zenoh::sample_builder::ValueBuilderTrait; +use zenoh::sample::builder::ValueBuilderTrait; use zenoh::selector::TIME_RANGE_KEY; use zenoh::Session; use zenoh_plugin_trait::{plugin_long_version, plugin_version, Plugin, PluginControl}; diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/align_queryable.rs b/plugins/zenoh-plugin-storage-manager/src/replica/align_queryable.rs index b2d2bdc399..729572601c 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/align_queryable.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/align_queryable.rs @@ -20,8 +20,8 @@ use std::str; use std::str::FromStr; use zenoh::payload::StringOrBase64; use zenoh::prelude::r#async::*; -use zenoh::sample_builder::TimestampBuilderTrait; -use zenoh::sample_builder::ValueBuilderTrait; +use zenoh::sample::builder::TimestampBuilderTrait; +use zenoh::sample::builder::ValueBuilderTrait; use zenoh::time::Timestamp; use zenoh::Session; diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs b/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs index 3a6cc0444d..1b7f945cee 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs +++ 
b/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs @@ -21,7 +21,7 @@ use std::str; use zenoh::key_expr::{KeyExpr, OwnedKeyExpr}; use zenoh::payload::StringOrBase64; use zenoh::prelude::r#async::*; -use zenoh::sample_builder::{PutSampleBuilder, TimestampBuilderTrait, ValueBuilderTrait}; +use zenoh::sample::builder::{PutSampleBuilder, TimestampBuilderTrait, ValueBuilderTrait}; use zenoh::time::Timestamp; use zenoh::Session; diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs index 9e9f8914d0..62468ac6a1 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs @@ -26,10 +26,10 @@ use zenoh::buffers::ZBuf; use zenoh::key_expr::KeyExpr; use zenoh::prelude::r#async::*; use zenoh::query::{ConsolidationMode, QueryTarget}; -use zenoh::sample::{Sample, SampleKind}; -use zenoh::sample_builder::{ +use zenoh::sample::builder::{ DeleteSampleBuilder, PutSampleBuilder, SampleBuilder, TimestampBuilderTrait, ValueBuilderTrait, }; +use zenoh::sample::{Sample, SampleKind}; use zenoh::time::{new_reception_timestamp, Timestamp, NTP64}; use zenoh::value::Value; use zenoh::{Result as ZResult, Session, SessionDeclarations}; diff --git a/zenoh-ext/src/group.rs b/zenoh-ext/src/group.rs index 39cd982c41..4ae3c77c9f 100644 --- a/zenoh-ext/src/group.rs +++ b/zenoh-ext/src/group.rs @@ -29,7 +29,7 @@ use zenoh::payload::PayloadReader; use zenoh::prelude::r#async::*; use zenoh::publication::Publisher; use zenoh::query::ConsolidationMode; -use zenoh::sample_builder::QoSBuilderTrait; +use zenoh::sample::builder::QoSBuilderTrait; use zenoh::Error as ZError; use zenoh::Result as ZResult; use zenoh::Session; diff --git a/zenoh-ext/src/querying_subscriber.rs b/zenoh-ext/src/querying_subscriber.rs index 728e9cfa51..5e80cb704c 100644 --- a/zenoh-ext/src/querying_subscriber.rs +++ b/zenoh-ext/src/querying_subscriber.rs @@ -20,7 +20,7 
@@ use std::time::Duration; use zenoh::handlers::{locked, DefaultHandler}; use zenoh::prelude::r#async::*; use zenoh::query::{QueryConsolidation, QueryTarget, ReplyKeyExpr}; -use zenoh::sample_builder::{SampleBuilder, TimestampBuilderTrait}; +use zenoh::sample::builder::{SampleBuilder, TimestampBuilderTrait}; use zenoh::subscriber::{Reliability, Subscriber}; use zenoh::time::{new_reception_timestamp, Timestamp}; use zenoh::Result as ZResult; diff --git a/zenoh/src/lib.rs b/zenoh/src/lib.rs index 24b21496ec..ed2f01f180 100644 --- a/zenoh/src/lib.rs +++ b/zenoh/src/lib.rs @@ -145,7 +145,6 @@ pub mod publication; pub mod query; pub mod queryable; pub mod sample; -pub mod sample_builder; pub mod subscriber; pub mod value; #[cfg(feature = "shared-memory")] diff --git a/zenoh/src/net/runtime/adminspace.rs b/zenoh/src/net/runtime/adminspace.rs index 070b3bcd3a..41295f6cd0 100644 --- a/zenoh/src/net/runtime/adminspace.rs +++ b/zenoh/src/net/runtime/adminspace.rs @@ -20,7 +20,7 @@ use crate::plugins::sealed::{self as plugins}; use crate::prelude::sync::SyncResolve; use crate::queryable::Query; use crate::queryable::QueryInner; -use crate::sample_builder::ValueBuilderTrait; +use crate::sample::builder::ValueBuilderTrait; use crate::value::Value; use async_std::task; use log::{error, trace}; diff --git a/zenoh/src/prelude.rs b/zenoh/src/prelude.rs index 26c93e1801..850148f506 100644 --- a/zenoh/src/prelude.rs +++ b/zenoh/src/prelude.rs @@ -60,6 +60,10 @@ pub(crate) mod common { #[zenoh_macros::unstable] pub use crate::publication::PublisherDeclarations; pub use zenoh_protocol::core::{CongestionControl, Reliability, WhatAmI}; + + pub use crate::sample::builder::{ + QoSBuilderTrait, SampleBuilderTrait, TimestampBuilderTrait, ValueBuilderTrait, + }; } /// Prelude to import when using Zenoh's sync API. 
diff --git a/zenoh/src/publication.rs b/zenoh/src/publication.rs index f8f15eca56..d2463610fb 100644 --- a/zenoh/src/publication.rs +++ b/zenoh/src/publication.rs @@ -15,12 +15,12 @@ //! Publishing primitives. use crate::net::primitives::Primitives; use crate::prelude::*; +use crate::sample::builder::{ + QoSBuilderTrait, SampleBuilderTrait, TimestampBuilderTrait, ValueBuilderTrait, +}; #[zenoh_macros::unstable] use crate::sample::Attachment; use crate::sample::{DataInfo, QoS, Sample, SampleFields, SampleKind}; -use crate::sample_builder::{ - QoSBuilderTrait, SampleBuilderTrait, TimestampBuilderTrait, ValueBuilderTrait, -}; use crate::SessionRef; use crate::Undeclarable; #[cfg(feature = "unstable")] @@ -73,7 +73,7 @@ pub struct DeleteBuilder<'a, 'b> { /// # async_std::task::block_on(async { /// use zenoh::prelude::r#async::*; /// use zenoh::publication::CongestionControl; -/// use zenoh::sample_builder::{ValueBuilderTrait, QoSBuilderTrait}; +/// use zenoh::sample::builder::{ValueBuilderTrait, QoSBuilderTrait}; /// /// let session = zenoh::open(config::peer()).res().await.unwrap(); /// session @@ -932,7 +932,7 @@ impl<'a> Sink for Publisher<'a> { /// # async_std::task::block_on(async { /// use zenoh::prelude::r#async::*; /// use zenoh::publication::CongestionControl; -/// use zenoh::sample_builder::QoSBuilderTrait; +/// use zenoh::sample::builder::QoSBuilderTrait; /// /// let session = zenoh::open(config::peer()).res().await.unwrap(); /// let publisher = session diff --git a/zenoh/src/query.rs b/zenoh/src/query.rs index 837ed69f22..3a7ee771b3 100644 --- a/zenoh/src/query.rs +++ b/zenoh/src/query.rs @@ -15,10 +15,10 @@ //! Query primitives. 
use crate::handlers::{locked, Callback, DefaultHandler}; use crate::prelude::*; +use crate::sample::builder::{QoSBuilderTrait, SampleBuilderTrait, ValueBuilderTrait}; #[zenoh_macros::unstable] use crate::sample::Attachment; use crate::sample::QoSBuilder; -use crate::sample_builder::{QoSBuilderTrait, SampleBuilderTrait, ValueBuilderTrait}; use crate::Session; use std::collections::HashMap; use std::future::Ready; diff --git a/zenoh/src/queryable.rs b/zenoh/src/queryable.rs index a52c96c871..2e3a1f585a 100644 --- a/zenoh/src/queryable.rs +++ b/zenoh/src/queryable.rs @@ -18,11 +18,11 @@ use crate::encoding::Encoding; use crate::handlers::{locked, DefaultHandler}; use crate::net::primitives::Primitives; use crate::prelude::*; -use crate::sample::SourceInfo; -use crate::sample_builder::{ +use crate::sample::builder::{ DeleteSampleBuilder, PutSampleBuilder, QoSBuilderTrait, SampleBuilder, SampleBuilderTrait, TimestampBuilderTrait, ValueBuilderTrait, }; +use crate::sample::SourceInfo; use crate::Id; use crate::SessionRef; use crate::Undeclarable; diff --git a/zenoh/src/sample.rs b/zenoh/src/sample.rs index 870b25768e..455d54318b 100644 --- a/zenoh/src/sample.rs +++ b/zenoh/src/sample.rs @@ -16,7 +16,7 @@ use crate::encoding::Encoding; use crate::payload::Payload; use crate::prelude::{KeyExpr, Value}; -use crate::sample_builder::{QoSBuilderTrait, ValueBuilderTrait}; +use crate::sample::builder::{QoSBuilderTrait, ValueBuilderTrait}; use crate::time::Timestamp; use crate::Priority; #[zenoh_macros::unstable] @@ -26,6 +26,8 @@ use zenoh_protocol::core::EntityGlobalId; use zenoh_protocol::network::declare::ext::QoSType; use zenoh_protocol::{core::CongestionControl, zenoh}; +pub mod builder; + pub type SourceSn = u64; /// The locality of samples to be received by subscribers or targeted by publishers. 
diff --git a/zenoh/src/session.rs b/zenoh/src/session.rs index cc30e12293..2f24673b5e 100644 --- a/zenoh/src/session.rs +++ b/zenoh/src/session.rs @@ -683,8 +683,7 @@ impl Session { /// ``` /// # async_std::task::block_on(async { /// use zenoh::prelude::r#async::*; - /// use zenoh::sample_builder::SampleBuilderTrait; - /// use zenoh::sample_builder::ValueBuilderTrait; + /// use zenoh::prelude::*; /// /// let session = zenoh::open(config::peer()).res().await.unwrap(); /// session diff --git a/zenoh/src/value.rs b/zenoh/src/value.rs index 6d4de1366c..8ea5aef19f 100644 --- a/zenoh/src/value.rs +++ b/zenoh/src/value.rs @@ -13,7 +13,7 @@ // //! Value primitives. -use crate::{encoding::Encoding, payload::Payload, sample_builder::ValueBuilderTrait}; +use crate::{encoding::Encoding, payload::Payload, sample::builder::ValueBuilderTrait}; /// A zenoh [`Value`] contains a `payload` and an [`Encoding`] that indicates how the [`Payload`] should be interpreted. #[non_exhaustive] diff --git a/zenoh/tests/attachments.rs b/zenoh/tests/attachments.rs index e87fc5243b..f50e33cf6f 100644 --- a/zenoh/tests/attachments.rs +++ b/zenoh/tests/attachments.rs @@ -14,7 +14,7 @@ #[cfg(feature = "unstable")] #[test] fn pubsub() { - use zenoh::{prelude::sync::*, sample_builder::SampleBuilderTrait}; + use zenoh::{prelude::sync::*, sample::builder::SampleBuilderTrait}; let zenoh = zenoh::open(Config::default()).res().unwrap(); let _sub = zenoh @@ -61,7 +61,7 @@ fn pubsub() { #[cfg(feature = "unstable")] #[test] fn queries() { - use zenoh::{prelude::sync::*, sample::Attachment, sample_builder::SampleBuilderTrait}; + use zenoh::{prelude::sync::*, sample::builder::SampleBuilderTrait, sample::Attachment}; let zenoh = zenoh::open(Config::default()).res().unwrap(); let _sub = zenoh diff --git a/zenoh/tests/qos.rs b/zenoh/tests/qos.rs index 8dc39423cb..46896e5432 100644 --- a/zenoh/tests/qos.rs +++ b/zenoh/tests/qos.rs @@ -15,7 +15,7 @@ use async_std::prelude::FutureExt; use async_std::task; use 
std::time::Duration; use zenoh::prelude::r#async::*; -use zenoh::sample_builder::QoSBuilderTrait; +use zenoh::sample::builder::QoSBuilderTrait; use zenoh::{publication::Priority, SessionDeclarations}; use zenoh_core::zasync_executor_init; diff --git a/zenoh/tests/routing.rs b/zenoh/tests/routing.rs index 123550852e..6585f8aae4 100644 --- a/zenoh/tests/routing.rs +++ b/zenoh/tests/routing.rs @@ -20,7 +20,7 @@ use std::sync::{atomic::AtomicUsize, Arc}; use std::time::Duration; use zenoh::config::{Config, ModeDependentValue}; use zenoh::prelude::r#async::*; -use zenoh::sample_builder::QoSBuilderTrait; +use zenoh::sample::builder::QoSBuilderTrait; use zenoh::{value::Value, Result}; use zenoh_core::zasync_executor_init; use zenoh_protocol::core::{WhatAmI, WhatAmIMatcher}; diff --git a/zenoh/tests/session.rs b/zenoh/tests/session.rs index 955ec7a73f..436643ac25 100644 --- a/zenoh/tests/session.rs +++ b/zenoh/tests/session.rs @@ -17,7 +17,7 @@ use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::Arc; use std::time::Duration; use zenoh::prelude::r#async::*; -use zenoh::sample_builder::QoSBuilderTrait; +use zenoh::sample::builder::QoSBuilderTrait; use zenoh_core::zasync_executor_init; const TIMEOUT: Duration = Duration::from_secs(60); diff --git a/zenoh/tests/unicity.rs b/zenoh/tests/unicity.rs index 3d1327398d..80f722205b 100644 --- a/zenoh/tests/unicity.rs +++ b/zenoh/tests/unicity.rs @@ -17,7 +17,7 @@ use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::Arc; use std::time::Duration; use zenoh::prelude::r#async::*; -use zenoh::sample_builder::QoSBuilderTrait; +use zenoh::sample::builder::QoSBuilderTrait; use zenoh_core::zasync_executor_init; const TIMEOUT: Duration = Duration::from_secs(60); From ab96aab5345e7556c0c6ae1329c46efe45a31b63 Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Thu, 28 Mar 2024 16:53:43 +0100 Subject: [PATCH 075/124] SampleBuilder put/delete --- .../src/replica/aligner.rs | 4 +-- .../src/replica/storage.rs | 8 +++--- 
zenoh/src/queryable.rs | 4 +-- zenoh/src/sample/builder.rs | 27 +++++++++---------- 4 files changed, 20 insertions(+), 23 deletions(-) diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs b/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs index 1b7f945cee..5121f0b445 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs @@ -21,7 +21,7 @@ use std::str; use zenoh::key_expr::{KeyExpr, OwnedKeyExpr}; use zenoh::payload::StringOrBase64; use zenoh::prelude::r#async::*; -use zenoh::sample::builder::{PutSampleBuilder, TimestampBuilderTrait, ValueBuilderTrait}; +use zenoh::sample::builder::{SampleBuilder, TimestampBuilderTrait, ValueBuilderTrait}; use zenoh::time::Timestamp; use zenoh::Session; @@ -109,7 +109,7 @@ impl Aligner { let Value { payload, encoding, .. } = value; - let sample = PutSampleBuilder::new(key, payload) + let sample = SampleBuilder::put(key, payload) .encoding(encoding) .timestamp(ts) .into(); diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs index 62468ac6a1..feebfb588a 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs @@ -26,9 +26,7 @@ use zenoh::buffers::ZBuf; use zenoh::key_expr::KeyExpr; use zenoh::prelude::r#async::*; use zenoh::query::{ConsolidationMode, QueryTarget}; -use zenoh::sample::builder::{ - DeleteSampleBuilder, PutSampleBuilder, SampleBuilder, TimestampBuilderTrait, ValueBuilderTrait, -}; +use zenoh::sample::builder::{SampleBuilder, TimestampBuilderTrait, ValueBuilderTrait}; use zenoh::sample::{Sample, SampleKind}; use zenoh::time::{new_reception_timestamp, Timestamp, NTP64}; use zenoh::value::Value; @@ -309,7 +307,7 @@ impl StorageService { let Value { payload, encoding, .. 
} = data.value; - PutSampleBuilder::new(KeyExpr::from(k.clone()), payload) + SampleBuilder::put(KeyExpr::from(k.clone()), payload) .encoding(encoding) .timestamp(data.timestamp) .into() @@ -317,7 +315,7 @@ impl StorageService { Some(Update { kind: SampleKind::Delete, data, - }) => DeleteSampleBuilder::new(KeyExpr::from(k.clone())) + }) => SampleBuilder::delete(KeyExpr::from(k.clone())) .timestamp(data.timestamp) .into(), None => SampleBuilder::from(sample.clone()) diff --git a/zenoh/src/queryable.rs b/zenoh/src/queryable.rs index 2e3a1f585a..c9492394c4 100644 --- a/zenoh/src/queryable.rs +++ b/zenoh/src/queryable.rs @@ -137,8 +137,8 @@ impl Query { IntoKeyExpr: Into>, IntoPayload: Into, { - let sample_builder = PutSampleBuilder::new(key_expr, payload) - .with_qos(response::ext::QoSType::RESPONSE.into()); + let sample_builder = + SampleBuilder::put(key_expr, payload).with_qos(response::ext::QoSType::RESPONSE.into()); ReplyBuilder { query: self, sample_builder, diff --git a/zenoh/src/sample/builder.rs b/zenoh/src/sample/builder.rs index f74afdf2b3..8c507c8119 100644 --- a/zenoh/src/sample/builder.rs +++ b/zenoh/src/sample/builder.rs @@ -62,22 +62,21 @@ pub trait ValueBuilderTrait { pub struct SampleBuilder(Sample); impl SampleBuilder { - pub fn new(key_expr: IntoKeyExpr) -> Self + pub fn put( + key_expr: IntoKeyExpr, + payload: IntoPayload, + ) -> PutSampleBuilder where IntoKeyExpr: Into>, + IntoPayload: Into, { - Self(Sample { - key_expr: key_expr.into(), - payload: Payload::empty(), - kind: SampleKind::default(), - encoding: Encoding::default(), - timestamp: None, - qos: QoS::default(), - #[cfg(feature = "unstable")] - source_info: SourceInfo::empty(), - #[cfg(feature = "unstable")] - attachment: None, - }) + PutSampleBuilder::new(key_expr, payload) + } + pub fn delete(key_expr: IntoKeyExpr) -> DeleteSampleBuilder + where + IntoKeyExpr: Into>, + { + DeleteSampleBuilder::new(key_expr) } /// Allows to change keyexpr of [`Sample`] pub fn keyexpr(self, key_expr: 
IntoKeyExpr) -> Self @@ -149,7 +148,7 @@ impl From for PutSampleBuilder { } impl PutSampleBuilder { - pub fn new(key_expr: IntoKeyExpr, payload: IntoPayload) -> Self + fn new(key_expr: IntoKeyExpr, payload: IntoPayload) -> Self where IntoKeyExpr: Into>, IntoPayload: Into, From 82c1c999d0f73cc2cc09121e56067591971f5146 Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Thu, 28 Mar 2024 17:28:56 +0100 Subject: [PATCH 076/124] set value api --- examples/examples/z_get.rs | 21 +++++++----- plugins/zenoh-plugin-rest/src/lib.rs | 2 +- zenoh/src/publication.rs | 17 ++++++++++ zenoh/src/query.rs | 49 ++++++++++++---------------- zenoh/src/queryable.rs | 7 ++++ zenoh/src/sample/builder.rs | 12 +++++++ zenoh/src/value.rs | 18 ++++++++++ zenoh/tests/attachments.rs | 6 ++-- zenoh/tests/handler.rs | 4 +-- 9 files changed, 93 insertions(+), 43 deletions(-) diff --git a/examples/examples/z_get.rs b/examples/examples/z_get.rs index dce74d367b..074f931eff 100644 --- a/examples/examples/z_get.rs +++ b/examples/examples/z_get.rs @@ -28,15 +28,18 @@ async fn main() { let session = zenoh::open(config).res().await.unwrap(); println!("Sending Query '{selector}'..."); - let replies = match value { - Some(value) => session.get(&selector).with_value(value), - None => session.get(&selector), - } - .target(target) - .timeout(timeout) - .res() - .await - .unwrap(); + // let replies = match value { + // Some(value) => session.get(&selector).payload(value), + // None => session.get(&selector), + // } + let replies = session + .get(&selector) + .value(value.map(Value::from)) + .target(target) + .timeout(timeout) + .res() + .await + .unwrap(); while let Ok(reply) = replies.recv_async().await { match reply.sample { Ok(sample) => { diff --git a/plugins/zenoh-plugin-rest/src/lib.rs b/plugins/zenoh-plugin-rest/src/lib.rs index 94796c518d..f78c541eff 100644 --- a/plugins/zenoh-plugin-rest/src/lib.rs +++ b/plugins/zenoh-plugin-rest/src/lib.rs @@ -420,7 +420,7 @@ async fn query(mut req: Request<(Arc, 
String)>) -> tide::Result { diff --git a/zenoh/src/publication.rs b/zenoh/src/publication.rs index d2463610fb..103a65e782 100644 --- a/zenoh/src/publication.rs +++ b/zenoh/src/publication.rs @@ -215,6 +215,14 @@ impl ValueBuilderTrait for PutBuilder<'_, '_> { ..self } } + fn value>(self, value: T) -> Self { + let Value { payload, encoding } = value.into(); + Self { + payload, + encoding, + ..self + } + } } impl PutBuilder<'_, '_> { @@ -798,6 +806,15 @@ impl ValueBuilderTrait for PutPublication<'_> { ..self } } + + fn value>(self, value: T) -> Self { + let Value { payload, encoding } = value.into(); + Self { + payload, + encoding, + ..self + } + } } impl TimestampBuilderTrait for DeletePublication<'_> { diff --git a/zenoh/src/query.rs b/zenoh/src/query.rs index 3a7ee771b3..5a1d443463 100644 --- a/zenoh/src/query.rs +++ b/zenoh/src/query.rs @@ -178,6 +178,13 @@ impl ValueBuilderTrait for GetBuilder<'_, '_, Handler> { let value = Some(self.value.unwrap_or_default().payload(payload)); Self { value, ..self } } + fn value>(self, value: T) -> Self { + let value: Value = value.into(); + Self { + value: if value.is_empty() { None } else { Some(value) }, + ..self + } + } } impl<'a, 'b> GetBuilder<'a, 'b, DefaultHandler> { @@ -328,48 +335,34 @@ impl<'a, 'b> GetBuilder<'a, 'b, DefaultHandler> { impl<'a, 'b, Handler> GetBuilder<'a, 'b, Handler> { /// Change the target of the query. #[inline] - pub fn target(mut self, target: QueryTarget) -> Self { - self.target = target; - self + pub fn target(self, target: QueryTarget) -> Self { + Self { target, ..self } } /// Change the consolidation mode of the query. 
#[inline] - pub fn consolidation>(mut self, consolidation: QC) -> Self { - self.consolidation = consolidation.into(); - self + pub fn consolidation>(self, consolidation: QC) -> Self { + Self { + consolidation: consolidation.into(), + ..self + } } /// Restrict the matching queryables that will receive the query /// to the ones that have the given [`Locality`](crate::prelude::Locality). #[zenoh_macros::unstable] #[inline] - pub fn allowed_destination(mut self, destination: Locality) -> Self { - self.destination = destination; - self + pub fn allowed_destination(self, destination: Locality) -> Self { + Self { + destination, + ..self + } } /// Set query timeout. #[inline] - pub fn timeout(mut self, timeout: Duration) -> Self { - self.timeout = timeout; - self - } - - /// Set query value. - #[inline] - pub fn with_value(mut self, value: IntoValue) -> Self - where - IntoValue: Into, - { - self.value = Some(value.into()); - self - } - - #[zenoh_macros::unstable] - pub fn with_attachment(mut self, attachment: Attachment) -> Self { - self.attachment = Some(attachment); - self + pub fn timeout(self, timeout: Duration) -> Self { + Self { timeout, ..self } } /// By default, `get` guarantees that it will only receive replies whose key expressions intersect diff --git a/zenoh/src/queryable.rs b/zenoh/src/queryable.rs index c9492394c4..aa5f041a2b 100644 --- a/zenoh/src/queryable.rs +++ b/zenoh/src/queryable.rs @@ -378,6 +378,13 @@ impl ValueBuilderTrait for ReplyBuilder<'_> { ..self } } + fn value>(self, value: T) -> Self { + let Value { payload, encoding } = value.into(); + Self { + sample_builder: self.sample_builder.payload(payload).encoding(encoding), + ..self + } + } } /// A builder returned by [`Query::reply_del()`](Query::reply) diff --git a/zenoh/src/sample/builder.rs b/zenoh/src/sample/builder.rs index 8c507c8119..1bd50e7f69 100644 --- a/zenoh/src/sample/builder.rs +++ b/zenoh/src/sample/builder.rs @@ -22,6 +22,7 @@ use crate::Payload; use crate::Priority; use 
crate::Sample; use crate::SampleKind; +use crate::Value; use uhlc::Timestamp; use zenoh_core::zresult; use zenoh_protocol::core::CongestionControl; @@ -56,6 +57,9 @@ pub trait ValueBuilderTrait { fn encoding>(self, encoding: T) -> Self; /// Sets the payload fn payload>(self, payload: T) -> Self; + /// Sets both payload and encoding at once. + /// This is convenient for passing user type which supports `Into` when both payload and encoding depends on user type + fn value>(self, value: T) -> Self; } #[derive(Debug)] @@ -221,6 +225,14 @@ impl ValueBuilderTrait for PutSampleBuilder { ..self.0 .0 })) } + fn value>(self, value: T) -> Self { + let Value { payload, encoding } = value.into(); + Self(SampleBuilder(Sample { + payload, + encoding, + ..self.0 .0 + })) + } } #[derive(Debug)] diff --git a/zenoh/src/value.rs b/zenoh/src/value.rs index 8ea5aef19f..92a87cb6c5 100644 --- a/zenoh/src/value.rs +++ b/zenoh/src/value.rs @@ -43,6 +43,11 @@ impl Value { encoding: Encoding::default(), } } + /// Checks if the [`Value`] is empty. + /// Value is considered empty if its payload is empty and encoding is default. 
+ pub fn is_empty(&self) -> bool { + self.payload.is_empty() && self.encoding == Encoding::default() + } } impl ValueBuilderTrait for Value { @@ -58,6 +63,10 @@ impl ValueBuilderTrait for Value { ..self } } + fn value>(self, value: T) -> Self { + let Value { payload, encoding } = value.into(); + Self { payload, encoding } + } } impl From for Value @@ -72,6 +81,15 @@ where } } +impl From> for Value +where + T: Into, +{ + fn from(t: Option) -> Self { + t.map_or_else(Value::empty, Into::into) + } +} + impl Default for Value { fn default() -> Self { Value::empty() diff --git a/zenoh/tests/attachments.rs b/zenoh/tests/attachments.rs index f50e33cf6f..2725351ab0 100644 --- a/zenoh/tests/attachments.rs +++ b/zenoh/tests/attachments.rs @@ -100,13 +100,13 @@ fn queries() { } let get = zenoh .get("test/attachment") - .with_value("query") - .with_attachment( + .payload("query") + .attachment(Some( backer .iter() .map(|b| (b.0.as_slice(), b.1.as_slice())) .collect(), - ) + )) .res() .unwrap(); while let Ok(reply) = get.recv() { diff --git a/zenoh/tests/handler.rs b/zenoh/tests/handler.rs index c1e912fc75..ceed15e2c3 100644 --- a/zenoh/tests/handler.rs +++ b/zenoh/tests/handler.rs @@ -57,12 +57,12 @@ fn query_with_ringbuffer() { let _reply1 = zenoh .get("test/ringbuffer_query") - .with_value("query1") + .payload("query1") .res() .unwrap(); let _reply2 = zenoh .get("test/ringbuffer_query") - .with_value("query2") + .payload("query2") .res() .unwrap(); From b5a1f6b1eb3fd3310f233d54abc9135449d4630a Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Thu, 28 Mar 2024 17:32:48 +0100 Subject: [PATCH 077/124] with removed --- zenoh/src/queryable.rs | 2 +- zenoh/src/sample/builder.rs | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/zenoh/src/queryable.rs b/zenoh/src/queryable.rs index aa5f041a2b..aec45c46df 100644 --- a/zenoh/src/queryable.rs +++ b/zenoh/src/queryable.rs @@ -138,7 +138,7 @@ impl Query { IntoPayload: Into, { let sample_builder = - 
SampleBuilder::put(key_expr, payload).with_qos(response::ext::QoSType::RESPONSE.into()); + SampleBuilder::put(key_expr, payload).qos(response::ext::QoSType::RESPONSE.into()); ReplyBuilder { query: self, sample_builder, diff --git a/zenoh/src/sample/builder.rs b/zenoh/src/sample/builder.rs index 1bd50e7f69..920bd2b7b7 100644 --- a/zenoh/src/sample/builder.rs +++ b/zenoh/src/sample/builder.rs @@ -171,14 +171,14 @@ impl PutSampleBuilder { })) } /// Allows to change keyexpr of [`Sample`] - pub fn with_keyexpr(self, key_expr: IntoKeyExpr) -> Self + pub fn keyexpr(self, key_expr: IntoKeyExpr) -> Self where IntoKeyExpr: Into>, { Self(self.0.keyexpr(key_expr)) } // It's convenient to set QoS as a whole for internal usage. For user API there are `congestion_control`, `priority` and `express` methods. - pub(crate) fn with_qos(self, qos: QoS) -> Self { + pub(crate) fn qos(self, qos: QoS) -> Self { Self(SampleBuilder(Sample { qos, ..self.0 .0 })) } } From 1c9515704f25020468a22bf0dfe52d8cc0fb17cb Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Thu, 28 Mar 2024 17:37:48 +0100 Subject: [PATCH 078/124] commented code removed --- examples/examples/z_get.rs | 4 ---- 1 file changed, 4 deletions(-) diff --git a/examples/examples/z_get.rs b/examples/examples/z_get.rs index 074f931eff..259137ee4a 100644 --- a/examples/examples/z_get.rs +++ b/examples/examples/z_get.rs @@ -28,10 +28,6 @@ async fn main() { let session = zenoh::open(config).res().await.unwrap(); println!("Sending Query '{selector}'..."); - // let replies = match value { - // Some(value) => session.get(&selector).payload(value), - // None => session.get(&selector), - // } let replies = session .get(&selector) .value(value.map(Value::from)) From d9eb96a8d86c232513f6c93b1d8a3d2f57ef9f1a Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Thu, 28 Mar 2024 17:45:07 +0100 Subject: [PATCH 079/124] map-from removed --- examples/examples/z_get.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/examples/examples/z_get.rs b/examples/examples/z_get.rs index 259137ee4a..542f94ba63 100644 --- a/examples/examples/z_get.rs +++ b/examples/examples/z_get.rs @@ -30,7 +30,7 @@ async fn main() { println!("Sending Query '{selector}'..."); let replies = session .get(&selector) - .value(value.map(Value::from)) + .value(value) .target(target) .timeout(timeout) .res() From e4501f403f11837a9d143dc9f3f91801498b33fa Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Thu, 28 Mar 2024 18:01:14 +0100 Subject: [PATCH 080/124] build warnings fixed --- zenoh/tests/routing.rs | 16 +--------------- zenoh/tests/session.rs | 1 - zenoh/tests/unicity.rs | 1 - 3 files changed, 1 insertion(+), 17 deletions(-) diff --git a/zenoh/tests/routing.rs b/zenoh/tests/routing.rs index 056680ffd4..830f22a475 100644 --- a/zenoh/tests/routing.rs +++ b/zenoh/tests/routing.rs @@ -11,29 +11,15 @@ // Contributors: // ZettaScale Zenoh Team, // -use async_std::prelude::FutureExt; -use futures::future::try_join_all; -use futures::FutureExt as _; use std::str::FromStr; use std::sync::atomic::Ordering; use std::sync::{atomic::AtomicUsize, Arc}; use std::time::Duration; -use std::{ - str::FromStr, - sync::{atomic::AtomicUsize, atomic::Ordering, Arc}, - time::Duration, -}; use tokio_util::{sync::CancellationToken, task::TaskTracker}; use zenoh::config::{Config, ModeDependentValue}; use zenoh::prelude::r#async::*; use zenoh::sample::builder::QoSBuilderTrait; -use zenoh::{ - config::{Config, ModeDependentValue}, - prelude::r#async::*, - Result, -}; -use zenoh::{value::Value, Result}; -use zenoh_core::zasync_executor_init; +use zenoh::Result; use zenoh_core::ztimeout; use zenoh_protocol::core::{WhatAmI, WhatAmIMatcher}; use zenoh_result::bail; diff --git a/zenoh/tests/session.rs b/zenoh/tests/session.rs index 640f23da52..cd7335c28e 100644 --- a/zenoh/tests/session.rs +++ b/zenoh/tests/session.rs @@ -16,7 +16,6 @@ use std::sync::Arc; use std::time::Duration; use zenoh::prelude::r#async::*; use 
zenoh::sample::builder::QoSBuilderTrait; -use zenoh_core::zasync_executor_init; use zenoh_core::ztimeout; const TIMEOUT: Duration = Duration::from_secs(60); diff --git a/zenoh/tests/unicity.rs b/zenoh/tests/unicity.rs index f92a26d6c0..a71a0a8034 100644 --- a/zenoh/tests/unicity.rs +++ b/zenoh/tests/unicity.rs @@ -17,7 +17,6 @@ use std::time::Duration; use tokio::runtime::Handle; use zenoh::prelude::r#async::*; use zenoh::sample::builder::QoSBuilderTrait; -use zenoh_core::zasync_executor_init; use zenoh_core::ztimeout; const TIMEOUT: Duration = Duration::from_secs(60); From 1562a17b7a8a515e4a4ef98be7b23e9da47fbd48 Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Thu, 28 Mar 2024 19:14:00 +0100 Subject: [PATCH 081/124] Protocol interest (#870) * Add InterestId in Declare message * Improve comments * Update commons/zenoh-protocol/src/network/declare.rs Co-authored-by: Mahmoud Mazouz * Update commons/zenoh-protocol/src/network/declare.rs Co-authored-by: Mahmoud Mazouz --------- Co-authored-by: Mahmoud Mazouz --- commons/zenoh-codec/src/network/declare.rs | 17 ++++++++++++++++- commons/zenoh-protocol/src/network/declare.rs | 17 +++++++++++------ zenoh/src/key_expr.rs | 1 + zenoh/src/net/routing/dispatcher/resource.rs | 1 + zenoh/src/net/routing/hat/client/pubsub.rs | 4 ++++ zenoh/src/net/routing/hat/client/queries.rs | 3 +++ .../net/routing/hat/linkstate_peer/pubsub.rs | 6 ++++++ .../net/routing/hat/linkstate_peer/queries.rs | 6 ++++++ zenoh/src/net/routing/hat/p2p_peer/pubsub.rs | 4 ++++ zenoh/src/net/routing/hat/p2p_peer/queries.rs | 3 +++ zenoh/src/net/routing/hat/router/pubsub.rs | 10 ++++++++++ zenoh/src/net/routing/hat/router/queries.rs | 10 ++++++++++ zenoh/src/net/runtime/adminspace.rs | 3 +++ zenoh/src/net/tests/tables.rs | 5 +++++ zenoh/src/session.rs | 7 +++++++ 15 files changed, 90 insertions(+), 7 deletions(-) diff --git a/commons/zenoh-codec/src/network/declare.rs b/commons/zenoh-codec/src/network/declare.rs index c81514ab3e..d7a25ea0a9 100644 --- 
a/commons/zenoh-codec/src/network/declare.rs +++ b/commons/zenoh-codec/src/network/declare.rs @@ -95,6 +95,7 @@ where fn write(self, writer: &mut W, x: &Declare) -> Self::Output { let Declare { + interest_id, ext_qos, ext_tstamp, ext_nodeid, @@ -103,6 +104,9 @@ where // Header let mut header = id::DECLARE; + if x.interest_id.is_some() { + header |= declare::flag::I; + } let mut n_exts = ((ext_qos != &declare::ext::QoSType::DEFAULT) as u8) + (ext_tstamp.is_some() as u8) + ((ext_nodeid != &declare::ext::NodeIdType::DEFAULT) as u8); @@ -111,6 +115,11 @@ where } self.write(&mut *writer, header)?; + // Body + if let Some(interest_id) = interest_id { + self.write(&mut *writer, interest_id)?; + } + // Extensions if ext_qos != &declare::ext::QoSType::DEFAULT { n_exts -= 1; @@ -157,6 +166,11 @@ where return Err(DidntRead); } + let mut interest_id = None; + if imsg::has_flag(self.header, declare::flag::I) { + interest_id = Some(self.codec.read(&mut *reader)?); + } + // Extensions let mut ext_qos = declare::ext::QoSType::DEFAULT; let mut ext_tstamp = None; @@ -192,10 +206,11 @@ where let body: DeclareBody = self.codec.read(&mut *reader)?; Ok(Declare { - body, + interest_id, ext_qos, ext_tstamp, ext_nodeid, + body, }) } } diff --git a/commons/zenoh-protocol/src/network/declare.rs b/commons/zenoh-protocol/src/network/declare.rs index d41d8bf67f..10027259c2 100644 --- a/commons/zenoh-protocol/src/network/declare.rs +++ b/commons/zenoh-protocol/src/network/declare.rs @@ -25,20 +25,22 @@ pub use subscriber::*; pub use token::*; pub mod flag { - // pub const X: u8 = 1 << 5; // 0x20 Reserved - // pub const X: u8 = 1 << 6; // 0x40 Reserved + pub const I: u8 = 1 << 5; // 0x20 Interest if I==1 then the declare is in a response to an Interest with future==false + // pub const X: u8 = 1 << 6; // 0x40 Reserved pub const Z: u8 = 1 << 7; // 0x80 Extensions if Z==1 then an extension will follow } /// Flags: -/// - X: Reserved +/// - I: Interest If I==1 then the declare is in a response to an 
Interest with future==false /// - X: Reserved /// - Z: Extension If Z==1 then at least one extension is present /// /// 7 6 5 4 3 2 1 0 /// +-+-+-+-+-+-+-+-+ -/// |Z|X|X| DECLARE | +/// |Z|X|I| DECLARE | /// +-+-+-+---------+ +/// ~interest_id:z32~ if I==1 +/// +---------------+ /// ~ [decl_exts] ~ if Z==1 /// +---------------+ /// ~ declaration ~ @@ -46,6 +48,7 @@ pub mod flag { /// #[derive(Debug, Clone, PartialEq, Eq)] pub struct Declare { + pub interest_id: Option, pub ext_qos: ext::QoSType, pub ext_tstamp: Option, pub ext_nodeid: ext::NodeIdType, @@ -132,16 +135,18 @@ impl Declare { let mut rng = rand::thread_rng(); - let body = DeclareBody::rand(); + let interest_id = rng.gen_bool(0.5).then_some(rng.gen::()); let ext_qos = ext::QoSType::rand(); let ext_tstamp = rng.gen_bool(0.5).then(ext::TimestampType::rand); let ext_nodeid = ext::NodeIdType::rand(); + let body = DeclareBody::rand(); Self { - body, + interest_id, ext_qos, ext_tstamp, ext_nodeid, + body, } } } diff --git a/zenoh/src/key_expr.rs b/zenoh/src/key_expr.rs index f340f24cf1..aaa1d13724 100644 --- a/zenoh/src/key_expr.rs +++ b/zenoh/src/key_expr.rs @@ -664,6 +664,7 @@ impl SyncResolve for KeyExprUndeclaration<'_> { let primitives = state.primitives.as_ref().unwrap().clone(); drop(state); primitives.send_declare(zenoh_protocol::network::Declare { + interest_id: None, ext_qos: declare::ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: declare::ext::NodeIdType::DEFAULT, diff --git a/zenoh/src/net/routing/dispatcher/resource.rs b/zenoh/src/net/routing/dispatcher/resource.rs index 0450dab38a..194b97fca8 100644 --- a/zenoh/src/net/routing/dispatcher/resource.rs +++ b/zenoh/src/net/routing/dispatcher/resource.rs @@ -452,6 +452,7 @@ impl Resource { .insert(expr_id, nonwild_prefix.clone()); face.primitives.send_declare(RoutingContext::with_expr( Declare { + interest_id: None, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, diff --git 
a/zenoh/src/net/routing/hat/client/pubsub.rs b/zenoh/src/net/routing/hat/client/pubsub.rs index 290f90f95f..e85bb77bf9 100644 --- a/zenoh/src/net/routing/hat/client/pubsub.rs +++ b/zenoh/src/net/routing/hat/client/pubsub.rs @@ -53,6 +53,7 @@ fn propagate_simple_subscription_to( let key_expr = Resource::decl_key(res, dst_face); dst_face.primitives.send_declare(RoutingContext::with_expr( Declare { + interest_id: None, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -136,6 +137,7 @@ fn declare_client_subscription( .primitives .send_declare(RoutingContext::with_expr( Declare { + interest_id: None, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -169,6 +171,7 @@ fn propagate_forget_simple_subscription(tables: &mut Tables, res: &Arc if let Some(id) = face_hat_mut!(face).local_subs.remove(res) { face.primitives.send_declare(RoutingContext::with_expr( Declare { + interest_id: None, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -203,6 +206,7 @@ pub(super) fn undeclare_client_subscription( if let Some(id) = face_hat_mut!(face).local_subs.remove(res) { face.primitives.send_declare(RoutingContext::with_expr( Declare { + interest_id: None, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, diff --git a/zenoh/src/net/routing/hat/client/queries.rs b/zenoh/src/net/routing/hat/client/queries.rs index 2ac3f1b993..5c0bc5349b 100644 --- a/zenoh/src/net/routing/hat/client/queries.rs +++ b/zenoh/src/net/routing/hat/client/queries.rs @@ -93,6 +93,7 @@ fn propagate_simple_queryable( let key_expr = Resource::decl_key(res, &mut dst_face); dst_face.primitives.send_declare(RoutingContext::with_expr( Declare { + interest_id: None, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -164,6 +165,7 @@ fn propagate_forget_simple_queryable(tables: &mut Tables, res: &mut Arc if let Some(id) = 
face_hat_mut!(face).local_subs.remove(res) { face.primitives.send_declare(RoutingContext::with_expr( Declare { + interest_id: None, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -414,6 +418,7 @@ pub(super) fn undeclare_client_subscription( if let Some(id) = face_hat_mut!(face).local_subs.remove(res) { face.primitives.send_declare(RoutingContext::with_expr( Declare { + interest_id: None, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -455,6 +460,7 @@ pub(super) fn pubsub_new_face(tables: &mut Tables, face: &mut Arc) { let key_expr = Resource::decl_key(sub, face); face.primitives.send_declare(RoutingContext::with_expr( Declare { + interest_id: None, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, diff --git a/zenoh/src/net/routing/hat/linkstate_peer/queries.rs b/zenoh/src/net/routing/hat/linkstate_peer/queries.rs index 9fba744a9c..150c12a632 100644 --- a/zenoh/src/net/routing/hat/linkstate_peer/queries.rs +++ b/zenoh/src/net/routing/hat/linkstate_peer/queries.rs @@ -126,6 +126,7 @@ fn send_sourced_queryable_to_net_childs( someface.primitives.send_declare(RoutingContext::with_expr( Declare { + interest_id: None, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType { @@ -169,6 +170,7 @@ fn propagate_simple_queryable( let key_expr = Resource::decl_key(res, &mut dst_face); dst_face.primitives.send_declare(RoutingContext::with_expr( Declare { + interest_id: None, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -337,6 +339,7 @@ fn send_forget_sourced_queryable_to_net_childs( someface.primitives.send_declare(RoutingContext::with_expr( Declare { + interest_id: None, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType { @@ -362,6 +365,7 @@ fn propagate_forget_simple_queryable(tables: &mut Tables, res: &mut Arc) { let key_expr = Resource::decl_key(qabl, 
face); face.primitives.send_declare(RoutingContext::with_expr( Declare { + interest_id: None, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, diff --git a/zenoh/src/net/routing/hat/p2p_peer/pubsub.rs b/zenoh/src/net/routing/hat/p2p_peer/pubsub.rs index a722176292..b495248788 100644 --- a/zenoh/src/net/routing/hat/p2p_peer/pubsub.rs +++ b/zenoh/src/net/routing/hat/p2p_peer/pubsub.rs @@ -53,6 +53,7 @@ fn propagate_simple_subscription_to( let key_expr = Resource::decl_key(res, dst_face); dst_face.primitives.send_declare(RoutingContext::with_expr( Declare { + interest_id: None, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -136,6 +137,7 @@ fn declare_client_subscription( .primitives .send_declare(RoutingContext::with_expr( Declare { + interest_id: None, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -169,6 +171,7 @@ fn propagate_forget_simple_subscription(tables: &mut Tables, res: &Arc if let Some(id) = face_hat_mut!(face).local_subs.remove(res) { face.primitives.send_declare(RoutingContext::with_expr( Declare { + interest_id: None, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -203,6 +206,7 @@ pub(super) fn undeclare_client_subscription( if let Some(id) = face_hat_mut!(face).local_subs.remove(res) { face.primitives.send_declare(RoutingContext::with_expr( Declare { + interest_id: None, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, diff --git a/zenoh/src/net/routing/hat/p2p_peer/queries.rs b/zenoh/src/net/routing/hat/p2p_peer/queries.rs index 38f77bec45..72c32b9217 100644 --- a/zenoh/src/net/routing/hat/p2p_peer/queries.rs +++ b/zenoh/src/net/routing/hat/p2p_peer/queries.rs @@ -93,6 +93,7 @@ fn propagate_simple_queryable( let key_expr = Resource::decl_key(res, &mut dst_face); dst_face.primitives.send_declare(RoutingContext::with_expr( Declare { + 
interest_id: None, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -164,6 +165,7 @@ fn propagate_forget_simple_queryable(tables: &mut Tables, res: &mut Arc if let Some(id) = face_hat_mut!(face).local_subs.remove(res) { face.primitives.send_declare(RoutingContext::with_expr( Declare { + interest_id: None, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -408,6 +412,7 @@ fn propagate_forget_simple_subscription_to_peers(tables: &mut Tables, res: &Arc< if let Some(id) = face_hat_mut!(&mut face).local_subs.remove(res) { face.primitives.send_declare(RoutingContext::with_expr( Declare { + interest_id: None, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -559,6 +564,7 @@ pub(super) fn undeclare_client_subscription( if let Some(id) = face_hat_mut!(face).local_subs.remove(res) { face.primitives.send_declare(RoutingContext::with_expr( Declare { + interest_id: None, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -600,6 +606,7 @@ pub(super) fn pubsub_new_face(tables: &mut Tables, face: &mut Arc) { let key_expr = Resource::decl_key(sub, face); face.primitives.send_declare(RoutingContext::with_expr( Declare { + interest_id: None, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -628,6 +635,7 @@ pub(super) fn pubsub_new_face(tables: &mut Tables, face: &mut Arc) { let key_expr = Resource::decl_key(sub, face); face.primitives.send_declare(RoutingContext::with_expr( Declare { + interest_id: None, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -766,6 +774,7 @@ pub(super) fn pubsub_linkstate_change(tables: &mut Tables, zid: &ZenohId, links: if forget { dst_face.primitives.send_declare(RoutingContext::with_expr( Declare { + interest_id: None, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ 
-791,6 +800,7 @@ pub(super) fn pubsub_linkstate_change(tables: &mut Tables, zid: &ZenohId, links: }; dst_face.primitives.send_declare(RoutingContext::with_expr( Declare { + interest_id: None, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, diff --git a/zenoh/src/net/routing/hat/router/queries.rs b/zenoh/src/net/routing/hat/router/queries.rs index 61abaa7c55..99e787beb5 100644 --- a/zenoh/src/net/routing/hat/router/queries.rs +++ b/zenoh/src/net/routing/hat/router/queries.rs @@ -194,6 +194,7 @@ fn send_sourced_queryable_to_net_childs( someface.primitives.send_declare(RoutingContext::with_expr( Declare { + interest_id: None, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType { @@ -247,6 +248,7 @@ fn propagate_simple_queryable( let key_expr = Resource::decl_key(res, &mut dst_face); dst_face.primitives.send_declare(RoutingContext::with_expr( Declare { + interest_id: None, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -471,6 +473,7 @@ fn send_forget_sourced_queryable_to_net_childs( someface.primitives.send_declare(RoutingContext::with_expr( Declare { + interest_id: None, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType { @@ -496,6 +499,7 @@ fn propagate_forget_simple_queryable(tables: &mut Tables, res: &mut Arc) { let key_expr = Resource::decl_key(qabl, face); face.primitives.send_declare(RoutingContext::with_expr( Declare { + interest_id: None, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -768,6 +775,7 @@ pub(super) fn queries_new_face(tables: &mut Tables, face: &mut Arc) { let key_expr = Resource::decl_key(qabl, face); face.primitives.send_declare(RoutingContext::with_expr( Declare { + interest_id: None, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -866,6 +874,7 @@ pub(super) fn queries_linkstate_change(tables: &mut Tables, zid: &ZenohId, 
links if forget { dst_face.primitives.send_declare(RoutingContext::with_expr( Declare { + interest_id: None, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -891,6 +900,7 @@ pub(super) fn queries_linkstate_change(tables: &mut Tables, zid: &ZenohId, links let key_expr = Resource::decl_key(res, dst_face); dst_face.primitives.send_declare(RoutingContext::with_expr( Declare { + interest_id: None, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, diff --git a/zenoh/src/net/runtime/adminspace.rs b/zenoh/src/net/runtime/adminspace.rs index 166ff16bd0..d460ee3f1c 100644 --- a/zenoh/src/net/runtime/adminspace.rs +++ b/zenoh/src/net/runtime/adminspace.rs @@ -276,6 +276,8 @@ impl AdminSpace { zlock!(admin.primitives).replace(primitives.clone()); primitives.send_declare(Declare { + interest_id: None, + ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -287,6 +289,7 @@ impl AdminSpace { }); primitives.send_declare(Declare { + interest_id: None, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, diff --git a/zenoh/src/net/tests/tables.rs b/zenoh/src/net/tests/tables.rs index 516bcd0109..4067f2ad8f 100644 --- a/zenoh/src/net/tests/tables.rs +++ b/zenoh/src/net/tests/tables.rs @@ -579,6 +579,7 @@ fn client_test() { Primitives::send_declare( primitives0.as_ref(), Declare { + interest_id: None, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -606,6 +607,7 @@ fn client_test() { Primitives::send_declare( primitives0.as_ref(), Declare { + interest_id: None, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -627,6 +629,7 @@ fn client_test() { Primitives::send_declare( primitives1.as_ref(), Declare { + interest_id: None, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -654,6 +657,7 @@ fn client_test() { 
Primitives::send_declare( primitives1.as_ref(), Declare { + interest_id: None, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -675,6 +679,7 @@ fn client_test() { Primitives::send_declare( primitives2.as_ref(), Declare { + interest_id: None, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, diff --git a/zenoh/src/session.rs b/zenoh/src/session.rs index b9e20a4e68..addb757807 100644 --- a/zenoh/src/session.rs +++ b/zenoh/src/session.rs @@ -872,6 +872,7 @@ impl Session { let primitives = state.primitives.as_ref().unwrap().clone(); drop(state); primitives.send_declare(Declare { + interest_id: None, ext_qos: declare::ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: declare::ext::NodeIdType::DEFAULT, @@ -1084,6 +1085,7 @@ impl Session { // }; primitives.send_declare(Declare { + interest_id: None, ext_qos: declare::ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: declare::ext::NodeIdType::DEFAULT, @@ -1140,6 +1142,7 @@ impl Session { let primitives = state.primitives.as_ref().unwrap().clone(); drop(state); primitives.send_declare(Declare { + interest_id: None, ext_qos: declare::ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: declare::ext::NodeIdType::DEFAULT, @@ -1191,6 +1194,7 @@ impl Session { distance: 0, }; primitives.send_declare(Declare { + interest_id: None, ext_qos: declare::ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: declare::ext::NodeIdType::DEFAULT, @@ -1212,6 +1216,7 @@ impl Session { let primitives = state.primitives.as_ref().unwrap().clone(); drop(state); primitives.send_declare(Declare { + interest_id: None, ext_qos: declare::ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: declare::ext::NodeIdType::DEFAULT, @@ -1247,6 +1252,7 @@ impl Session { let primitives = state.primitives.as_ref().unwrap().clone(); drop(state); primitives.send_declare(Declare { + interest_id: None, ext_qos: declare::ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: 
declare::ext::NodeIdType::DEFAULT, @@ -1271,6 +1277,7 @@ impl Session { let primitives = state.primitives.as_ref().unwrap().clone(); drop(state); primitives.send_declare(Declare { + interest_id: None, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, From 21fb0832d9cfa904bf787ef9d511572b5ce81755 Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Fri, 29 Mar 2024 10:43:07 +0100 Subject: [PATCH 082/124] Protocol batchsize (#873) * Use BatchSize typedef instead of u16 * Use BatchSize typedef instead of u16 for vsock --- commons/zenoh-codec/src/core/zint.rs | 68 ++++++++++--------- commons/zenoh-protocol/src/transport/init.rs | 4 +- commons/zenoh-protocol/src/transport/join.rs | 2 +- commons/zenoh-protocol/src/transport/mod.rs | 1 + io/zenoh-link-commons/src/lib.rs | 3 +- io/zenoh-link-commons/src/multicast.rs | 4 +- io/zenoh-link-commons/src/unicast.rs | 7 +- io/zenoh-links/zenoh-link-quic/src/lib.rs | 13 ++-- io/zenoh-links/zenoh-link-quic/src/unicast.rs | 3 +- io/zenoh-links/zenoh-link-serial/src/lib.rs | 7 +- .../zenoh-link-serial/src/unicast.rs | 3 +- io/zenoh-links/zenoh-link-tcp/src/lib.rs | 5 +- io/zenoh-links/zenoh-link-tcp/src/unicast.rs | 3 +- io/zenoh-links/zenoh-link-tls/src/lib.rs | 13 ++-- io/zenoh-links/zenoh-link-tls/src/unicast.rs | 4 +- io/zenoh-links/zenoh-link-udp/src/lib.rs | 11 +-- .../zenoh-link-udp/src/multicast.rs | 3 +- io/zenoh-links/zenoh-link-udp/src/unicast.rs | 3 +- .../zenoh-link-unixpipe/src/unix/unicast.rs | 5 +- .../zenoh-link-unixsock_stream/src/lib.rs | 5 +- .../zenoh-link-unixsock_stream/src/unicast.rs | 3 +- io/zenoh-links/zenoh-link-vsock/src/lib.rs | 4 +- .../zenoh-link-vsock/src/unicast.rs | 8 ++- io/zenoh-links/zenoh-link-ws/src/lib.rs | 5 +- io/zenoh-links/zenoh-link-ws/src/unicast.rs | 3 +- io/zenoh-transport/src/common/pipeline.rs | 12 ++-- io/zenoh-transport/src/manager.rs | 6 +- .../src/unicast/establishment/cookie.rs | 9 ++- 28 files changed, 125 insertions(+), 92 deletions(-) diff 
--git a/commons/zenoh-codec/src/core/zint.rs b/commons/zenoh-codec/src/core/zint.rs index 0daff7348b..d5160e2ee6 100644 --- a/commons/zenoh-codec/src/core/zint.rs +++ b/commons/zenoh-codec/src/core/zint.rs @@ -17,38 +17,42 @@ use zenoh_buffers::{ writer::{DidntWrite, Writer}, }; -const VLE_LEN: usize = 9; +const VLE_LEN_MAX: usize = vle_len(u64::MAX); + +const fn vle_len(x: u64) -> usize { + const B1: u64 = u64::MAX << 7; + const B2: u64 = u64::MAX << (7 * 2); + const B3: u64 = u64::MAX << (7 * 3); + const B4: u64 = u64::MAX << (7 * 4); + const B5: u64 = u64::MAX << (7 * 5); + const B6: u64 = u64::MAX << (7 * 6); + const B7: u64 = u64::MAX << (7 * 7); + const B8: u64 = u64::MAX << (7 * 8); + + if (x & B1) == 0 { + 1 + } else if (x & B2) == 0 { + 2 + } else if (x & B3) == 0 { + 3 + } else if (x & B4) == 0 { + 4 + } else if (x & B5) == 0 { + 5 + } else if (x & B6) == 0 { + 6 + } else if (x & B7) == 0 { + 7 + } else if (x & B8) == 0 { + 8 + } else { + 9 + } +} impl LCodec for Zenoh080 { fn w_len(self, x: u64) -> usize { - const B1: u64 = u64::MAX << 7; - const B2: u64 = u64::MAX << (7 * 2); - const B3: u64 = u64::MAX << (7 * 3); - const B4: u64 = u64::MAX << (7 * 4); - const B5: u64 = u64::MAX << (7 * 5); - const B6: u64 = u64::MAX << (7 * 6); - const B7: u64 = u64::MAX << (7 * 7); - const B8: u64 = u64::MAX << (7 * 8); - - if (x & B1) == 0 { - 1 - } else if (x & B2) == 0 { - 2 - } else if (x & B3) == 0 { - 3 - } else if (x & B4) == 0 { - 4 - } else if (x & B5) == 0 { - 5 - } else if (x & B6) == 0 { - 6 - } else if (x & B7) == 0 { - 7 - } else if (x & B8) == 0 { - 8 - } else { - 9 - } + vle_len(x) } } @@ -107,7 +111,7 @@ where type Output = Result<(), DidntWrite>; fn write(self, writer: &mut W, mut x: u64) -> Self::Output { - writer.with_slot(VLE_LEN, move |buffer| { + writer.with_slot(VLE_LEN_MAX, move |buffer| { let mut len = 0; while (x & !0x7f_u64) != 0 { // SAFETY: buffer is guaranteed to be VLE_LEN long where VLE_LEN is @@ -122,7 +126,7 @@ where } // In case len 
== VLE_LEN then all the bits have already been written in the latest iteration. // Else we haven't written all the necessary bytes yet. - if len != VLE_LEN { + if len != VLE_LEN_MAX { // SAFETY: buffer is guaranteed to be VLE_LEN long where VLE_LEN is // the maximum number of bytes a VLE can take once encoded. // I.e.: x is shifted 7 bits to the right every iteration, @@ -151,7 +155,7 @@ where let mut v = 0; let mut i = 0; // 7 * VLE_LEN is beyond the maximum number of shift bits - while (b & 0x80_u8) != 0 && i != 7 * (VLE_LEN - 1) { + while (b & 0x80_u8) != 0 && i != 7 * (VLE_LEN_MAX - 1) { v |= ((b & 0x7f_u8) as u64) << i; b = reader.read_u8()?; i += 7; diff --git a/commons/zenoh-protocol/src/transport/init.rs b/commons/zenoh-protocol/src/transport/init.rs index 1327288471..de517a353c 100644 --- a/commons/zenoh-protocol/src/transport/init.rs +++ b/commons/zenoh-protocol/src/transport/init.rs @@ -165,7 +165,7 @@ impl InitSyn { let whatami = WhatAmI::rand(); let zid = ZenohId::default(); let resolution = Resolution::rand(); - let batch_size: u16 = rng.gen(); + let batch_size: BatchSize = rng.gen(); let ext_qos = rng.gen_bool(0.5).then_some(ZExtUnit::rand()); let ext_shm = rng.gen_bool(0.5).then_some(ZExtZBuf::rand()); let ext_auth = rng.gen_bool(0.5).then_some(ZExtZBuf::rand()); @@ -221,7 +221,7 @@ impl InitAck { } else { Resolution::rand() }; - let batch_size: u16 = rng.gen(); + let batch_size: BatchSize = rng.gen(); let cookie = ZSlice::rand(64); let ext_qos = rng.gen_bool(0.5).then_some(ZExtUnit::rand()); let ext_shm = rng.gen_bool(0.5).then_some(ZExtZBuf::rand()); diff --git a/commons/zenoh-protocol/src/transport/join.rs b/commons/zenoh-protocol/src/transport/join.rs index c5fbb98430..a5cf1422a6 100644 --- a/commons/zenoh-protocol/src/transport/join.rs +++ b/commons/zenoh-protocol/src/transport/join.rs @@ -141,7 +141,7 @@ impl Join { let whatami = WhatAmI::rand(); let zid = ZenohId::default(); let resolution = Resolution::rand(); - let batch_size: u16 = 
rng.gen(); + let batch_size: BatchSize = rng.gen(); let lease = if rng.gen_bool(0.5) { Duration::from_secs(rng.gen()) } else { diff --git a/commons/zenoh-protocol/src/transport/mod.rs b/commons/zenoh-protocol/src/transport/mod.rs index 1ea6fca144..e92860f441 100644 --- a/commons/zenoh-protocol/src/transport/mod.rs +++ b/commons/zenoh-protocol/src/transport/mod.rs @@ -39,6 +39,7 @@ use crate::network::NetworkMessage; /// the boundary of the serialized messages. The length is encoded as little-endian. /// In any case, the length of a message must not exceed 65_535 bytes. pub type BatchSize = u16; +pub type AtomicBatchSize = core::sync::atomic::AtomicU16; pub mod batch_size { use super::BatchSize; diff --git a/io/zenoh-link-commons/src/lib.rs b/io/zenoh-link-commons/src/lib.rs index f9ad7166ee..138726fd4f 100644 --- a/io/zenoh-link-commons/src/lib.rs +++ b/io/zenoh-link-commons/src/lib.rs @@ -32,6 +32,7 @@ pub use multicast::*; use serde::Serialize; pub use unicast::*; use zenoh_protocol::core::Locator; +use zenoh_protocol::transport::BatchSize; use zenoh_result::ZResult; /*************************************/ @@ -45,7 +46,7 @@ pub struct Link { pub src: Locator, pub dst: Locator, pub group: Option, - pub mtu: u16, + pub mtu: BatchSize, pub is_reliable: bool, pub is_streamed: bool, pub interfaces: Vec, diff --git a/io/zenoh-link-commons/src/multicast.rs b/io/zenoh-link-commons/src/multicast.rs index 65bc7195b6..ccfe6842c1 100644 --- a/io/zenoh-link-commons/src/multicast.rs +++ b/io/zenoh-link-commons/src/multicast.rs @@ -22,7 +22,7 @@ use zenoh_buffers::{reader::HasReader, writer::HasWriter}; use zenoh_codec::{RCodec, WCodec, Zenoh080}; use zenoh_protocol::{ core::{EndPoint, Locator}, - transport::TransportMessage, + transport::{BatchSize, TransportMessage}, }; use zenoh_result::{zerror, ZResult}; @@ -44,7 +44,7 @@ pub struct LinkMulticast(pub Arc); #[async_trait] pub trait LinkMulticastTrait: Send + Sync { - fn get_mtu(&self) -> u16; + fn get_mtu(&self) -> 
BatchSize; fn get_src(&self) -> &Locator; fn get_dst(&self) -> &Locator; fn is_reliable(&self) -> bool; diff --git a/io/zenoh-link-commons/src/unicast.rs b/io/zenoh-link-commons/src/unicast.rs index fe87e70e94..c21f4a008c 100644 --- a/io/zenoh-link-commons/src/unicast.rs +++ b/io/zenoh-link-commons/src/unicast.rs @@ -19,7 +19,10 @@ use core::{ ops::Deref, }; use std::net::SocketAddr; -use zenoh_protocol::core::{EndPoint, Locator}; +use zenoh_protocol::{ + core::{EndPoint, Locator}, + transport::BatchSize, +}; use zenoh_result::ZResult; pub type LinkManagerUnicast = Arc; @@ -41,7 +44,7 @@ pub struct LinkUnicast(pub Arc); #[async_trait] pub trait LinkUnicastTrait: Send + Sync { - fn get_mtu(&self) -> u16; + fn get_mtu(&self) -> BatchSize; fn get_src(&self) -> &Locator; fn get_dst(&self) -> &Locator; fn is_reliable(&self) -> bool; diff --git a/io/zenoh-links/zenoh-link-quic/src/lib.rs b/io/zenoh-links/zenoh-link-quic/src/lib.rs index c6d7e16087..4bcabaf5b6 100644 --- a/io/zenoh-links/zenoh-link-quic/src/lib.rs +++ b/io/zenoh-links/zenoh-link-quic/src/lib.rs @@ -28,9 +28,12 @@ use std::net::SocketAddr; use zenoh_config::Config; use zenoh_core::zconfigurable; use zenoh_link_commons::{ConfigurationInspector, LocatorInspector}; -use zenoh_protocol::core::{ - endpoint::{Address, Parameters}, - Locator, +use zenoh_protocol::{ + core::{ + endpoint::{Address, Parameters}, + Locator, + }, + transport::BatchSize, }; use zenoh_result::{bail, zerror, ZResult}; @@ -47,7 +50,7 @@ pub const ALPN_QUIC_HTTP: &[&[u8]] = &[b"hq-29"]; // adopted in Zenoh and the usage of 16 bits in Zenoh to encode the // payload length in byte-streamed, the QUIC MTU is constrained to // 2^16 - 1 bytes (i.e., 65535). -const QUIC_MAX_MTU: u16 = u16::MAX; +const QUIC_MAX_MTU: BatchSize = BatchSize::MAX; pub const QUIC_LOCATOR_PREFIX: &str = "quic"; #[derive(Default, Clone, Copy, Debug)] @@ -137,7 +140,7 @@ impl ConfigurationInspector for QuicConfigurator { zconfigurable! 
{ // Default MTU (QUIC PDU) in bytes. - static ref QUIC_DEFAULT_MTU: u16 = QUIC_MAX_MTU; + static ref QUIC_DEFAULT_MTU: BatchSize = QUIC_MAX_MTU; // The LINGER option causes the shutdown() call to block until (1) all application data is delivered // to the remote end or (2) a timeout expires. The timeout is expressed in seconds. // More info on the LINGER option and its dynamics can be found at: diff --git a/io/zenoh-links/zenoh-link-quic/src/unicast.rs b/io/zenoh-links/zenoh-link-quic/src/unicast.rs index 33953d666d..14a01861ca 100644 --- a/io/zenoh-links/zenoh-link-quic/src/unicast.rs +++ b/io/zenoh-links/zenoh-link-quic/src/unicast.rs @@ -34,6 +34,7 @@ use zenoh_link_commons::{ ListenersUnicastIP, NewLinkChannelSender, }; use zenoh_protocol::core::{EndPoint, Locator}; +use zenoh_protocol::transport::BatchSize; use zenoh_result::{bail, zerror, ZError, ZResult}; pub struct LinkUnicastQuic { @@ -135,7 +136,7 @@ impl LinkUnicastTrait for LinkUnicastQuic { } #[inline(always)] - fn get_mtu(&self) -> u16 { + fn get_mtu(&self) -> BatchSize { *QUIC_DEFAULT_MTU } diff --git a/io/zenoh-links/zenoh-link-serial/src/lib.rs b/io/zenoh-links/zenoh-link-serial/src/lib.rs index fb4d7fcc12..f7b0b7afeb 100644 --- a/io/zenoh-links/zenoh-link-serial/src/lib.rs +++ b/io/zenoh-links/zenoh-link-serial/src/lib.rs @@ -25,10 +25,11 @@ pub use unicast::*; use zenoh_core::zconfigurable; use zenoh_link_commons::LocatorInspector; use zenoh_protocol::core::{endpoint::Address, EndPoint, Locator}; +use zenoh_protocol::transport::BatchSize; use zenoh_result::ZResult; // Maximum MTU (Serial PDU) in bytes. -const SERIAL_MAX_MTU: u16 = z_serial::MAX_MTU as u16; +const SERIAL_MAX_MTU: BatchSize = z_serial::MAX_MTU as BatchSize; const DEFAULT_BAUDRATE: u32 = 9_600; @@ -36,11 +37,11 @@ const DEFAULT_EXCLUSIVE: bool = true; pub const SERIAL_LOCATOR_PREFIX: &str = "serial"; -const SERIAL_MTU_LIMIT: u16 = SERIAL_MAX_MTU; +const SERIAL_MTU_LIMIT: BatchSize = SERIAL_MAX_MTU; zconfigurable! 
{ // Default MTU (UDP PDU) in bytes. - static ref SERIAL_DEFAULT_MTU: u16 = SERIAL_MTU_LIMIT; + static ref SERIAL_DEFAULT_MTU: BatchSize = SERIAL_MTU_LIMIT; // Amount of time in microseconds to throttle the accept loop upon an error. // Default set to 100 ms. static ref SERIAL_ACCEPT_THROTTLE_TIME: u64 = 100_000; diff --git a/io/zenoh-links/zenoh-link-serial/src/unicast.rs b/io/zenoh-links/zenoh-link-serial/src/unicast.rs index 0efa40ee90..0a5bea3c18 100644 --- a/io/zenoh-links/zenoh-link-serial/src/unicast.rs +++ b/io/zenoh-links/zenoh-link-serial/src/unicast.rs @@ -30,6 +30,7 @@ use zenoh_link_commons::{ NewLinkChannelSender, }; use zenoh_protocol::core::{EndPoint, Locator}; +use zenoh_protocol::transport::BatchSize; use zenoh_result::{zerror, ZResult}; use z_serial::ZSerial; @@ -177,7 +178,7 @@ impl LinkUnicastTrait for LinkUnicastSerial { } #[inline(always)] - fn get_mtu(&self) -> u16 { + fn get_mtu(&self) -> BatchSize { *SERIAL_DEFAULT_MTU } diff --git a/io/zenoh-links/zenoh-link-tcp/src/lib.rs b/io/zenoh-links/zenoh-link-tcp/src/lib.rs index 1a7d6ae705..0b075d9bf8 100644 --- a/io/zenoh-links/zenoh-link-tcp/src/lib.rs +++ b/io/zenoh-links/zenoh-link-tcp/src/lib.rs @@ -22,6 +22,7 @@ use std::net::SocketAddr; use zenoh_core::zconfigurable; use zenoh_link_commons::LocatorInspector; use zenoh_protocol::core::{endpoint::Address, Locator}; +use zenoh_protocol::transport::BatchSize; use zenoh_result::{zerror, ZResult}; mod unicast; @@ -33,7 +34,7 @@ pub use unicast::*; // adopted in Zenoh and the usage of 16 bits in Zenoh to encode the // payload length in byte-streamed, the TCP MTU is constrained to // 2^16 - 1 bytes (i.e., 65535). -const TCP_MAX_MTU: u16 = u16::MAX; +const TCP_MAX_MTU: BatchSize = BatchSize::MAX; pub const TCP_LOCATOR_PREFIX: &str = "tcp"; @@ -52,7 +53,7 @@ impl LocatorInspector for TcpLocatorInspector { zconfigurable! { // Default MTU (TCP PDU) in bytes. 
- static ref TCP_DEFAULT_MTU: u16 = TCP_MAX_MTU; + static ref TCP_DEFAULT_MTU: BatchSize = TCP_MAX_MTU; // The LINGER option causes the shutdown() call to block until (1) all application data is delivered // to the remote end or (2) a timeout expires. The timeout is expressed in seconds. // More info on the LINGER option and its dynamics can be found at: diff --git a/io/zenoh-links/zenoh-link-tcp/src/unicast.rs b/io/zenoh-links/zenoh-link-tcp/src/unicast.rs index 7137ac0212..aaadcf3c23 100644 --- a/io/zenoh-links/zenoh-link-tcp/src/unicast.rs +++ b/io/zenoh-links/zenoh-link-tcp/src/unicast.rs @@ -25,6 +25,7 @@ use zenoh_link_commons::{ ListenersUnicastIP, NewLinkChannelSender, BIND_INTERFACE, }; use zenoh_protocol::core::{EndPoint, Locator}; +use zenoh_protocol::transport::BatchSize; use zenoh_result::{bail, zerror, Error as ZError, ZResult}; use super::{ @@ -145,7 +146,7 @@ impl LinkUnicastTrait for LinkUnicastTcp { } #[inline(always)] - fn get_mtu(&self) -> u16 { + fn get_mtu(&self) -> BatchSize { *TCP_DEFAULT_MTU } diff --git a/io/zenoh-links/zenoh-link-tls/src/lib.rs b/io/zenoh-links/zenoh-link-tls/src/lib.rs index 95d59104b4..7faebb4cd9 100644 --- a/io/zenoh-links/zenoh-link-tls/src/lib.rs +++ b/io/zenoh-links/zenoh-link-tls/src/lib.rs @@ -30,9 +30,12 @@ use std::{convert::TryFrom, net::SocketAddr}; use zenoh_config::Config; use zenoh_core::zconfigurable; use zenoh_link_commons::{ConfigurationInspector, LocatorInspector}; -use zenoh_protocol::core::{ - endpoint::{self, Address}, - Locator, +use zenoh_protocol::{ + core::{ + endpoint::{self, Address}, + Locator, + }, + transport::BatchSize, }; use zenoh_result::{bail, zerror, ZResult}; @@ -45,7 +48,7 @@ pub use unicast::*; // adopted in Zenoh and the usage of 16 bits in Zenoh to encode the // payload length in byte-streamed, the TLS MTU is constrained to // 2^16 - 1 bytes (i.e., 65535). 
-const TLS_MAX_MTU: u16 = u16::MAX; +const TLS_MAX_MTU: BatchSize = BatchSize::MAX; pub const TLS_LOCATOR_PREFIX: &str = "tls"; #[derive(Default, Clone, Copy)] @@ -172,7 +175,7 @@ impl ConfigurationInspector for TlsConfigurator { zconfigurable! { // Default MTU (TLS PDU) in bytes. - static ref TLS_DEFAULT_MTU: u16 = TLS_MAX_MTU; + static ref TLS_DEFAULT_MTU: BatchSize = TLS_MAX_MTU; // The LINGER option causes the shutdown() call to block until (1) all application data is delivered // to the remote end or (2) a timeout expires. The timeout is expressed in seconds. // More info on the LINGER option and its dynamics can be found at: diff --git a/io/zenoh-links/zenoh-link-tls/src/unicast.rs b/io/zenoh-links/zenoh-link-tls/src/unicast.rs index 7da711161e..a58e7372dd 100644 --- a/io/zenoh-links/zenoh-link-tls/src/unicast.rs +++ b/io/zenoh-links/zenoh-link-tls/src/unicast.rs @@ -42,8 +42,8 @@ use zenoh_link_commons::{ get_ip_interface_names, LinkManagerUnicastTrait, LinkUnicast, LinkUnicastTrait, ListenersUnicastIP, NewLinkChannelSender, }; -use zenoh_protocol::core::endpoint::Config; use zenoh_protocol::core::{EndPoint, Locator}; +use zenoh_protocol::{core::endpoint::Config, transport::BatchSize}; use zenoh_result::{bail, zerror, ZError, ZResult}; pub struct LinkUnicastTls { @@ -180,7 +180,7 @@ impl LinkUnicastTrait for LinkUnicastTls { } #[inline(always)] - fn get_mtu(&self) -> u16 { + fn get_mtu(&self) -> BatchSize { *TLS_DEFAULT_MTU } diff --git a/io/zenoh-links/zenoh-link-udp/src/lib.rs b/io/zenoh-links/zenoh-link-udp/src/lib.rs index 91d02cc13d..86db845d8f 100644 --- a/io/zenoh-links/zenoh-link-udp/src/lib.rs +++ b/io/zenoh-links/zenoh-link-udp/src/lib.rs @@ -27,6 +27,7 @@ pub use unicast::*; use zenoh_core::zconfigurable; use zenoh_link_commons::LocatorInspector; use zenoh_protocol::core::{endpoint::Address, Locator}; +use zenoh_protocol::transport::BatchSize; use zenoh_result::{zerror, ZResult}; // NOTE: In case of using UDP in high-throughput scenarios, it is 
recommended to set the @@ -44,24 +45,24 @@ use zenoh_result::{zerror, ZResult}; // Although in IPv6 it is possible to have UDP datagrams of size greater than 65,535 bytes via // IPv6 Jumbograms, its usage in Zenoh is discouraged unless the consequences are very well // understood. -const UDP_MAX_MTU: u16 = 65_507; +const UDP_MAX_MTU: BatchSize = 65_507; pub const UDP_LOCATOR_PREFIX: &str = "udp"; #[cfg(any(target_os = "linux", target_os = "windows"))] // Linux default value of a maximum datagram size is set to UDP MAX MTU. -const UDP_MTU_LIMIT: u16 = UDP_MAX_MTU; +const UDP_MTU_LIMIT: BatchSize = UDP_MAX_MTU; #[cfg(target_os = "macos")] // Mac OS X default value of a maximum datagram size is set to 9216 bytes. -const UDP_MTU_LIMIT: u16 = 9_216; +const UDP_MTU_LIMIT: BatchSize = 9_216; #[cfg(not(any(target_os = "linux", target_os = "macos", target_os = "windows")))] -const UDP_MTU_LIMIT: u16 = 8_192; +const UDP_MTU_LIMIT: BatchSize = 8_192; zconfigurable! { // Default MTU (UDP PDU) in bytes. - static ref UDP_DEFAULT_MTU: u16 = UDP_MTU_LIMIT; + static ref UDP_DEFAULT_MTU: BatchSize = UDP_MTU_LIMIT; // Amount of time in microseconds to throttle the accept loop upon an error. // Default set to 100 ms. 
static ref UDP_ACCEPT_THROTTLE_TIME: u64 = 100_000; diff --git a/io/zenoh-links/zenoh-link-udp/src/multicast.rs b/io/zenoh-links/zenoh-link-udp/src/multicast.rs index bc894bd296..a6e7977052 100644 --- a/io/zenoh-links/zenoh-link-udp/src/multicast.rs +++ b/io/zenoh-links/zenoh-link-udp/src/multicast.rs @@ -21,6 +21,7 @@ use std::{borrow::Cow, fmt}; use tokio::net::UdpSocket; use zenoh_link_commons::{LinkManagerMulticastTrait, LinkMulticast, LinkMulticastTrait}; use zenoh_protocol::core::{Config, EndPoint, Locator}; +use zenoh_protocol::transport::BatchSize; use zenoh_result::{bail, zerror, Error as ZError, ZResult}; pub struct LinkMulticastUdp { @@ -119,7 +120,7 @@ impl LinkMulticastTrait for LinkMulticastUdp { } #[inline(always)] - fn get_mtu(&self) -> u16 { + fn get_mtu(&self) -> BatchSize { *UDP_DEFAULT_MTU } diff --git a/io/zenoh-links/zenoh-link-udp/src/unicast.rs b/io/zenoh-links/zenoh-link-udp/src/unicast.rs index 1cd4a0b1ec..5021969bfa 100644 --- a/io/zenoh-links/zenoh-link-udp/src/unicast.rs +++ b/io/zenoh-links/zenoh-link-udp/src/unicast.rs @@ -30,6 +30,7 @@ use zenoh_link_commons::{ LinkUnicastTrait, ListenersUnicastIP, NewLinkChannelSender, BIND_INTERFACE, }; use zenoh_protocol::core::{EndPoint, Locator}; +use zenoh_protocol::transport::BatchSize; use zenoh_result::{bail, zerror, Error as ZError, ZResult}; use zenoh_sync::Mvar; @@ -200,7 +201,7 @@ impl LinkUnicastTrait for LinkUnicastUdp { } #[inline(always)] - fn get_mtu(&self) -> u16 { + fn get_mtu(&self) -> BatchSize { *UDP_DEFAULT_MTU } diff --git a/io/zenoh-links/zenoh-link-unixpipe/src/unix/unicast.rs b/io/zenoh-links/zenoh-link-unixpipe/src/unix/unicast.rs index 0a0aebe730..3026d4e4b0 100644 --- a/io/zenoh-links/zenoh-link-unixpipe/src/unix/unicast.rs +++ b/io/zenoh-links/zenoh-link-unixpipe/src/unix/unicast.rs @@ -33,6 +33,7 @@ use tokio::io::Interest; use tokio_util::sync::CancellationToken; use zenoh_core::{zasyncread, zasyncwrite}; use zenoh_protocol::core::{EndPoint, Locator}; +use 
zenoh_protocol::transport::BatchSize; use zenoh_runtime::ZRuntime; use unix_named_pipe::{create, open_write}; @@ -45,7 +46,7 @@ use zenoh_result::{bail, ZResult}; use super::FILE_ACCESS_MASK; -const LINUX_PIPE_MAX_MTU: u16 = 65_535; +const LINUX_PIPE_MAX_MTU: BatchSize = BatchSize::MAX; const LINUX_PIPE_DEDICATE_TRIES: usize = 100; static PIPE_INVITATION: &[u8] = &[0xDE, 0xAD, 0xBE, 0xEF]; @@ -498,7 +499,7 @@ impl LinkUnicastTrait for UnicastPipe { } #[inline(always)] - fn get_mtu(&self) -> u16 { + fn get_mtu(&self) -> BatchSize { LINUX_PIPE_MAX_MTU } diff --git a/io/zenoh-links/zenoh-link-unixsock_stream/src/lib.rs b/io/zenoh-links/zenoh-link-unixsock_stream/src/lib.rs index b6c180cd8d..ce067c1aa2 100644 --- a/io/zenoh-links/zenoh-link-unixsock_stream/src/lib.rs +++ b/io/zenoh-links/zenoh-link-unixsock_stream/src/lib.rs @@ -21,6 +21,7 @@ use async_trait::async_trait; use zenoh_core::zconfigurable; use zenoh_link_commons::LocatorInspector; use zenoh_protocol::core::{endpoint::Address, Locator}; +use zenoh_protocol::transport::BatchSize; use zenoh_result::ZResult; #[cfg(target_family = "unix")] mod unicast; @@ -33,13 +34,13 @@ pub use unicast::*; // adopted in Zenoh and the usage of 16 bits in Zenoh to encode the // payload length in byte-streamed, the UNIXSOCKSTREAM MTU is constrained to // 2^16 - 1 bytes (i.e., 65535). -const UNIXSOCKSTREAM_MAX_MTU: u16 = u16::MAX; +const UNIXSOCKSTREAM_MAX_MTU: BatchSize = BatchSize::MAX; pub const UNIXSOCKSTREAM_LOCATOR_PREFIX: &str = "unixsock-stream"; zconfigurable! { // Default MTU (UNIXSOCKSTREAM PDU) in bytes. - static ref UNIXSOCKSTREAM_DEFAULT_MTU: u16 = UNIXSOCKSTREAM_MAX_MTU; + static ref UNIXSOCKSTREAM_DEFAULT_MTU: BatchSize = UNIXSOCKSTREAM_MAX_MTU; // Amount of time in microseconds to throttle the accept loop upon an error. // Default set to 100 ms. 
static ref UNIXSOCKSTREAM_ACCEPT_THROTTLE_TIME: u64 = 100_000; diff --git a/io/zenoh-links/zenoh-link-unixsock_stream/src/unicast.rs b/io/zenoh-links/zenoh-link-unixsock_stream/src/unicast.rs index 53441ab89c..a961c1aebb 100644 --- a/io/zenoh-links/zenoh-link-unixsock_stream/src/unicast.rs +++ b/io/zenoh-links/zenoh-link-unixsock_stream/src/unicast.rs @@ -32,6 +32,7 @@ use zenoh_link_commons::{ LinkManagerUnicastTrait, LinkUnicast, LinkUnicastTrait, NewLinkChannelSender, }; use zenoh_protocol::core::{EndPoint, Locator}; +use zenoh_protocol::transport::BatchSize; use zenoh_result::{zerror, ZResult}; use super::{get_unix_path_as_string, UNIXSOCKSTREAM_DEFAULT_MTU, UNIXSOCKSTREAM_LOCATOR_PREFIX}; @@ -119,7 +120,7 @@ impl LinkUnicastTrait for LinkUnicastUnixSocketStream { } #[inline(always)] - fn get_mtu(&self) -> u16 { + fn get_mtu(&self) -> BatchSize { *UNIXSOCKSTREAM_DEFAULT_MTU } diff --git a/io/zenoh-links/zenoh-link-vsock/src/lib.rs b/io/zenoh-links/zenoh-link-vsock/src/lib.rs index 7834050796..d58250fed3 100644 --- a/io/zenoh-links/zenoh-link-vsock/src/lib.rs +++ b/io/zenoh-links/zenoh-link-vsock/src/lib.rs @@ -22,7 +22,7 @@ use async_trait::async_trait; use zenoh_core::zconfigurable; use zenoh_link_commons::LocatorInspector; -use zenoh_protocol::core::Locator; +use zenoh_protocol::{core::Locator, transport::BatchSize}; use zenoh_result::ZResult; #[cfg(target_os = "linux")] @@ -47,7 +47,7 @@ impl LocatorInspector for VsockLocatorInspector { zconfigurable! { // Default MTU in bytes. - static ref VSOCK_DEFAULT_MTU: u16 = u16::MAX; + static ref VSOCK_DEFAULT_MTU: BatchSize = BatchSize::MAX; // Amount of time in microseconds to throttle the accept loop upon an error. // Default set to 100 ms. 
static ref VSOCK_ACCEPT_THROTTLE_TIME: u64 = 100_000; diff --git a/io/zenoh-links/zenoh-link-vsock/src/unicast.rs b/io/zenoh-links/zenoh-link-vsock/src/unicast.rs index ced7b9dc15..59efa6f0e3 100644 --- a/io/zenoh-links/zenoh-link-vsock/src/unicast.rs +++ b/io/zenoh-links/zenoh-link-vsock/src/unicast.rs @@ -27,8 +27,10 @@ use zenoh_core::{zasyncread, zasyncwrite}; use zenoh_link_commons::{ LinkManagerUnicastTrait, LinkUnicast, LinkUnicastTrait, NewLinkChannelSender, }; -use zenoh_protocol::core::endpoint::Address; -use zenoh_protocol::core::{EndPoint, Locator}; +use zenoh_protocol::{ + core::{endpoint::Address, EndPoint, Locator}, + transport::BatchSize, +}; use zenoh_result::{bail, zerror, ZResult}; use super::{VSOCK_ACCEPT_THROTTLE_TIME, VSOCK_DEFAULT_MTU, VSOCK_LOCATOR_PREFIX}; @@ -170,7 +172,7 @@ impl LinkUnicastTrait for LinkUnicastVsock { } #[inline(always)] - fn get_mtu(&self) -> u16 { + fn get_mtu(&self) -> BatchSize { *VSOCK_DEFAULT_MTU } diff --git a/io/zenoh-links/zenoh-link-ws/src/lib.rs b/io/zenoh-links/zenoh-link-ws/src/lib.rs index f68a20d15d..d165b480a9 100644 --- a/io/zenoh-links/zenoh-link-ws/src/lib.rs +++ b/io/zenoh-links/zenoh-link-ws/src/lib.rs @@ -23,6 +23,7 @@ use url::Url; use zenoh_core::zconfigurable; use zenoh_link_commons::LocatorInspector; use zenoh_protocol::core::{endpoint::Address, Locator}; +use zenoh_protocol::transport::BatchSize; use zenoh_result::{bail, ZResult}; mod unicast; pub use unicast::*; @@ -33,7 +34,7 @@ pub use unicast::*; // adopted in Zenoh and the usage of 16 bits in Zenoh to encode the // payload length in byte-streamed, the TCP MTU is constrained to // 2^16 - 1 bytes (i.e., 65535). -const WS_MAX_MTU: u16 = u16::MAX; +const WS_MAX_MTU: BatchSize = BatchSize::MAX; pub const WS_LOCATOR_PREFIX: &str = "ws"; @@ -51,7 +52,7 @@ impl LocatorInspector for WsLocatorInspector { zconfigurable! { // Default MTU (TCP PDU) in bytes. 
- static ref WS_DEFAULT_MTU: u16 = WS_MAX_MTU; + static ref WS_DEFAULT_MTU: BatchSize = WS_MAX_MTU; // Amount of time in microseconds to throttle the accept loop upon an error. // Default set to 100 ms. static ref TCP_ACCEPT_THROTTLE_TIME: u64 = 100_000; diff --git a/io/zenoh-links/zenoh-link-ws/src/unicast.rs b/io/zenoh-links/zenoh-link-ws/src/unicast.rs index 6a0cf64e6e..acf568f78c 100644 --- a/io/zenoh-links/zenoh-link-ws/src/unicast.rs +++ b/io/zenoh-links/zenoh-link-ws/src/unicast.rs @@ -34,6 +34,7 @@ use zenoh_link_commons::{ LinkManagerUnicastTrait, LinkUnicast, LinkUnicastTrait, NewLinkChannelSender, }; use zenoh_protocol::core::{EndPoint, Locator}; +use zenoh_protocol::transport::BatchSize; use zenoh_result::{bail, zerror, ZResult}; use super::{get_ws_addr, get_ws_url, TCP_ACCEPT_THROTTLE_TIME, WS_DEFAULT_MTU, WS_LOCATOR_PREFIX}; @@ -200,7 +201,7 @@ impl LinkUnicastTrait for LinkUnicastWs { } #[inline(always)] - fn get_mtu(&self) -> u16 { + fn get_mtu(&self) -> BatchSize { *WS_DEFAULT_MTU } diff --git a/io/zenoh-transport/src/common/pipeline.rs b/io/zenoh-transport/src/common/pipeline.rs index fb95d709db..b74fa2990c 100644 --- a/io/zenoh-transport/src/common/pipeline.rs +++ b/io/zenoh-transport/src/common/pipeline.rs @@ -22,7 +22,7 @@ use ringbuffer_spsc::{RingBuffer, RingBufferReader, RingBufferWriter}; use std::sync::{Arc, Mutex, MutexGuard}; use std::time::Duration; use std::{ - sync::atomic::{AtomicBool, AtomicU16, Ordering}, + sync::atomic::{AtomicBool, Ordering}, time::Instant, }; use zenoh_buffers::{ @@ -40,7 +40,7 @@ use zenoh_protocol::{ transport::{ fragment::FragmentHeader, frame::{self, FrameHeader}, - BatchSize, TransportMessage, + AtomicBatchSize, BatchSize, TransportMessage, }, }; @@ -75,7 +75,7 @@ impl StageInRefill { struct StageInOut { n_out_w: Sender<()>, s_out_w: RingBufferWriter, - bytes: Arc, + bytes: Arc, backoff: Arc, } @@ -355,12 +355,12 @@ enum Pull { struct Backoff { retry_time: NanoSeconds, last_bytes: BatchSize, - bytes: Arc, + 
bytes: Arc, backoff: Arc, } impl Backoff { - fn new(bytes: Arc, backoff: Arc) -> Self { + fn new(bytes: Arc, backoff: Arc) -> Self { Self { retry_time: 0, last_bytes: 0, @@ -552,7 +552,7 @@ impl TransmissionPipeline { // This is a SPSC ring buffer let (s_out_w, s_out_r) = RingBuffer::::init(); let current = Arc::new(Mutex::new(None)); - let bytes = Arc::new(AtomicU16::new(0)); + let bytes = Arc::new(AtomicBatchSize::new(0)); let backoff = Arc::new(AtomicBool::new(false)); stage_in.push(Mutex::new(StageIn { diff --git a/io/zenoh-transport/src/manager.rs b/io/zenoh-transport/src/manager.rs index f16a68cfba..2d7961ed2b 100644 --- a/io/zenoh-transport/src/manager.rs +++ b/io/zenoh-transport/src/manager.rs @@ -93,7 +93,7 @@ pub struct TransportManagerConfig { pub zid: ZenohId, pub whatami: WhatAmI, pub resolution: Resolution, - pub batch_size: u16, + pub batch_size: BatchSize, pub wait_before_drop: Duration, pub queue_size: [usize; Priority::NUM], pub queue_backoff: Duration, @@ -122,7 +122,7 @@ pub struct TransportManagerBuilder { zid: ZenohId, whatami: WhatAmI, resolution: Resolution, - batch_size: u16, + batch_size: BatchSize, wait_before_drop: Duration, queue_size: QueueSizeConf, queue_backoff: Duration, @@ -151,7 +151,7 @@ impl TransportManagerBuilder { self } - pub fn batch_size(mut self, batch_size: u16) -> Self { + pub fn batch_size(mut self, batch_size: BatchSize) -> Self { self.batch_size = batch_size; self } diff --git a/io/zenoh-transport/src/unicast/establishment/cookie.rs b/io/zenoh-transport/src/unicast/establishment/cookie.rs index 0db9e1c93a..6f0295601c 100644 --- a/io/zenoh-transport/src/unicast/establishment/cookie.rs +++ b/io/zenoh-transport/src/unicast/establishment/cookie.rs @@ -19,14 +19,17 @@ use zenoh_buffers::{ }; use zenoh_codec::{RCodec, WCodec, Zenoh080}; use zenoh_crypto::{BlockCipher, PseudoRng}; -use zenoh_protocol::core::{Resolution, WhatAmI, ZenohId}; +use zenoh_protocol::{ + core::{Resolution, WhatAmI, ZenohId}, + transport::BatchSize, 
+}; #[derive(Debug, PartialEq)] pub(crate) struct Cookie { pub(crate) zid: ZenohId, pub(crate) whatami: WhatAmI, pub(crate) resolution: Resolution, - pub(crate) batch_size: u16, + pub(crate) batch_size: BatchSize, pub(crate) nonce: u64, // Extensions pub(crate) ext_qos: ext::qos::StateAccept, @@ -82,7 +85,7 @@ where let whatami = WhatAmI::try_from(wai).map_err(|_| DidntRead)?; let resolution: u8 = self.read(&mut *reader)?; let resolution = Resolution::from(resolution); - let batch_size: u16 = self.read(&mut *reader)?; + let batch_size: BatchSize = self.read(&mut *reader)?; let nonce: u64 = self.read(&mut *reader)?; // Extensions let ext_qos: ext::qos::StateAccept = self.read(&mut *reader)?; From 312c03a2a79e0d8a06904008331148efd2a5475a Mon Sep 17 00:00:00 2001 From: DenisBiryukov91 <155981813+DenisBiryukov91@users.noreply.github.com> Date: Fri, 29 Mar 2024 16:57:52 +0100 Subject: [PATCH 083/124] Query.reply and reply_del, now accept TryIntoKeyExpr instead of IntoKeyExpr (#878) --- zenoh/src/queryable.rs | 41 ++++++++++++++++++++++------------------- zenoh/tests/routing.rs | 2 +- zenoh/tests/session.rs | 5 +---- 3 files changed, 24 insertions(+), 24 deletions(-) diff --git a/zenoh/src/queryable.rs b/zenoh/src/queryable.rs index 599c0e13be..58589bfe8f 100644 --- a/zenoh/src/queryable.rs +++ b/zenoh/src/queryable.rs @@ -111,7 +111,7 @@ impl Query { #[inline(always)] #[cfg(feature = "unstable")] #[doc(hidden)] - pub fn reply_sample(&self, sample: Sample) -> ReplyBuilder<'_> { + pub fn reply_sample(&self, sample: Sample) -> ReplyBuilder<'_, 'static> { let Sample { key_expr, payload, @@ -126,7 +126,7 @@ impl Query { } = sample; ReplyBuilder { query: self, - key_expr, + key_expr: Ok(key_expr), payload, kind, encoding, @@ -145,18 +145,19 @@ impl Query { /// Unless the query has enabled disjoint replies (you can check this through [`Query::accepts_replies`]), /// replying on a disjoint key expression will result in an error when resolving the reply. 
#[inline(always)] - pub fn reply( + pub fn reply<'b, TryIntoKeyExpr, IntoPayload>( &self, - key_expr: IntoKeyExpr, + key_expr: TryIntoKeyExpr, payload: IntoPayload, - ) -> ReplyBuilder<'_> + ) -> ReplyBuilder<'_, 'b> where - IntoKeyExpr: Into>, + TryIntoKeyExpr: TryInto>, + >>::Error: Into, IntoPayload: Into, { ReplyBuilder { query: self, - key_expr: key_expr.into(), + key_expr: key_expr.try_into().map_err(Into::into), payload: payload.into(), kind: SampleKind::Put, timestamp: None, @@ -187,13 +188,14 @@ impl Query { /// Unless the query has enabled disjoint replies (you can check this through [`Query::accepts_replies`]), /// replying on a disjoint key expression will result in an error when resolving the reply. #[inline(always)] - pub fn reply_del(&self, key_expr: IntoKeyExpr) -> ReplyBuilder<'_> + pub fn reply_del<'b, TryIntoKeyExpr>(&self, key_expr: TryIntoKeyExpr) -> ReplyBuilder<'_, 'b> where - IntoKeyExpr: Into>, + TryIntoKeyExpr: TryInto>, + >>::Error: Into, { ReplyBuilder { query: self, - key_expr: key_expr.into(), + key_expr: key_expr.try_into().map_err(Into::into), payload: Payload::empty(), kind: SampleKind::Delete, timestamp: None, @@ -248,9 +250,9 @@ impl fmt::Display for Query { /// A builder returned by [`Query::reply()`](Query::reply) or [`Query::reply()`](Query::reply). 
#[must_use = "Resolvables do nothing unless you resolve them using the `res` method from either `SyncResolve` or `AsyncResolve`"] #[derive(Debug)] -pub struct ReplyBuilder<'a> { +pub struct ReplyBuilder<'a, 'b> { query: &'a Query, - key_expr: KeyExpr<'static>, + key_expr: ZResult>, payload: Payload, kind: SampleKind, encoding: Encoding, @@ -270,7 +272,7 @@ pub struct ReplyErrBuilder<'a> { value: Value, } -impl<'a> ReplyBuilder<'a> { +impl<'a, 'b> ReplyBuilder<'a, 'b> { #[zenoh_macros::unstable] pub fn with_attachment(mut self, attachment: Attachment) -> Self { self.attachment = Some(attachment); @@ -292,16 +294,17 @@ impl<'a> ReplyBuilder<'a> { } } -impl<'a> Resolvable for ReplyBuilder<'a> { +impl<'a, 'b> Resolvable for ReplyBuilder<'a, 'b> { type To = ZResult<()>; } -impl SyncResolve for ReplyBuilder<'_> { +impl<'a, 'b> SyncResolve for ReplyBuilder<'a, 'b> { fn res_sync(self) -> ::To { + let key_expr = self.key_expr?; if !self.query._accepts_any_replies().unwrap_or(false) - && !self.query.key_expr().intersects(&self.key_expr) + && !self.query.key_expr().intersects(&key_expr) { - bail!("Attempted to reply on `{}`, which does not intersect with query `{}`, despite query only allowing replies on matching key expressions", self.key_expr, self.query.key_expr()) + bail!("Attempted to reply on `{}`, which does not intersect with query `{}`, despite query only allowing replies on matching key expressions", &key_expr, self.query.key_expr()) } #[allow(unused_mut)] // will be unused if feature = "unstable" is not enabled let mut ext_sinfo = None; @@ -318,7 +321,7 @@ impl SyncResolve for ReplyBuilder<'_> { rid: self.query.inner.qid, wire_expr: WireExpr { scope: 0, - suffix: std::borrow::Cow::Owned(self.key_expr.into()), + suffix: std::borrow::Cow::Owned(key_expr.into()), mapping: Mapping::Sender, }, payload: ResponseBody::Reply(zenoh::Reply { @@ -360,7 +363,7 @@ impl SyncResolve for ReplyBuilder<'_> { } } -impl<'a> AsyncResolve for ReplyBuilder<'a> { +impl<'a, 'b> 
AsyncResolve for ReplyBuilder<'a, 'b> { type Future = Ready; fn res_async(self) -> Self::Future { diff --git a/zenoh/tests/routing.rs b/zenoh/tests/routing.rs index c34d06690a..b90f0f568f 100644 --- a/zenoh/tests/routing.rs +++ b/zenoh/tests/routing.rs @@ -137,7 +137,7 @@ impl Task { tokio::select! { _ = token.cancelled() => break, query = queryable.recv_async() => { - query?.reply(KeyExpr::try_from(ke.to_owned())?, payload.clone()).res_async().await?; + query?.reply(ke.to_owned(), payload.clone()).res_async().await?; }, } } diff --git a/zenoh/tests/session.rs b/zenoh/tests/session.rs index 5e86499bc7..8c2d2e9937 100644 --- a/zenoh/tests/session.rs +++ b/zenoh/tests/session.rs @@ -164,10 +164,7 @@ async fn test_session_qryrep(peer01: &Session, peer02: &Session, reliability: Re "ok_del" => { tokio::task::block_in_place(|| { tokio::runtime::Handle::current().block_on(async { - ztimeout!(query - .reply_del(KeyExpr::try_from(key_expr).unwrap()) - .res_async()) - .unwrap() + ztimeout!(query.reply_del(key_expr).res_async()).unwrap() }) }); } From 43a49379c0f126032f89505789d158b908c62ad6 Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Fri, 29 Mar 2024 18:18:28 +0100 Subject: [PATCH 084/124] SampleBuilder uses generics --- Cargo.lock | 50 ++--- zenoh/src/queryable.rs | 97 ++-------- zenoh/src/sample/builder.rs | 363 +++++++++++++++--------------------- 3 files changed, 189 insertions(+), 321 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index d3ea8978b5..9dff82ad80 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -165,9 +165,9 @@ checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" [[package]] name = "anstream" -version = "0.6.12" +version = "0.6.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96b09b5178381e0874812a9b157f7fe84982617e48f71f4e3235482775e5b540" +checksum = "d96bd03f33fe50a863e394ee9718a706f988b9079b20c3784fb726e7678b62fb" dependencies = [ "anstyle", "anstyle-parse", @@ -1103,9 +1103,9 @@ 
dependencies = [ [[package]] name = "env_logger" -version = "0.11.2" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c012a26a7f605efc424dd53697843a72be7dc86ad2d01f7814337794a12231d" +checksum = "38b35839ba51819680ba087cd351788c9a3c476841207e0b8cee0b04722343b9" dependencies = [ "anstream", "anstyle", @@ -1122,9 +1122,9 @@ checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "erased-serde" -version = "0.3.31" +version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c138974f9d5e7fe373eb04df7cae98833802ae4b11c24ac7039a21d5af4b26c" +checksum = "2b73807008a3c7f171cc40312f37d95ef0396e048b5848d775f54b1a4dd4a0d3" dependencies = [ "serde", ] @@ -1541,9 +1541,9 @@ dependencies = [ [[package]] name = "http" -version = "1.0.0" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b32afd38673a8016f7c9ae69e5af41a58f81b1d31689040f2f1959594ce194ea" +checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" dependencies = [ "bytes", "fnv", @@ -1854,9 +1854,9 @@ dependencies = [ [[package]] name = "log" -version = "0.4.20" +version = "0.4.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" +checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c" dependencies = [ "serde", "value-bag", @@ -2865,9 +2865,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.22.2" +version = "0.22.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e87c9956bd9807afa1f77e0f7594af32566e830e088a5576d27c5b6f30f49d41" +checksum = "99008d7ad0bbbea527ec27bddbc0e432c5b87d8175178cee68d2eec9c4a1813c" dependencies = [ "log", "ring 0.17.6", @@ -2923,9 +2923,9 @@ dependencies = [ [[package]] name = "rustls-pki-types" -version = "1.3.1" +version = "1.4.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ede67b28608b4c60685c7d54122d4400d90f62b40caee7700e700380a390fa8" +checksum = "ecd36cc4259e3e4514335c4a138c6b43171a8d61d8f5c9348f9fc7529416f247" [[package]] name = "rustls-webpki" @@ -3701,9 +3701,9 @@ dependencies = [ [[package]] name = "tokio" -version = "1.36.0" +version = "1.37.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61285f6515fa018fb2d1e46eb21223fff441ee8db5d0f1435e8ab4f5cdb80931" +checksum = "1adbebffeca75fcfd058afa480fb6c0b81e165a0323f9c9d39c9697e37c46787" dependencies = [ "backtrace", "bytes", @@ -3743,7 +3743,7 @@ version = "0.25.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "775e0c0f0adb3a2f22a00c4745d728b479985fc15ee7ca6a2608388c5569860f" dependencies = [ - "rustls 0.22.2", + "rustls 0.22.3", "rustls-pki-types", "tokio", ] @@ -4030,9 +4030,9 @@ dependencies = [ [[package]] name = "value-bag" -version = "1.4.1" +version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d92ccd67fb88503048c01b59152a04effd0782d035a83a6d256ce6085f08f4a3" +checksum = "74797339c3b98616c009c7c3eb53a0ce41e85c8ec66bd3db96ed132d20cfdee8" dependencies = [ "value-bag-serde1", "value-bag-sval2", @@ -4040,9 +4040,9 @@ dependencies = [ [[package]] name = "value-bag-serde1" -version = "1.4.1" +version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0b9f3feef403a50d4d67e9741a6d8fc688bcbb4e4f31bd4aab72cc690284394" +checksum = "cc35703541cbccb5278ef7b589d79439fc808ff0b5867195a3230f9a47421d39" dependencies = [ "erased-serde", "serde", @@ -4051,9 +4051,9 @@ dependencies = [ [[package]] name = "value-bag-sval2" -version = "1.4.1" +version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30b24f4146b6f3361e91cbf527d1fb35e9376c3c0cef72ca5ec5af6d640fad7d" +checksum = "285b43c29d0b4c0e65aad24561baee67a1b69dc9be9375d4a85138cbf556f7f8" 
dependencies = [ "sval", "sval_buffer", @@ -4676,7 +4676,7 @@ dependencies = [ "flume", "futures", "log", - "rustls 0.22.2", + "rustls 0.22.3", "rustls-webpki 0.102.2", "serde", "tokio", @@ -4763,7 +4763,7 @@ dependencies = [ "base64 0.21.4", "futures", "log", - "rustls 0.22.2", + "rustls 0.22.3", "rustls-pemfile 2.0.0", "rustls-pki-types", "rustls-webpki 0.102.2", diff --git a/zenoh/src/queryable.rs b/zenoh/src/queryable.rs index 5df0d73d44..0e977f3def 100644 --- a/zenoh/src/queryable.rs +++ b/zenoh/src/queryable.rs @@ -19,8 +19,8 @@ use crate::handlers::{locked, DefaultHandler}; use crate::net::primitives::Primitives; use crate::prelude::*; use crate::sample::builder::{ - DeleteSampleBuilder, PutSampleBuilder, QoSBuilderTrait, SampleBuilder, SampleBuilderTrait, - TimestampBuilderTrait, ValueBuilderTrait, + OpDelete, OpPut, QoSBuilderTrait, SampleBuilder, SampleBuilderTrait, TimestampBuilderTrait, + ValueBuilderTrait, }; use crate::sample::SourceInfo; use crate::Id; @@ -115,10 +115,10 @@ impl Query { #[inline(always)] #[cfg(feature = "unstable")] #[doc(hidden)] - pub fn reply_sample(&self, sample: Sample) -> ReplySampleBuilder<'_> { - ReplySampleBuilder { + pub fn reply_sample(&self, sample: Sample) -> ReplySample<'_> { + ReplySample { query: self, - sample_builder: sample.into(), + sample, } } @@ -168,7 +168,7 @@ impl Query { IntoKeyExpr: Into>, { let sample_builder = - DeleteSampleBuilder::new(key_expr).with_qos(response::ext::QoSType::RESPONSE.into()); + SampleBuilder::delete(key_expr).qos(response::ext::QoSType::RESPONSE.into()); ReplyDelBuilder { query: self, sample_builder, @@ -214,91 +214,22 @@ impl fmt::Display for Query { } } -pub struct ReplySampleBuilder<'a> { +pub struct ReplySample<'a> { query: &'a Query, - sample_builder: SampleBuilder, + sample: Sample, } -impl<'a> ReplySampleBuilder<'a> { - pub fn put(self, payload: IntoPayload) -> ReplyBuilder<'a> - where - IntoPayload: Into, - { - let builder = ReplyBuilder { - query: self.query, - 
sample_builder: self.sample_builder.into(), - }; - builder.payload(payload) - } - pub fn delete(self) -> ReplyDelBuilder<'a> { - ReplyDelBuilder { - query: self.query, - sample_builder: self.sample_builder.into(), - } - } -} - -impl TimestampBuilderTrait for ReplySampleBuilder<'_> { - fn timestamp>>(self, timestamp: T) -> Self { - Self { - sample_builder: self.sample_builder.timestamp(timestamp), - ..self - } - } -} - -impl SampleBuilderTrait for ReplySampleBuilder<'_> { - #[cfg(feature = "unstable")] - fn source_info(self, source_info: SourceInfo) -> Self { - Self { - sample_builder: self.sample_builder.source_info(source_info), - ..self - } - } - - #[cfg(feature = "unstable")] - fn attachment>>(self, attachment: T) -> Self { - Self { - sample_builder: self.sample_builder.attachment(attachment), - ..self - } - } -} - -impl QoSBuilderTrait for ReplySampleBuilder<'_> { - fn congestion_control(self, congestion_control: CongestionControl) -> Self { - Self { - sample_builder: self.sample_builder.congestion_control(congestion_control), - ..self - } - } - - fn priority(self, priority: Priority) -> Self { - Self { - sample_builder: self.sample_builder.priority(priority), - ..self - } - } - - fn express(self, is_express: bool) -> Self { - Self { - sample_builder: self.sample_builder.express(is_express), - ..self - } - } -} - -impl Resolvable for ReplySampleBuilder<'_> { +impl Resolvable for ReplySample<'_> { type To = ZResult<()>; } -impl SyncResolve for ReplySampleBuilder<'_> { +impl SyncResolve for ReplySample<'_> { fn res_sync(self) -> ::To { - self.query._reply_sample(self.sample_builder.into()) + self.query._reply_sample(self.sample) } } -impl AsyncResolve for ReplySampleBuilder<'_> { +impl AsyncResolve for ReplySample<'_> { type Future = Ready; fn res_async(self) -> Self::Future { @@ -311,7 +242,7 @@ impl AsyncResolve for ReplySampleBuilder<'_> { #[derive(Debug)] pub struct ReplyBuilder<'a> { query: &'a Query, - sample_builder: PutSampleBuilder, + sample_builder: 
SampleBuilder, } impl TimestampBuilderTrait for ReplyBuilder<'_> { @@ -392,7 +323,7 @@ impl ValueBuilderTrait for ReplyBuilder<'_> { #[derive(Debug)] pub struct ReplyDelBuilder<'a> { query: &'a Query, - sample_builder: DeleteSampleBuilder, + sample_builder: SampleBuilder, } impl TimestampBuilderTrait for ReplyDelBuilder<'_> { diff --git a/zenoh/src/sample/builder.rs b/zenoh/src/sample/builder.rs index 920bd2b7b7..cae58514ff 100644 --- a/zenoh/src/sample/builder.rs +++ b/zenoh/src/sample/builder.rs @@ -12,6 +12,8 @@ // ZettaScale Zenoh Team, // +use std::marker::PhantomData; + use crate::sample::Attachment; use crate::sample::QoS; use crate::sample::QoSBuilder; @@ -63,290 +65,225 @@ pub trait ValueBuilderTrait { } #[derive(Debug)] -pub struct SampleBuilder(Sample); +pub struct OpPut; +#[derive(Debug)] +pub struct OpDelete; +#[derive(Debug)] +pub struct OpAny; -impl SampleBuilder { +#[derive(Debug)] +pub struct SampleBuilder { + sample: Sample, + _t: PhantomData, +} + +impl SampleBuilder { pub fn put( key_expr: IntoKeyExpr, payload: IntoPayload, - ) -> PutSampleBuilder + ) -> SampleBuilder where IntoKeyExpr: Into>, IntoPayload: Into, { - PutSampleBuilder::new(key_expr, payload) + Self { + sample: Sample { + key_expr: key_expr.into(), + payload: payload.into(), + kind: SampleKind::Put, + encoding: Encoding::default(), + timestamp: None, + qos: QoS::default(), + #[cfg(feature = "unstable")] + source_info: SourceInfo::empty(), + #[cfg(feature = "unstable")] + attachment: None, + }, + _t: PhantomData::, + } } - pub fn delete(key_expr: IntoKeyExpr) -> DeleteSampleBuilder +} + +impl SampleBuilder { + pub fn delete(key_expr: IntoKeyExpr) -> SampleBuilder where IntoKeyExpr: Into>, { - DeleteSampleBuilder::new(key_expr) + Self { + sample: Sample { + key_expr: key_expr.into(), + payload: Payload::empty(), + kind: SampleKind::Delete, + encoding: Encoding::default(), + timestamp: None, + qos: QoS::default(), + #[cfg(feature = "unstable")] + source_info: SourceInfo::empty(), + 
#[cfg(feature = "unstable")] + attachment: None, + }, + _t: PhantomData::, + } } +} + +impl SampleBuilder { /// Allows to change keyexpr of [`Sample`] pub fn keyexpr(self, key_expr: IntoKeyExpr) -> Self where IntoKeyExpr: Into>, { - Self(Sample { - key_expr: key_expr.into(), - ..self.0 - }) + Self { + sample: Sample { + key_expr: key_expr.into(), + ..self.sample + }, + _t: PhantomData::, + } } } -impl TimestampBuilderTrait for SampleBuilder { - fn timestamp>>(self, timestamp: T) -> Self { - Self(Sample { - timestamp: timestamp.into(), - ..self.0 - }) +impl TimestampBuilderTrait for SampleBuilder { + fn timestamp>>(self, timestamp: U) -> Self { + Self { + sample: Sample { + timestamp: timestamp.into(), + ..self.sample + }, + _t: PhantomData::, + } } } -impl SampleBuilderTrait for SampleBuilder { +impl SampleBuilderTrait for SampleBuilder { #[zenoh_macros::unstable] fn source_info(self, source_info: SourceInfo) -> Self { - Self(Sample { - source_info, - ..self.0 - }) + Self { + sample: Sample { + source_info, + ..self.sample + }, + _t: PhantomData::, + } } #[zenoh_macros::unstable] - fn attachment>>(self, attachment: T) -> Self { - Self(Sample { - attachment: attachment.into(), - ..self.0 - }) - } -} - -impl QoSBuilderTrait for SampleBuilder { - fn congestion_control(self, congestion_control: CongestionControl) -> Self { - let qos: QoSBuilder = self.0.qos.into(); - let qos = qos.congestion_control(congestion_control).into(); - Self(Sample { qos, ..self.0 }) - } - fn priority(self, priority: Priority) -> Self { - let qos: QoSBuilder = self.0.qos.into(); - let qos = qos.priority(priority).into(); - Self(Sample { qos, ..self.0 }) - } - fn express(self, is_express: bool) -> Self { - let qos: QoSBuilder = self.0.qos.into(); - let qos = qos.express(is_express).into(); - Self(Sample { qos, ..self.0 }) - } -} - -#[derive(Debug)] -pub struct PutSampleBuilder(SampleBuilder); - -impl From for PutSampleBuilder { - fn from(sample_builder: SampleBuilder) -> Self { - 
Self(SampleBuilder(Sample { - kind: SampleKind::Put, - ..sample_builder.0 - })) - } -} - -impl PutSampleBuilder { - fn new(key_expr: IntoKeyExpr, payload: IntoPayload) -> Self - where - IntoKeyExpr: Into>, - IntoPayload: Into, - { - Self(SampleBuilder::from(Sample { - key_expr: key_expr.into(), - payload: payload.into(), - kind: SampleKind::Put, - encoding: Encoding::default(), - timestamp: None, - qos: QoS::default(), - #[cfg(feature = "unstable")] - source_info: SourceInfo::empty(), - #[cfg(feature = "unstable")] - attachment: None, - })) - } - /// Allows to change keyexpr of [`Sample`] - pub fn keyexpr(self, key_expr: IntoKeyExpr) -> Self - where - IntoKeyExpr: Into>, - { - Self(self.0.keyexpr(key_expr)) - } - // It's convenient to set QoS as a whole for internal usage. For user API there are `congestion_control`, `priority` and `express` methods. - pub(crate) fn qos(self, qos: QoS) -> Self { - Self(SampleBuilder(Sample { qos, ..self.0 .0 })) - } -} - -impl TimestampBuilderTrait for PutSampleBuilder { - fn timestamp>>(self, timestamp: T) -> Self { - Self(self.0.timestamp(timestamp)) + fn attachment>>(self, attachment: U) -> Self { + Self { + sample: Sample { + attachment: attachment.into(), + ..self.sample + }, + _t: PhantomData::, + } } } -impl SampleBuilderTrait for PutSampleBuilder { - #[zenoh_macros::unstable] - fn source_info(self, source_info: SourceInfo) -> Self { - Self(self.0.source_info(source_info)) - } - #[zenoh_macros::unstable] - fn attachment>>(self, attachment: T) -> Self { - Self(self.0.attachment(attachment)) +impl SampleBuilder { + pub fn qos(self, qos: QoS) -> Self { + Self { + sample: Sample { qos, ..self.sample }, + _t: PhantomData::, + } } } -impl QoSBuilderTrait for PutSampleBuilder { +impl QoSBuilderTrait for SampleBuilder { fn congestion_control(self, congestion_control: CongestionControl) -> Self { - Self(self.0.congestion_control(congestion_control)) + let qos: QoSBuilder = self.sample.qos.into(); + let qos = 
qos.congestion_control(congestion_control).into(); + Self { + sample: Sample { qos, ..self.sample }, + _t: PhantomData::, + } } fn priority(self, priority: Priority) -> Self { - Self(self.0.priority(priority)) + let qos: QoSBuilder = self.sample.qos.into(); + let qos = qos.priority(priority).into(); + Self { + sample: Sample { qos, ..self.sample }, + _t: PhantomData::, + } } fn express(self, is_express: bool) -> Self { - Self(self.0.express(is_express)) + let qos: QoSBuilder = self.sample.qos.into(); + let qos = qos.express(is_express).into(); + Self { + sample: Sample { qos, ..self.sample }, + _t: PhantomData::, + } } } -impl ValueBuilderTrait for PutSampleBuilder { +impl ValueBuilderTrait for SampleBuilder { fn encoding>(self, encoding: T) -> Self { - Self(SampleBuilder(Sample { - encoding: encoding.into(), - ..self.0 .0 - })) + Self { + sample: Sample { + encoding: encoding.into(), + ..self.sample + }, + _t: PhantomData::, + } } fn payload>(self, payload: T) -> Self { - Self(SampleBuilder(Sample { - payload: payload.into(), - ..self.0 .0 - })) + Self { + sample: Sample { + payload: payload.into(), + ..self.sample + }, + _t: PhantomData::, + } } fn value>(self, value: T) -> Self { let Value { payload, encoding } = value.into(); - Self(SampleBuilder(Sample { - payload, - encoding, - ..self.0 .0 - })) - } -} - -#[derive(Debug)] -pub struct DeleteSampleBuilder(SampleBuilder); - -impl From for DeleteSampleBuilder { - fn from(sample_builder: SampleBuilder) -> Self { - Self(SampleBuilder(Sample { - kind: SampleKind::Delete, - ..sample_builder.0 - })) - } -} - -impl DeleteSampleBuilder { - pub fn new(key_expr: IntoKeyExpr) -> Self - where - IntoKeyExpr: Into>, - { - Self(SampleBuilder::from(Sample { - key_expr: key_expr.into(), - payload: Payload::empty(), - kind: SampleKind::Delete, - encoding: Encoding::default(), - timestamp: None, - qos: QoS::default(), - #[cfg(feature = "unstable")] - source_info: SourceInfo::empty(), - #[cfg(feature = "unstable")] - attachment: 
None, - })) - } - /// Allows to change keyexpr of [`Sample`] - pub fn with_keyexpr(self, key_expr: IntoKeyExpr) -> Self - where - IntoKeyExpr: Into>, - { - Self(self.0.keyexpr(key_expr)) - } - // It's convenient to set QoS as a whole for internal usage. For user API there are `congestion_control`, `priority` and `express` methods. - pub(crate) fn with_qos(self, qos: QoS) -> Self { - Self(SampleBuilder(Sample { qos, ..self.0 .0 })) - } -} - -impl TimestampBuilderTrait for DeleteSampleBuilder { - fn timestamp>>(self, timestamp: T) -> Self { - Self(self.0.timestamp(timestamp)) - } -} - -impl SampleBuilderTrait for DeleteSampleBuilder { - #[zenoh_macros::unstable] - fn source_info(self, source_info: SourceInfo) -> Self { - Self(self.0.source_info(source_info)) - } - #[zenoh_macros::unstable] - fn attachment>>(self, attachment: T) -> Self { - Self(self.0.attachment(attachment)) - } -} - -impl QoSBuilderTrait for DeleteSampleBuilder { - fn congestion_control(self, congestion_control: CongestionControl) -> Self { - Self(self.0.congestion_control(congestion_control)) - } - fn priority(self, priority: Priority) -> Self { - Self(self.0.priority(priority)) - } - fn express(self, is_express: bool) -> Self { - Self(self.0.express(is_express)) + Self { + sample: Sample { + payload, + encoding, + ..self.sample + }, + _t: PhantomData::, + } } } -impl From for SampleBuilder { +impl From for SampleBuilder { fn from(sample: Sample) -> Self { - SampleBuilder(sample) + SampleBuilder { + sample, + _t: PhantomData::, + } } } -impl TryFrom for PutSampleBuilder { +impl TryFrom for SampleBuilder { type Error = zresult::Error; fn try_from(sample: Sample) -> Result { if sample.kind != SampleKind::Put { bail!("Sample is not a put sample") } - Ok(Self(SampleBuilder(sample))) + Ok(SampleBuilder { + sample, + _t: PhantomData::, + }) } } -impl TryFrom for DeleteSampleBuilder { +impl TryFrom for SampleBuilder { type Error = zresult::Error; fn try_from(sample: Sample) -> Result { if sample.kind != 
SampleKind::Delete { bail!("Sample is not a delete sample") } - Ok(Self(SampleBuilder(sample))) - } -} - -impl From for Sample { - fn from(sample_builder: SampleBuilder) -> Self { - sample_builder.0 - } -} - -impl From for Sample { - fn from(put_sample_builder: PutSampleBuilder) -> Self { - put_sample_builder.0 .0 + Ok(SampleBuilder { + sample, + _t: PhantomData::, + }) } } -impl From for Sample { - fn from(delete_sample_builder: DeleteSampleBuilder) -> Self { - delete_sample_builder.0 .0 +impl From> for Sample { + fn from(sample_builder: SampleBuilder) -> Self { + sample_builder.sample } } From 6c305a130043a66ee58f3985eb4f71eb708ff5dc Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Fri, 29 Mar 2024 18:35:40 +0100 Subject: [PATCH 085/124] Improve Query builders with generics --- zenoh/src/queryable.rs | 178 +++++++++++++----------------------- zenoh/src/sample/builder.rs | 65 ++++++------- 2 files changed, 99 insertions(+), 144 deletions(-) diff --git a/zenoh/src/queryable.rs b/zenoh/src/queryable.rs index 0e977f3def..fea148e6e6 100644 --- a/zenoh/src/queryable.rs +++ b/zenoh/src/queryable.rs @@ -19,7 +19,7 @@ use crate::handlers::{locked, DefaultHandler}; use crate::net::primitives::Primitives; use crate::prelude::*; use crate::sample::builder::{ - OpDelete, OpPut, QoSBuilderTrait, SampleBuilder, SampleBuilderTrait, TimestampBuilderTrait, + op, QoSBuilderTrait, SampleBuilder, SampleBuilderTrait, TimestampBuilderTrait, ValueBuilderTrait, }; use crate::sample::SourceInfo; @@ -132,18 +132,19 @@ impl Query { &self, key_expr: IntoKeyExpr, payload: IntoPayload, - ) -> ReplyBuilder<'_> + ) -> ReplySampleBuilder<'_, op::Put> where IntoKeyExpr: Into>, IntoPayload: Into, { let sample_builder = SampleBuilder::put(key_expr, payload).qos(response::ext::QoSType::RESPONSE.into()); - ReplyBuilder { + ReplySampleBuilder { query: self, sample_builder, } } + /// Sends a error reply to this Query. 
/// #[inline(always)] @@ -163,13 +164,16 @@ impl Query { /// Unless the query has enabled disjoint replies (you can check this through [`Query::accepts_replies`]), /// replying on a disjoint key expression will result in an error when resolving the reply. #[inline(always)] - pub fn reply_del(&self, key_expr: IntoKeyExpr) -> ReplyDelBuilder<'_> + pub fn reply_del( + &self, + key_expr: IntoKeyExpr, + ) -> ReplySampleBuilder<'_, op::Delete> where IntoKeyExpr: Into>, { let sample_builder = SampleBuilder::delete(key_expr).qos(response::ext::QoSType::RESPONSE.into()); - ReplyDelBuilder { + ReplySampleBuilder { query: self, sample_builder, } @@ -240,13 +244,13 @@ impl AsyncResolve for ReplySample<'_> { /// A builder returned by [`Query::reply()`](Query::reply) #[must_use = "Resolvables do nothing unless you resolve them using the `res` method from either `SyncResolve` or `AsyncResolve`"] #[derive(Debug)] -pub struct ReplyBuilder<'a> { +pub struct ReplySampleBuilder<'a, T> { query: &'a Query, - sample_builder: SampleBuilder, + sample_builder: SampleBuilder, } -impl TimestampBuilderTrait for ReplyBuilder<'_> { - fn timestamp>>(self, timestamp: T) -> Self { +impl TimestampBuilderTrait for ReplySampleBuilder<'_, T> { + fn timestamp>>(self, timestamp: U) -> Self { Self { sample_builder: self.sample_builder.timestamp(timestamp), ..self @@ -254,7 +258,7 @@ impl TimestampBuilderTrait for ReplyBuilder<'_> { } } -impl SampleBuilderTrait for ReplyBuilder<'_> { +impl SampleBuilderTrait for ReplySampleBuilder<'_, T> { #[cfg(feature = "unstable")] fn source_info(self, source_info: SourceInfo) -> Self { Self { @@ -264,7 +268,7 @@ impl SampleBuilderTrait for ReplyBuilder<'_> { } #[cfg(feature = "unstable")] - fn attachment>>(self, attachment: T) -> Self { + fn attachment>>(self, attachment: U) -> Self { Self { sample_builder: self.sample_builder.attachment(attachment), ..self @@ -272,7 +276,7 @@ impl SampleBuilderTrait for ReplyBuilder<'_> { } } -impl QoSBuilderTrait for ReplyBuilder<'_> 
{ +impl QoSBuilderTrait for ReplySampleBuilder<'_, T> { fn congestion_control(self, congestion_control: CongestionControl) -> Self { Self { sample_builder: self.sample_builder.congestion_control(congestion_control), @@ -295,7 +299,7 @@ impl QoSBuilderTrait for ReplyBuilder<'_> { } } -impl ValueBuilderTrait for ReplyBuilder<'_> { +impl ValueBuilderTrait for ReplySampleBuilder<'_, op::Put> { fn encoding>(self, encoding: T) -> Self { Self { sample_builder: self.sample_builder.encoding(encoding), @@ -318,101 +322,86 @@ impl ValueBuilderTrait for ReplyBuilder<'_> { } } -/// A builder returned by [`Query::reply_del()`](Query::reply) -#[must_use = "Resolvables do nothing unless you resolve them using the `res` method from either `SyncResolve` or `AsyncResolve`"] -#[derive(Debug)] -pub struct ReplyDelBuilder<'a> { - query: &'a Query, - sample_builder: SampleBuilder, +impl<'a, T> Resolvable for ReplySampleBuilder<'a, T> { + type To = ZResult<()>; } -impl TimestampBuilderTrait for ReplyDelBuilder<'_> { - fn timestamp>>(self, timestamp: T) -> Self { - Self { - sample_builder: self.sample_builder.timestamp(timestamp), - ..self - } +impl SyncResolve for ReplySampleBuilder<'_, T> { + fn res_sync(self) -> ::To { + self.query._reply_sample(self.sample_builder.into()) } } -impl SampleBuilderTrait for ReplyDelBuilder<'_> { - #[cfg(feature = "unstable")] - fn source_info(self, source_info: SourceInfo) -> Self { - Self { - sample_builder: self.sample_builder.source_info(source_info), - ..self - } - } +impl<'a, T> AsyncResolve for ReplySampleBuilder<'a, T> { + type Future = Ready; - #[cfg(feature = "unstable")] - fn attachment>>(self, attachment: T) -> Self { - Self { - sample_builder: self.sample_builder.attachment(attachment), - ..self - } + fn res_async(self) -> Self::Future { + std::future::ready(self.res_sync()) } } -impl QoSBuilderTrait for ReplyDelBuilder<'_> { - fn congestion_control(self, congestion_control: CongestionControl) -> Self { +/// A builder returned by 
[`Query::reply_err()`](Query::reply_err). +#[must_use = "Resolvables do nothing unless you resolve them using the `res` method from either `SyncResolve` or `AsyncResolve`"] +#[derive(Debug)] +pub struct ReplyErrBuilder<'a> { + query: &'a Query, + value: Value, +} + +impl ValueBuilderTrait for ReplyErrBuilder<'_> { + fn encoding>(self, encoding: T) -> Self { Self { - sample_builder: self.sample_builder.congestion_control(congestion_control), + value: self.value.encoding(encoding), ..self } } - fn priority(self, priority: Priority) -> Self { + fn payload>(self, payload: T) -> Self { Self { - sample_builder: self.sample_builder.priority(priority), + value: self.value.payload(payload), ..self } } - fn express(self, is_express: bool) -> Self { + fn value>(self, value: T) -> Self { Self { - sample_builder: self.sample_builder.express(is_express), + value: value.into(), ..self } } } -/// A builder returned by [`Query::reply_err()`](Query::reply_err). -#[must_use = "Resolvables do nothing unless you resolve them using the `res` method from either `SyncResolve` or `AsyncResolve`"] -#[derive(Debug)] -pub struct ReplyErrBuilder<'a> { - query: &'a Query, - value: Value, -} - -impl<'a> Resolvable for ReplyBuilder<'a> { - type To = ZResult<()>; -} - -impl SyncResolve for ReplyBuilder<'_> { - fn res_sync(self) -> ::To { - self.query._reply_sample(self.sample_builder.into()) - } -} - -impl<'a> Resolvable for ReplyDelBuilder<'a> { +impl<'a> Resolvable for ReplyErrBuilder<'a> { type To = ZResult<()>; } -impl SyncResolve for ReplyDelBuilder<'_> { +impl SyncResolve for ReplyErrBuilder<'_> { fn res_sync(self) -> ::To { - self.query._reply_sample(self.sample_builder.into()) - } -} - -impl<'a> AsyncResolve for ReplyBuilder<'a> { - type Future = Ready; - - fn res_async(self) -> Self::Future { - std::future::ready(self.res_sync()) + self.query.inner.primitives.send_response(Response { + rid: self.query.inner.qid, + wire_expr: WireExpr { + scope: 0, + suffix: 
std::borrow::Cow::Owned(self.query.key_expr().as_str().to_owned()), + mapping: Mapping::Sender, + }, + payload: ResponseBody::Err(zenoh::Err { + encoding: self.value.encoding.into(), + ext_sinfo: None, + ext_unknown: vec![], + payload: self.value.payload.into(), + }), + ext_qos: response::ext::QoSType::RESPONSE, + ext_tstamp: None, + ext_respid: Some(response::ext::ResponderIdType { + zid: self.query.inner.zid, + eid: self.query.eid, + }), + }); + Ok(()) } } -impl<'a> AsyncResolve for ReplyDelBuilder<'a> { +impl<'a> AsyncResolve for ReplyErrBuilder<'a> { type Future = Ready; fn res_async(self) -> Self::Future { @@ -477,43 +466,6 @@ impl Query { } } -impl<'a> Resolvable for ReplyErrBuilder<'a> { - type To = ZResult<()>; -} - -impl SyncResolve for ReplyErrBuilder<'_> { - fn res_sync(self) -> ::To { - self.query.inner.primitives.send_response(Response { - rid: self.query.inner.qid, - wire_expr: WireExpr { - scope: 0, - suffix: std::borrow::Cow::Owned(self.query.key_expr().as_str().to_owned()), - mapping: Mapping::Sender, - }, - payload: ResponseBody::Err(zenoh::Err { - encoding: self.value.encoding.into(), - ext_sinfo: None, - ext_unknown: vec![], - payload: self.value.payload.into(), - }), - ext_qos: response::ext::QoSType::RESPONSE, - ext_tstamp: None, - ext_respid: Some(response::ext::ResponderIdType { - zid: self.query.inner.zid, - eid: self.query.eid, - }), - }); - Ok(()) - } -} -impl<'a> AsyncResolve for ReplyErrBuilder<'a> { - type Future = Ready; - - fn res_async(self) -> Self::Future { - std::future::ready(self.res_sync()) - } -} - pub(crate) struct QueryableState { pub(crate) id: Id, pub(crate) key_expr: WireExpr<'static>, diff --git a/zenoh/src/sample/builder.rs b/zenoh/src/sample/builder.rs index cae58514ff..1ec20209aa 100644 --- a/zenoh/src/sample/builder.rs +++ b/zenoh/src/sample/builder.rs @@ -64,12 +64,16 @@ pub trait ValueBuilderTrait { fn value>(self, value: T) -> Self; } -#[derive(Debug)] -pub struct OpPut; -#[derive(Debug)] -pub struct OpDelete; 
-#[derive(Debug)] -pub struct OpAny; +pub mod op { + #[derive(Debug)] + pub struct Put; + #[derive(Debug)] + pub struct Delete; + #[derive(Debug)] + pub struct Error; + #[derive(Debug)] + pub struct Any; +} #[derive(Debug)] pub struct SampleBuilder { @@ -77,11 +81,11 @@ pub struct SampleBuilder { _t: PhantomData, } -impl SampleBuilder { +impl SampleBuilder { pub fn put( key_expr: IntoKeyExpr, payload: IntoPayload, - ) -> SampleBuilder + ) -> SampleBuilder where IntoKeyExpr: Into>, IntoPayload: Into, @@ -99,13 +103,13 @@ impl SampleBuilder { #[cfg(feature = "unstable")] attachment: None, }, - _t: PhantomData::, + _t: PhantomData::, } } } -impl SampleBuilder { - pub fn delete(key_expr: IntoKeyExpr) -> SampleBuilder +impl SampleBuilder { + pub fn delete(key_expr: IntoKeyExpr) -> SampleBuilder where IntoKeyExpr: Into>, { @@ -122,7 +126,7 @@ impl SampleBuilder { #[cfg(feature = "unstable")] attachment: None, }, - _t: PhantomData::, + _t: PhantomData::, } } } @@ -141,6 +145,14 @@ impl SampleBuilder { _t: PhantomData::, } } + + // Allows to change qos as a whole of [`Sample`] + pub fn qos(self, qos: QoS) -> Self { + Self { + sample: Sample { qos, ..self.sample }, + _t: PhantomData::, + } + } } impl TimestampBuilderTrait for SampleBuilder { @@ -179,15 +191,6 @@ impl SampleBuilderTrait for SampleBuilder { } } -impl SampleBuilder { - pub fn qos(self, qos: QoS) -> Self { - Self { - sample: Sample { qos, ..self.sample }, - _t: PhantomData::, - } - } -} - impl QoSBuilderTrait for SampleBuilder { fn congestion_control(self, congestion_control: CongestionControl) -> Self { let qos: QoSBuilder = self.sample.qos.into(); @@ -215,14 +218,14 @@ impl QoSBuilderTrait for SampleBuilder { } } -impl ValueBuilderTrait for SampleBuilder { +impl ValueBuilderTrait for SampleBuilder { fn encoding>(self, encoding: T) -> Self { Self { sample: Sample { encoding: encoding.into(), ..self.sample }, - _t: PhantomData::, + _t: PhantomData::, } } fn payload>(self, payload: T) -> Self { @@ -231,7 +234,7 
@@ impl ValueBuilderTrait for SampleBuilder { payload: payload.into(), ..self.sample }, - _t: PhantomData::, + _t: PhantomData::, } } fn value>(self, value: T) -> Self { @@ -242,21 +245,21 @@ impl ValueBuilderTrait for SampleBuilder { encoding, ..self.sample }, - _t: PhantomData::, + _t: PhantomData::, } } } -impl From for SampleBuilder { +impl From for SampleBuilder { fn from(sample: Sample) -> Self { SampleBuilder { sample, - _t: PhantomData::, + _t: PhantomData::, } } } -impl TryFrom for SampleBuilder { +impl TryFrom for SampleBuilder { type Error = zresult::Error; fn try_from(sample: Sample) -> Result { if sample.kind != SampleKind::Put { @@ -264,12 +267,12 @@ impl TryFrom for SampleBuilder { } Ok(SampleBuilder { sample, - _t: PhantomData::, + _t: PhantomData::, }) } } -impl TryFrom for SampleBuilder { +impl TryFrom for SampleBuilder { type Error = zresult::Error; fn try_from(sample: Sample) -> Result { if sample.kind != SampleKind::Delete { @@ -277,7 +280,7 @@ impl TryFrom for SampleBuilder { } Ok(SampleBuilder { sample, - _t: PhantomData::, + _t: PhantomData::, }) } } From bca953da3de684228241cbd1c8bc8641945b2b84 Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Fri, 29 Mar 2024 18:36:58 +0100 Subject: [PATCH 086/124] Reorg sample files --- zenoh/src/sample.rs | 655 -------------------------------------------- 1 file changed, 655 deletions(-) delete mode 100644 zenoh/src/sample.rs diff --git a/zenoh/src/sample.rs b/zenoh/src/sample.rs deleted file mode 100644 index 2b71105d5e..0000000000 --- a/zenoh/src/sample.rs +++ /dev/null @@ -1,655 +0,0 @@ -// -// Copyright (c) 2023 ZettaScale Technology -// -// This program and the accompanying materials are made available under the -// terms of the Eclipse Public License 2.0 which is available at -// http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 -// which is available at https://www.apache.org/licenses/LICENSE-2.0. 
-// -// SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 -// -// Contributors: -// ZettaScale Zenoh Team, -// - -//! Sample primitives -use crate::encoding::Encoding; -use crate::payload::Payload; -use crate::prelude::{KeyExpr, Value}; -use crate::sample::builder::{QoSBuilderTrait, ValueBuilderTrait}; -use crate::time::Timestamp; -use crate::Priority; -#[zenoh_macros::unstable] -use serde::Serialize; -use std::{convert::TryFrom, fmt}; -use zenoh_protocol::core::EntityGlobalId; -use zenoh_protocol::network::declare::ext::QoSType; -use zenoh_protocol::{core::CongestionControl, zenoh}; - -pub mod builder; - -pub type SourceSn = u64; - -/// The locality of samples to be received by subscribers or targeted by publishers. -#[zenoh_macros::unstable] -#[derive(Clone, Copy, Debug, Default, Serialize, PartialEq, Eq)] -pub enum Locality { - SessionLocal, - Remote, - #[default] - Any, -} -#[cfg(not(feature = "unstable"))] -#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)] -pub(crate) enum Locality { - SessionLocal, - Remote, - #[default] - Any, -} - -#[derive(Debug, Clone, PartialEq, Eq, Default)] -pub(crate) struct DataInfo { - pub kind: SampleKind, - pub encoding: Option, - pub timestamp: Option, - pub source_id: Option, - pub source_sn: Option, - pub qos: QoS, -} - -pub(crate) trait DataInfoIntoSample { - fn into_sample( - self, - key_expr: IntoKeyExpr, - payload: IntoPayload, - #[cfg(feature = "unstable")] attachment: Option, - ) -> Sample - where - IntoKeyExpr: Into>, - IntoPayload: Into; -} - -impl DataInfoIntoSample for DataInfo { - // This function is for internal use only. - // Technically it may create invalid sample (e.g. a delete sample with a payload and encoding) - // The test for it is intentionally not added to avoid inserting extra "if" into hot path. - // The correctness of the data should be ensured by the caller. 
- #[inline] - fn into_sample( - self, - key_expr: IntoKeyExpr, - payload: IntoPayload, - #[cfg(feature = "unstable")] attachment: Option, - ) -> Sample - where - IntoKeyExpr: Into>, - IntoPayload: Into, - { - Sample { - key_expr: key_expr.into(), - payload: payload.into(), - kind: self.kind, - encoding: self.encoding.unwrap_or_default(), - timestamp: self.timestamp, - qos: self.qos, - #[cfg(feature = "unstable")] - source_info: SourceInfo { - source_id: self.source_id, - source_sn: self.source_sn, - }, - #[cfg(feature = "unstable")] - attachment, - } - } -} - -impl DataInfoIntoSample for Option { - #[inline] - fn into_sample( - self, - key_expr: IntoKeyExpr, - payload: IntoPayload, - #[cfg(feature = "unstable")] attachment: Option, - ) -> Sample - where - IntoKeyExpr: Into>, - IntoPayload: Into, - { - if let Some(data_info) = self { - data_info.into_sample(key_expr, payload, attachment) - } else { - Sample { - key_expr: key_expr.into(), - payload: payload.into(), - kind: SampleKind::Put, - encoding: Encoding::default(), - timestamp: None, - qos: QoS::default(), - #[cfg(feature = "unstable")] - source_info: SourceInfo::empty(), - #[cfg(feature = "unstable")] - attachment, - } - } - } -} - -/// Informations on the source of a zenoh [`Sample`]. -#[zenoh_macros::unstable] -#[derive(Debug, Clone)] -pub struct SourceInfo { - /// The [`EntityGlobalId`] of the zenoh entity that published the concerned [`Sample`]. - pub source_id: Option, - /// The sequence number of the [`Sample`] from the source. 
- pub source_sn: Option, -} - -#[test] -#[cfg(feature = "unstable")] -fn source_info_stack_size() { - use crate::{ - sample::{SourceInfo, SourceSn}, - ZenohId, - }; - - assert_eq!(std::mem::size_of::(), 16); - assert_eq!(std::mem::size_of::>(), 17); - assert_eq!(std::mem::size_of::>(), 16); - assert_eq!(std::mem::size_of::(), 17 + 16 + 7); -} - -#[zenoh_macros::unstable] -impl SourceInfo { - pub(crate) fn empty() -> Self { - SourceInfo { - source_id: None, - source_sn: None, - } - } - pub(crate) fn is_empty(&self) -> bool { - self.source_id.is_none() && self.source_sn.is_none() - } -} - -impl From for Option { - fn from(source_info: SourceInfo) -> Option { - if source_info.is_empty() { - None - } else { - Some(zenoh::put::ext::SourceInfoType { - id: source_info.source_id.unwrap_or_default(), - sn: source_info.source_sn.unwrap_or_default() as u32, - }) - } - } -} - -#[zenoh_macros::unstable] -impl From for SourceInfo { - fn from(data_info: DataInfo) -> Self { - SourceInfo { - source_id: data_info.source_id, - source_sn: data_info.source_sn, - } - } -} - -#[zenoh_macros::unstable] -impl From> for SourceInfo { - fn from(data_info: Option) -> Self { - match data_info { - Some(data_info) => data_info.into(), - None => SourceInfo::empty(), - } - } -} - -mod attachment { - #[zenoh_macros::unstable] - use zenoh_buffers::{ - reader::{HasReader, Reader}, - writer::HasWriter, - ZBuf, ZBufReader, ZSlice, - }; - #[zenoh_macros::unstable] - use zenoh_codec::{RCodec, WCodec, Zenoh080}; - #[zenoh_macros::unstable] - use zenoh_protocol::zenoh::ext::AttachmentType; - - /// A builder for [`Attachment`] - #[zenoh_macros::unstable] - #[derive(Debug)] - pub struct AttachmentBuilder { - pub(crate) inner: Vec, - } - #[zenoh_macros::unstable] - impl Default for AttachmentBuilder { - fn default() -> Self { - Self::new() - } - } - #[zenoh_macros::unstable] - impl AttachmentBuilder { - pub fn new() -> Self { - Self { inner: Vec::new() } - } - fn _insert(&mut self, key: &[u8], value: &[u8]) { 
- let codec = Zenoh080; - let mut writer = self.inner.writer(); - codec.write(&mut writer, key).unwrap(); // Infallible, barring alloc failure - codec.write(&mut writer, value).unwrap(); // Infallible, barring alloc failure - } - /// Inserts a key-value pair to the attachment. - /// - /// Note that [`Attachment`] is a list of non-unique key-value pairs: inserting at the same key multiple times leads to both values being transmitted for that key. - pub fn insert + ?Sized, Value: AsRef<[u8]> + ?Sized>( - &mut self, - key: &Key, - value: &Value, - ) { - self._insert(key.as_ref(), value.as_ref()) - } - pub fn build(self) -> Attachment { - Attachment { - inner: self.inner.into(), - } - } - } - #[zenoh_macros::unstable] - impl From for Attachment { - fn from(value: AttachmentBuilder) -> Self { - Attachment { - inner: value.inner.into(), - } - } - } - #[zenoh_macros::unstable] - impl From for Option { - fn from(value: AttachmentBuilder) -> Self { - if value.inner.is_empty() { - None - } else { - Some(value.into()) - } - } - } - - #[zenoh_macros::unstable] - #[derive(Clone)] - pub struct Attachment { - pub(crate) inner: ZBuf, - } - #[zenoh_macros::unstable] - impl Default for Attachment { - fn default() -> Self { - Self::new() - } - } - #[zenoh_macros::unstable] - impl From for AttachmentType { - fn from(this: Attachment) -> Self { - AttachmentType { buffer: this.inner } - } - } - #[zenoh_macros::unstable] - impl From> for Attachment { - fn from(this: AttachmentType) -> Self { - Attachment { inner: this.buffer } - } - } - #[zenoh_macros::unstable] - impl Attachment { - pub fn new() -> Self { - Self { - inner: ZBuf::empty(), - } - } - pub fn is_empty(&self) -> bool { - self.len() == 0 - } - pub fn len(&self) -> usize { - self.iter().count() - } - pub fn iter(&self) -> AttachmentIterator { - self.into_iter() - } - fn _get(&self, key: &[u8]) -> Option { - self.iter() - .find_map(|(k, v)| (k.as_slice() == key).then_some(v)) - } - pub fn get>(&self, key: &Key) -> Option { - 
self._get(key.as_ref()) - } - fn _insert(&mut self, key: &[u8], value: &[u8]) { - let codec = Zenoh080; - let mut writer = self.inner.writer(); - codec.write(&mut writer, key).unwrap(); // Infallible, barring alloc failure - codec.write(&mut writer, value).unwrap(); // Infallible, barring alloc failure - } - /// Inserts a key-value pair to the attachment. - /// - /// Note that [`Attachment`] is a list of non-unique key-value pairs: inserting at the same key multiple times leads to both values being transmitted for that key. - /// - /// [`Attachment`] is not very efficient at inserting, so if you wish to perform multiple inserts, it's generally better to [`Attachment::extend`] after performing the inserts on an [`AttachmentBuilder`] - pub fn insert + ?Sized, Value: AsRef<[u8]> + ?Sized>( - &mut self, - key: &Key, - value: &Value, - ) { - self._insert(key.as_ref(), value.as_ref()) - } - fn _extend(&mut self, with: Self) -> &mut Self { - for slice in with.inner.zslices().cloned() { - self.inner.push_zslice(slice); - } - self - } - pub fn extend(&mut self, with: impl Into) -> &mut Self { - let with = with.into(); - self._extend(with) - } - } - #[zenoh_macros::unstable] - pub struct AttachmentIterator<'a> { - reader: ZBufReader<'a>, - } - #[zenoh_macros::unstable] - impl<'a> core::iter::IntoIterator for &'a Attachment { - type Item = (ZSlice, ZSlice); - type IntoIter = AttachmentIterator<'a>; - fn into_iter(self) -> Self::IntoIter { - AttachmentIterator { - reader: self.inner.reader(), - } - } - } - #[zenoh_macros::unstable] - impl core::fmt::Debug for Attachment { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{{")?; - for (key, value) in self { - let key = key.as_slice(); - let value = value.as_slice(); - match core::str::from_utf8(key) { - Ok(key) => write!(f, "\"{key}\": ")?, - Err(_) => { - write!(f, "0x")?; - for byte in key { - write!(f, "{byte:02X}")? 
- } - } - } - match core::str::from_utf8(value) { - Ok(value) => write!(f, "\"{value}\", ")?, - Err(_) => { - write!(f, "0x")?; - for byte in value { - write!(f, "{byte:02X}")? - } - write!(f, ", ")? - } - } - } - write!(f, "}}") - } - } - #[zenoh_macros::unstable] - impl<'a> core::iter::Iterator for AttachmentIterator<'a> { - type Item = (ZSlice, ZSlice); - fn next(&mut self) -> Option { - let key = Zenoh080.read(&mut self.reader).ok()?; - let value = Zenoh080.read(&mut self.reader).ok()?; - Some((key, value)) - } - fn size_hint(&self) -> (usize, Option) { - ( - (self.reader.remaining() != 0) as usize, - Some(self.reader.remaining() / 2), - ) - } - } - #[zenoh_macros::unstable] - impl<'a> core::iter::FromIterator<(&'a [u8], &'a [u8])> for AttachmentBuilder { - fn from_iter>(iter: T) -> Self { - let codec = Zenoh080; - let mut buffer: Vec = Vec::new(); - let mut writer = buffer.writer(); - for (key, value) in iter { - codec.write(&mut writer, key).unwrap(); // Infallible, barring allocation failures - codec.write(&mut writer, value).unwrap(); // Infallible, barring allocation failures - } - Self { inner: buffer } - } - } - #[zenoh_macros::unstable] - impl<'a> core::iter::FromIterator<(&'a [u8], &'a [u8])> for Attachment { - fn from_iter>(iter: T) -> Self { - AttachmentBuilder::from_iter(iter).into() - } - } -} - -/// The kind of a `Sample`. -#[repr(u8)] -#[derive(Debug, Default, Copy, Clone, PartialEq, Eq)] -pub enum SampleKind { - /// if the `Sample` was issued by a `put` operation. - #[default] - Put = 0, - /// if the `Sample` was issued by a `delete` operation. 
- Delete = 1, -} - -impl fmt::Display for SampleKind { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - SampleKind::Put => write!(f, "PUT"), - SampleKind::Delete => write!(f, "DELETE"), - } - } -} - -impl TryFrom for SampleKind { - type Error = u64; - fn try_from(kind: u64) -> Result { - match kind { - 0 => Ok(SampleKind::Put), - 1 => Ok(SampleKind::Delete), - _ => Err(kind), - } - } -} - -#[zenoh_macros::unstable] -pub use attachment::{Attachment, AttachmentBuilder, AttachmentIterator}; - -/// Structure with public fields for sample. It's convenient if it's necessary to decompose a sample into its fields. -pub struct SampleFields { - pub key_expr: KeyExpr<'static>, - pub payload: Payload, - pub kind: SampleKind, - pub encoding: Encoding, - pub timestamp: Option, - pub express: bool, - pub priority: Priority, - pub congestion_control: CongestionControl, - #[cfg(feature = "unstable")] - pub source_info: SourceInfo, - #[cfg(feature = "unstable")] - pub attachment: Option, -} - -impl From for SampleFields { - fn from(sample: Sample) -> Self { - SampleFields { - key_expr: sample.key_expr, - payload: sample.payload, - kind: sample.kind, - encoding: sample.encoding, - timestamp: sample.timestamp, - express: sample.qos.express(), - priority: sample.qos.priority(), - congestion_control: sample.qos.congestion_control(), - #[cfg(feature = "unstable")] - source_info: sample.source_info, - #[cfg(feature = "unstable")] - attachment: sample.attachment, - } - } -} - -/// A zenoh sample. 
-#[non_exhaustive] -#[derive(Clone, Debug)] -pub struct Sample { - pub(crate) key_expr: KeyExpr<'static>, - pub(crate) payload: Payload, - pub(crate) kind: SampleKind, - pub(crate) encoding: Encoding, - pub(crate) timestamp: Option, - pub(crate) qos: QoS, - - #[cfg(feature = "unstable")] - pub(crate) source_info: SourceInfo, - - #[cfg(feature = "unstable")] - pub(crate) attachment: Option, -} - -impl Sample { - /// Gets the key expression on which this Sample was published. - #[inline] - pub fn key_expr(&self) -> &KeyExpr<'static> { - &self.key_expr - } - - /// Gets the payload of this Sample. - #[inline] - pub fn payload(&self) -> &Payload { - &self.payload - } - - /// Gets the kind of this Sample. - #[inline] - pub fn kind(&self) -> SampleKind { - self.kind - } - - /// Gets the encoding of this sample - #[inline] - pub fn encoding(&self) -> &Encoding { - &self.encoding - } - - /// Gets the timestamp of this Sample. - #[inline] - pub fn timestamp(&self) -> Option<&Timestamp> { - self.timestamp.as_ref() - } - - /// Gets the quality of service settings this Sample was sent with. - #[inline] - pub fn qos(&self) -> &QoS { - &self.qos - } - - /// Gets infos on the source of this Sample. - #[zenoh_macros::unstable] - #[inline] - pub fn source_info(&self) -> &SourceInfo { - &self.source_info - } - - /// Gets the sample attachment: a map of key-value pairs, where each key and value are byte-slices. 
- #[zenoh_macros::unstable] - #[inline] - pub fn attachment(&self) -> Option<&Attachment> { - self.attachment.as_ref() - } -} - -impl From for Value { - fn from(sample: Sample) -> Self { - Value::new(sample.payload).encoding(sample.encoding) - } -} - -/// Structure containing quality of service data -#[derive(Debug, Default, Copy, Clone, Eq, PartialEq)] -pub struct QoS { - inner: QoSType, -} - -#[derive(Debug)] -pub struct QoSBuilder(QoS); - -impl From for QoSBuilder { - fn from(qos: QoS) -> Self { - QoSBuilder(qos) - } -} - -impl From for QoS { - fn from(builder: QoSBuilder) -> Self { - builder.0 - } -} - -impl QoSBuilderTrait for QoSBuilder { - fn congestion_control(self, congestion_control: CongestionControl) -> Self { - let mut inner = self.0.inner; - inner.set_congestion_control(congestion_control); - Self(QoS { inner }) - } - - fn priority(self, priority: Priority) -> Self { - let mut inner = self.0.inner; - inner.set_priority(priority.into()); - Self(QoS { inner }) - } - - fn express(self, is_express: bool) -> Self { - let mut inner = self.0.inner; - inner.set_is_express(is_express); - Self(QoS { inner }) - } -} - -impl QoS { - /// Gets priority of the message. - pub fn priority(&self) -> Priority { - match Priority::try_from(self.inner.get_priority()) { - Ok(p) => p, - Err(e) => { - log::trace!( - "Failed to convert priority: {}; replacing with default value", - e.to_string() - ); - Priority::default() - } - } - } - - /// Gets congestion control of the message. - pub fn congestion_control(&self) -> CongestionControl { - self.inner.get_congestion_control() - } - - /// Gets express flag value. If `true`, the message is not batched during transmission, in order to reduce latency. 
- pub fn express(&self) -> bool { - self.inner.is_express() - } -} - -impl From for QoS { - fn from(qos: QoSType) -> Self { - QoS { inner: qos } - } -} - -impl From for QoSType { - fn from(qos: QoS) -> Self { - qos.inner - } -} From 9d1a5409541831926e70420fdf89006a67b1020c Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Fri, 29 Mar 2024 18:37:23 +0100 Subject: [PATCH 087/124] Remove error op struct in SampleBuilder --- zenoh/src/sample/builder.rs | 2 -- 1 file changed, 2 deletions(-) diff --git a/zenoh/src/sample/builder.rs b/zenoh/src/sample/builder.rs index 1ec20209aa..7f438d1381 100644 --- a/zenoh/src/sample/builder.rs +++ b/zenoh/src/sample/builder.rs @@ -70,8 +70,6 @@ pub mod op { #[derive(Debug)] pub struct Delete; #[derive(Debug)] - pub struct Error; - #[derive(Debug)] pub struct Any; } From 7904d099ba3d069ecc51b76241ef136678a5e005 Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Fri, 29 Mar 2024 20:29:43 +0100 Subject: [PATCH 088/124] Add forgotten file --- zenoh/src/sample/mod.rs | 655 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 655 insertions(+) create mode 100644 zenoh/src/sample/mod.rs diff --git a/zenoh/src/sample/mod.rs b/zenoh/src/sample/mod.rs new file mode 100644 index 0000000000..2b71105d5e --- /dev/null +++ b/zenoh/src/sample/mod.rs @@ -0,0 +1,655 @@ +// +// Copyright (c) 2023 ZettaScale Technology +// +// This program and the accompanying materials are made available under the +// terms of the Eclipse Public License 2.0 which is available at +// http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 +// which is available at https://www.apache.org/licenses/LICENSE-2.0. +// +// SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 +// +// Contributors: +// ZettaScale Zenoh Team, +// + +//! 
Sample primitives +use crate::encoding::Encoding; +use crate::payload::Payload; +use crate::prelude::{KeyExpr, Value}; +use crate::sample::builder::{QoSBuilderTrait, ValueBuilderTrait}; +use crate::time::Timestamp; +use crate::Priority; +#[zenoh_macros::unstable] +use serde::Serialize; +use std::{convert::TryFrom, fmt}; +use zenoh_protocol::core::EntityGlobalId; +use zenoh_protocol::network::declare::ext::QoSType; +use zenoh_protocol::{core::CongestionControl, zenoh}; + +pub mod builder; + +pub type SourceSn = u64; + +/// The locality of samples to be received by subscribers or targeted by publishers. +#[zenoh_macros::unstable] +#[derive(Clone, Copy, Debug, Default, Serialize, PartialEq, Eq)] +pub enum Locality { + SessionLocal, + Remote, + #[default] + Any, +} +#[cfg(not(feature = "unstable"))] +#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)] +pub(crate) enum Locality { + SessionLocal, + Remote, + #[default] + Any, +} + +#[derive(Debug, Clone, PartialEq, Eq, Default)] +pub(crate) struct DataInfo { + pub kind: SampleKind, + pub encoding: Option, + pub timestamp: Option, + pub source_id: Option, + pub source_sn: Option, + pub qos: QoS, +} + +pub(crate) trait DataInfoIntoSample { + fn into_sample( + self, + key_expr: IntoKeyExpr, + payload: IntoPayload, + #[cfg(feature = "unstable")] attachment: Option, + ) -> Sample + where + IntoKeyExpr: Into>, + IntoPayload: Into; +} + +impl DataInfoIntoSample for DataInfo { + // This function is for internal use only. + // Technically it may create invalid sample (e.g. a delete sample with a payload and encoding) + // The test for it is intentionally not added to avoid inserting extra "if" into hot path. + // The correctness of the data should be ensured by the caller. 
+ #[inline] + fn into_sample( + self, + key_expr: IntoKeyExpr, + payload: IntoPayload, + #[cfg(feature = "unstable")] attachment: Option, + ) -> Sample + where + IntoKeyExpr: Into>, + IntoPayload: Into, + { + Sample { + key_expr: key_expr.into(), + payload: payload.into(), + kind: self.kind, + encoding: self.encoding.unwrap_or_default(), + timestamp: self.timestamp, + qos: self.qos, + #[cfg(feature = "unstable")] + source_info: SourceInfo { + source_id: self.source_id, + source_sn: self.source_sn, + }, + #[cfg(feature = "unstable")] + attachment, + } + } +} + +impl DataInfoIntoSample for Option { + #[inline] + fn into_sample( + self, + key_expr: IntoKeyExpr, + payload: IntoPayload, + #[cfg(feature = "unstable")] attachment: Option, + ) -> Sample + where + IntoKeyExpr: Into>, + IntoPayload: Into, + { + if let Some(data_info) = self { + data_info.into_sample(key_expr, payload, attachment) + } else { + Sample { + key_expr: key_expr.into(), + payload: payload.into(), + kind: SampleKind::Put, + encoding: Encoding::default(), + timestamp: None, + qos: QoS::default(), + #[cfg(feature = "unstable")] + source_info: SourceInfo::empty(), + #[cfg(feature = "unstable")] + attachment, + } + } + } +} + +/// Informations on the source of a zenoh [`Sample`]. +#[zenoh_macros::unstable] +#[derive(Debug, Clone)] +pub struct SourceInfo { + /// The [`EntityGlobalId`] of the zenoh entity that published the concerned [`Sample`]. + pub source_id: Option, + /// The sequence number of the [`Sample`] from the source. 
+ pub source_sn: Option, +} + +#[test] +#[cfg(feature = "unstable")] +fn source_info_stack_size() { + use crate::{ + sample::{SourceInfo, SourceSn}, + ZenohId, + }; + + assert_eq!(std::mem::size_of::(), 16); + assert_eq!(std::mem::size_of::>(), 17); + assert_eq!(std::mem::size_of::>(), 16); + assert_eq!(std::mem::size_of::(), 17 + 16 + 7); +} + +#[zenoh_macros::unstable] +impl SourceInfo { + pub(crate) fn empty() -> Self { + SourceInfo { + source_id: None, + source_sn: None, + } + } + pub(crate) fn is_empty(&self) -> bool { + self.source_id.is_none() && self.source_sn.is_none() + } +} + +impl From for Option { + fn from(source_info: SourceInfo) -> Option { + if source_info.is_empty() { + None + } else { + Some(zenoh::put::ext::SourceInfoType { + id: source_info.source_id.unwrap_or_default(), + sn: source_info.source_sn.unwrap_or_default() as u32, + }) + } + } +} + +#[zenoh_macros::unstable] +impl From for SourceInfo { + fn from(data_info: DataInfo) -> Self { + SourceInfo { + source_id: data_info.source_id, + source_sn: data_info.source_sn, + } + } +} + +#[zenoh_macros::unstable] +impl From> for SourceInfo { + fn from(data_info: Option) -> Self { + match data_info { + Some(data_info) => data_info.into(), + None => SourceInfo::empty(), + } + } +} + +mod attachment { + #[zenoh_macros::unstable] + use zenoh_buffers::{ + reader::{HasReader, Reader}, + writer::HasWriter, + ZBuf, ZBufReader, ZSlice, + }; + #[zenoh_macros::unstable] + use zenoh_codec::{RCodec, WCodec, Zenoh080}; + #[zenoh_macros::unstable] + use zenoh_protocol::zenoh::ext::AttachmentType; + + /// A builder for [`Attachment`] + #[zenoh_macros::unstable] + #[derive(Debug)] + pub struct AttachmentBuilder { + pub(crate) inner: Vec, + } + #[zenoh_macros::unstable] + impl Default for AttachmentBuilder { + fn default() -> Self { + Self::new() + } + } + #[zenoh_macros::unstable] + impl AttachmentBuilder { + pub fn new() -> Self { + Self { inner: Vec::new() } + } + fn _insert(&mut self, key: &[u8], value: &[u8]) { 
+ let codec = Zenoh080; + let mut writer = self.inner.writer(); + codec.write(&mut writer, key).unwrap(); // Infallible, barring alloc failure + codec.write(&mut writer, value).unwrap(); // Infallible, barring alloc failure + } + /// Inserts a key-value pair to the attachment. + /// + /// Note that [`Attachment`] is a list of non-unique key-value pairs: inserting at the same key multiple times leads to both values being transmitted for that key. + pub fn insert + ?Sized, Value: AsRef<[u8]> + ?Sized>( + &mut self, + key: &Key, + value: &Value, + ) { + self._insert(key.as_ref(), value.as_ref()) + } + pub fn build(self) -> Attachment { + Attachment { + inner: self.inner.into(), + } + } + } + #[zenoh_macros::unstable] + impl From for Attachment { + fn from(value: AttachmentBuilder) -> Self { + Attachment { + inner: value.inner.into(), + } + } + } + #[zenoh_macros::unstable] + impl From for Option { + fn from(value: AttachmentBuilder) -> Self { + if value.inner.is_empty() { + None + } else { + Some(value.into()) + } + } + } + + #[zenoh_macros::unstable] + #[derive(Clone)] + pub struct Attachment { + pub(crate) inner: ZBuf, + } + #[zenoh_macros::unstable] + impl Default for Attachment { + fn default() -> Self { + Self::new() + } + } + #[zenoh_macros::unstable] + impl From for AttachmentType { + fn from(this: Attachment) -> Self { + AttachmentType { buffer: this.inner } + } + } + #[zenoh_macros::unstable] + impl From> for Attachment { + fn from(this: AttachmentType) -> Self { + Attachment { inner: this.buffer } + } + } + #[zenoh_macros::unstable] + impl Attachment { + pub fn new() -> Self { + Self { + inner: ZBuf::empty(), + } + } + pub fn is_empty(&self) -> bool { + self.len() == 0 + } + pub fn len(&self) -> usize { + self.iter().count() + } + pub fn iter(&self) -> AttachmentIterator { + self.into_iter() + } + fn _get(&self, key: &[u8]) -> Option { + self.iter() + .find_map(|(k, v)| (k.as_slice() == key).then_some(v)) + } + pub fn get>(&self, key: &Key) -> Option { + 
self._get(key.as_ref()) + } + fn _insert(&mut self, key: &[u8], value: &[u8]) { + let codec = Zenoh080; + let mut writer = self.inner.writer(); + codec.write(&mut writer, key).unwrap(); // Infallible, barring alloc failure + codec.write(&mut writer, value).unwrap(); // Infallible, barring alloc failure + } + /// Inserts a key-value pair to the attachment. + /// + /// Note that [`Attachment`] is a list of non-unique key-value pairs: inserting at the same key multiple times leads to both values being transmitted for that key. + /// + /// [`Attachment`] is not very efficient at inserting, so if you wish to perform multiple inserts, it's generally better to [`Attachment::extend`] after performing the inserts on an [`AttachmentBuilder`] + pub fn insert + ?Sized, Value: AsRef<[u8]> + ?Sized>( + &mut self, + key: &Key, + value: &Value, + ) { + self._insert(key.as_ref(), value.as_ref()) + } + fn _extend(&mut self, with: Self) -> &mut Self { + for slice in with.inner.zslices().cloned() { + self.inner.push_zslice(slice); + } + self + } + pub fn extend(&mut self, with: impl Into) -> &mut Self { + let with = with.into(); + self._extend(with) + } + } + #[zenoh_macros::unstable] + pub struct AttachmentIterator<'a> { + reader: ZBufReader<'a>, + } + #[zenoh_macros::unstable] + impl<'a> core::iter::IntoIterator for &'a Attachment { + type Item = (ZSlice, ZSlice); + type IntoIter = AttachmentIterator<'a>; + fn into_iter(self) -> Self::IntoIter { + AttachmentIterator { + reader: self.inner.reader(), + } + } + } + #[zenoh_macros::unstable] + impl core::fmt::Debug for Attachment { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{{")?; + for (key, value) in self { + let key = key.as_slice(); + let value = value.as_slice(); + match core::str::from_utf8(key) { + Ok(key) => write!(f, "\"{key}\": ")?, + Err(_) => { + write!(f, "0x")?; + for byte in key { + write!(f, "{byte:02X}")? 
+ } + } + } + match core::str::from_utf8(value) { + Ok(value) => write!(f, "\"{value}\", ")?, + Err(_) => { + write!(f, "0x")?; + for byte in value { + write!(f, "{byte:02X}")? + } + write!(f, ", ")? + } + } + } + write!(f, "}}") + } + } + #[zenoh_macros::unstable] + impl<'a> core::iter::Iterator for AttachmentIterator<'a> { + type Item = (ZSlice, ZSlice); + fn next(&mut self) -> Option { + let key = Zenoh080.read(&mut self.reader).ok()?; + let value = Zenoh080.read(&mut self.reader).ok()?; + Some((key, value)) + } + fn size_hint(&self) -> (usize, Option) { + ( + (self.reader.remaining() != 0) as usize, + Some(self.reader.remaining() / 2), + ) + } + } + #[zenoh_macros::unstable] + impl<'a> core::iter::FromIterator<(&'a [u8], &'a [u8])> for AttachmentBuilder { + fn from_iter>(iter: T) -> Self { + let codec = Zenoh080; + let mut buffer: Vec = Vec::new(); + let mut writer = buffer.writer(); + for (key, value) in iter { + codec.write(&mut writer, key).unwrap(); // Infallible, barring allocation failures + codec.write(&mut writer, value).unwrap(); // Infallible, barring allocation failures + } + Self { inner: buffer } + } + } + #[zenoh_macros::unstable] + impl<'a> core::iter::FromIterator<(&'a [u8], &'a [u8])> for Attachment { + fn from_iter>(iter: T) -> Self { + AttachmentBuilder::from_iter(iter).into() + } + } +} + +/// The kind of a `Sample`. +#[repr(u8)] +#[derive(Debug, Default, Copy, Clone, PartialEq, Eq)] +pub enum SampleKind { + /// if the `Sample` was issued by a `put` operation. + #[default] + Put = 0, + /// if the `Sample` was issued by a `delete` operation. 
+ Delete = 1, +} + +impl fmt::Display for SampleKind { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + SampleKind::Put => write!(f, "PUT"), + SampleKind::Delete => write!(f, "DELETE"), + } + } +} + +impl TryFrom for SampleKind { + type Error = u64; + fn try_from(kind: u64) -> Result { + match kind { + 0 => Ok(SampleKind::Put), + 1 => Ok(SampleKind::Delete), + _ => Err(kind), + } + } +} + +#[zenoh_macros::unstable] +pub use attachment::{Attachment, AttachmentBuilder, AttachmentIterator}; + +/// Structure with public fields for sample. It's convenient if it's necessary to decompose a sample into its fields. +pub struct SampleFields { + pub key_expr: KeyExpr<'static>, + pub payload: Payload, + pub kind: SampleKind, + pub encoding: Encoding, + pub timestamp: Option, + pub express: bool, + pub priority: Priority, + pub congestion_control: CongestionControl, + #[cfg(feature = "unstable")] + pub source_info: SourceInfo, + #[cfg(feature = "unstable")] + pub attachment: Option, +} + +impl From for SampleFields { + fn from(sample: Sample) -> Self { + SampleFields { + key_expr: sample.key_expr, + payload: sample.payload, + kind: sample.kind, + encoding: sample.encoding, + timestamp: sample.timestamp, + express: sample.qos.express(), + priority: sample.qos.priority(), + congestion_control: sample.qos.congestion_control(), + #[cfg(feature = "unstable")] + source_info: sample.source_info, + #[cfg(feature = "unstable")] + attachment: sample.attachment, + } + } +} + +/// A zenoh sample. 
+#[non_exhaustive] +#[derive(Clone, Debug)] +pub struct Sample { + pub(crate) key_expr: KeyExpr<'static>, + pub(crate) payload: Payload, + pub(crate) kind: SampleKind, + pub(crate) encoding: Encoding, + pub(crate) timestamp: Option, + pub(crate) qos: QoS, + + #[cfg(feature = "unstable")] + pub(crate) source_info: SourceInfo, + + #[cfg(feature = "unstable")] + pub(crate) attachment: Option, +} + +impl Sample { + /// Gets the key expression on which this Sample was published. + #[inline] + pub fn key_expr(&self) -> &KeyExpr<'static> { + &self.key_expr + } + + /// Gets the payload of this Sample. + #[inline] + pub fn payload(&self) -> &Payload { + &self.payload + } + + /// Gets the kind of this Sample. + #[inline] + pub fn kind(&self) -> SampleKind { + self.kind + } + + /// Gets the encoding of this sample + #[inline] + pub fn encoding(&self) -> &Encoding { + &self.encoding + } + + /// Gets the timestamp of this Sample. + #[inline] + pub fn timestamp(&self) -> Option<&Timestamp> { + self.timestamp.as_ref() + } + + /// Gets the quality of service settings this Sample was sent with. + #[inline] + pub fn qos(&self) -> &QoS { + &self.qos + } + + /// Gets infos on the source of this Sample. + #[zenoh_macros::unstable] + #[inline] + pub fn source_info(&self) -> &SourceInfo { + &self.source_info + } + + /// Gets the sample attachment: a map of key-value pairs, where each key and value are byte-slices. 
+ #[zenoh_macros::unstable] + #[inline] + pub fn attachment(&self) -> Option<&Attachment> { + self.attachment.as_ref() + } +} + +impl From for Value { + fn from(sample: Sample) -> Self { + Value::new(sample.payload).encoding(sample.encoding) + } +} + +/// Structure containing quality of service data +#[derive(Debug, Default, Copy, Clone, Eq, PartialEq)] +pub struct QoS { + inner: QoSType, +} + +#[derive(Debug)] +pub struct QoSBuilder(QoS); + +impl From for QoSBuilder { + fn from(qos: QoS) -> Self { + QoSBuilder(qos) + } +} + +impl From for QoS { + fn from(builder: QoSBuilder) -> Self { + builder.0 + } +} + +impl QoSBuilderTrait for QoSBuilder { + fn congestion_control(self, congestion_control: CongestionControl) -> Self { + let mut inner = self.0.inner; + inner.set_congestion_control(congestion_control); + Self(QoS { inner }) + } + + fn priority(self, priority: Priority) -> Self { + let mut inner = self.0.inner; + inner.set_priority(priority.into()); + Self(QoS { inner }) + } + + fn express(self, is_express: bool) -> Self { + let mut inner = self.0.inner; + inner.set_is_express(is_express); + Self(QoS { inner }) + } +} + +impl QoS { + /// Gets priority of the message. + pub fn priority(&self) -> Priority { + match Priority::try_from(self.inner.get_priority()) { + Ok(p) => p, + Err(e) => { + log::trace!( + "Failed to convert priority: {}; replacing with default value", + e.to_string() + ); + Priority::default() + } + } + } + + /// Gets congestion control of the message. + pub fn congestion_control(&self) -> CongestionControl { + self.inner.get_congestion_control() + } + + /// Gets express flag value. If `true`, the message is not batched during transmission, in order to reduce latency. 
+ pub fn express(&self) -> bool { + self.inner.is_express() + } +} + +impl From for QoS { + fn from(qos: QoSType) -> Self { + QoS { inner: qos } + } +} + +impl From for QoSType { + fn from(qos: QoS) -> Self { + qos.inner + } +} From ab349b2e91ee2fce1b0776526f6bb26af26a3b76 Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Mon, 1 Apr 2024 11:46:29 +0200 Subject: [PATCH 089/124] support of TryIntoKeyexpr --- zenoh/src/key_expr.rs | 2 +- zenoh/src/queryable.rs | 144 ++++++++++++++++++++++++++++------------ zenoh/src/sample/mod.rs | 6 ++ 3 files changed, 107 insertions(+), 45 deletions(-) diff --git a/zenoh/src/key_expr.rs b/zenoh/src/key_expr.rs index aaa1d13724..d2bfb5bcfe 100644 --- a/zenoh/src/key_expr.rs +++ b/zenoh/src/key_expr.rs @@ -185,7 +185,7 @@ impl<'a> KeyExpr<'a> { /// # Safety /// Key Expressions must follow some rules to be accepted by a Zenoh network. /// Messages addressed with invalid key expressions will be dropped. - pub unsafe fn from_str_uncheckend(s: &'a str) -> Self { + pub unsafe fn from_str_unchecked(s: &'a str) -> Self { keyexpr::from_str_unchecked(s).into() } diff --git a/zenoh/src/queryable.rs b/zenoh/src/queryable.rs index c2a5557440..37c3a2303a 100644 --- a/zenoh/src/queryable.rs +++ b/zenoh/src/queryable.rs @@ -19,10 +19,9 @@ use crate::handlers::{locked, DefaultHandler}; use crate::net::primitives::Primitives; use crate::prelude::*; use crate::sample::builder::{ - op, QoSBuilderTrait, SampleBuilder, SampleBuilderTrait, TimestampBuilderTrait, - ValueBuilderTrait, + QoSBuilderTrait, SampleBuilder, SampleBuilderTrait, TimestampBuilderTrait, ValueBuilderTrait, }; -use crate::sample::SourceInfo; +use crate::sample::{QoSBuilder, SourceInfo}; use crate::Id; use crate::SessionRef; use crate::Undeclarable; @@ -132,17 +131,23 @@ impl Query { &self, key_expr: TryIntoKeyExpr, payload: IntoPayload, - ) -> ReplySampleBuilder<'_, op::Put> + ) -> ReplyBuilder<'_, 'b, ReplyBuilderPut> where TryIntoKeyExpr: TryInto>, >>::Error: Into, IntoPayload: Into, 
{ - let sample_builder = - SampleBuilder::put(key_expr, payload).qos(response::ext::QoSType::RESPONSE.into()); - ReplySampleBuilder { + ReplyBuilder { query: self, - sample_builder, + key_expr: key_expr.try_into().map_err(Into::into), + qos: response::ext::QoSType::RESPONSE.into(), + kind: ReplyBuilderPut { + payload: payload.into(), + encoding: Encoding::default(), + }, + timestamp: None, + source_info: SourceInfo::empty(), + attachment: None, } } @@ -165,19 +170,22 @@ impl Query { /// Unless the query has enabled disjoint replies (you can check this through [`Query::accepts_replies`]), /// replying on a disjoint key expression will result in an error when resolving the reply. #[inline(always)] - pub fn reply_del( + pub fn reply_del<'b, TryIntoKeyExpr>( &self, - key_expr: IntoKeyExpr, - ) -> ReplySampleBuilder<'_, op::Delete> + key_expr: TryIntoKeyExpr, + ) -> ReplyBuilder<'_, 'b, ReplyBuilderDelete> where TryIntoKeyExpr: TryInto>, >>::Error: Into, { - let sample_builder = - SampleBuilder::delete(key_expr).qos(response::ext::QoSType::RESPONSE.into()); - ReplySampleBuilder { + ReplyBuilder { query: self, - sample_builder, + key_expr: key_expr.try_into().map_err(Into::into), + qos: response::ext::QoSType::RESPONSE.into(), + kind: ReplyBuilderDelete, + timestamp: None, + source_info: SourceInfo::empty(), + attachment: None, } } @@ -243,28 +251,45 @@ impl AsyncResolve for ReplySample<'_> { } } -/// A builder returned by [`Query::reply()`](Query::reply) +#[derive(Debug)] +pub struct ReplyBuilderPut { + payload: super::Payload, + encoding: super::Encoding, +} +#[derive(Debug)] +pub struct ReplyBuilderDelete; + +/// A builder returned by [`Query::reply()`](Query::reply) and [`Query::reply_del()`](Query::reply_del) #[must_use = "Resolvables do nothing unless you resolve them using the `res` method from either `SyncResolve` or `AsyncResolve`"] #[derive(Debug)] -pub struct ReplySampleBuilder<'a, T> { +pub struct ReplyBuilder<'a, 'b, T> { query: &'a Query, - sample_builder: 
SampleBuilder, + key_expr: ZResult>, + kind: T, + timestamp: Option, + qos: QoSBuilder, + + #[cfg(feature = "unstable")] + source_info: SourceInfo, + + #[cfg(feature = "unstable")] + attachment: Option, } -impl TimestampBuilderTrait for ReplySampleBuilder<'_, T> { +impl TimestampBuilderTrait for ReplyBuilder<'_, '_, T> { fn timestamp>>(self, timestamp: U) -> Self { Self { - sample_builder: self.sample_builder.timestamp(timestamp), + timestamp: timestamp.into(), ..self } } } -impl SampleBuilderTrait for ReplySampleBuilder<'_, T> { +impl SampleBuilderTrait for ReplyBuilder<'_, '_, T> { #[cfg(feature = "unstable")] fn source_info(self, source_info: SourceInfo) -> Self { Self { - sample_builder: self.sample_builder.source_info(source_info), + source_info, ..self } } @@ -272,69 +297,100 @@ impl SampleBuilderTrait for ReplySampleBuilder<'_, T> { #[cfg(feature = "unstable")] fn attachment>>(self, attachment: U) -> Self { Self { - sample_builder: self.sample_builder.attachment(attachment), + attachment: attachment.into(), ..self } } } -impl QoSBuilderTrait for ReplySampleBuilder<'_, T> { +impl QoSBuilderTrait for ReplyBuilder<'_, '_, T> { fn congestion_control(self, congestion_control: CongestionControl) -> Self { - Self { - sample_builder: self.sample_builder.congestion_control(congestion_control), - ..self - } + let qos = self.qos.congestion_control(congestion_control); + Self { qos, ..self } } fn priority(self, priority: Priority) -> Self { - Self { - sample_builder: self.sample_builder.priority(priority), - ..self - } + let qos = self.qos.priority(priority); + Self { qos, ..self } } fn express(self, is_express: bool) -> Self { - Self { - sample_builder: self.sample_builder.express(is_express), - ..self - } + let qos = self.qos.express(is_express); + Self { qos, ..self } } } -impl ValueBuilderTrait for ReplySampleBuilder<'_, op::Put> { +impl ValueBuilderTrait for ReplyBuilder<'_, '_, ReplyBuilderPut> { fn encoding>(self, encoding: T) -> Self { Self { - sample_builder: 
self.sample_builder.encoding(encoding), + kind: ReplyBuilderPut { + encoding: encoding.into(), + ..self.kind + }, ..self } } fn payload>(self, payload: T) -> Self { Self { - sample_builder: self.sample_builder.payload(payload), + kind: ReplyBuilderPut { + payload: payload.into(), + ..self.kind + }, ..self } } fn value>(self, value: T) -> Self { let Value { payload, encoding } = value.into(); Self { - sample_builder: self.sample_builder.payload(payload).encoding(encoding), + kind: ReplyBuilderPut { payload, encoding }, ..self } } } -impl<'a, T> Resolvable for ReplySampleBuilder<'a, T> { +impl Resolvable for ReplyBuilder<'_, '_, T> { type To = ZResult<()>; } -impl SyncResolve for ReplySampleBuilder<'_, T> { +impl SyncResolve for ReplyBuilder<'_, '_, ReplyBuilderPut> { + fn res_sync(self) -> ::To { + let key_expr = self.key_expr?.into_owned(); + let sample = SampleBuilder::put(key_expr, self.kind.payload) + .encoding(self.kind.encoding) + .timestamp(self.timestamp) + .qos(self.qos.into()); + #[cfg(feature = "unstable")] + let sample = sample.source_info(self.source_info); + #[cfg(feature = "unstable")] + let sample = sample.attachment(self.attachment); + self.query._reply_sample(sample.into()) + } +} + +impl SyncResolve for ReplyBuilder<'_, '_, ReplyBuilderDelete> { fn res_sync(self) -> ::To { - self.query._reply_sample(self.sample_builder.into()) + let key_expr = self.key_expr?.into_owned(); + let sample = SampleBuilder::delete(key_expr) + .timestamp(self.timestamp) + .qos(self.qos.into()); + #[cfg(feature = "unstable")] + let sample = sample.source_info(self.source_info); + #[cfg(feature = "unstable")] + let sample = sample.attachment(self.attachment); + self.query._reply_sample(sample.into()) + } +} + +impl AsyncResolve for ReplyBuilder<'_, '_, ReplyBuilderPut> { + type Future = Ready; + + fn res_async(self) -> Self::Future { + std::future::ready(self.res_sync()) } } -impl<'a, T> AsyncResolve for ReplySampleBuilder<'a, T> { +impl AsyncResolve for ReplyBuilder<'_, 
'_, ReplyBuilderDelete> { type Future = Ready; fn res_async(self) -> Self::Future { diff --git a/zenoh/src/sample/mod.rs b/zenoh/src/sample/mod.rs index 2b71105d5e..be80f8277e 100644 --- a/zenoh/src/sample/mod.rs +++ b/zenoh/src/sample/mod.rs @@ -590,6 +590,12 @@ impl From for QoSBuilder { } } +impl From for QoSBuilder { + fn from(qos: QoSType) -> Self { + QoSBuilder(QoS { inner: qos }) + } +} + impl From for QoS { fn from(builder: QoSBuilder) -> Self { builder.0 From e4c4be1d4c5dd5b02ed539a57eba324c6e5b2a07 Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Mon, 1 Apr 2024 11:53:44 +0200 Subject: [PATCH 090/124] removed "op" namespace to align naming with ReplyBuilder --- zenoh/src/sample/builder.rs | 46 ++++++++++++++++++------------------- 1 file changed, 22 insertions(+), 24 deletions(-) diff --git a/zenoh/src/sample/builder.rs b/zenoh/src/sample/builder.rs index 7f438d1381..fd697e942a 100644 --- a/zenoh/src/sample/builder.rs +++ b/zenoh/src/sample/builder.rs @@ -64,14 +64,12 @@ pub trait ValueBuilderTrait { fn value>(self, value: T) -> Self; } -pub mod op { - #[derive(Debug)] - pub struct Put; - #[derive(Debug)] - pub struct Delete; - #[derive(Debug)] - pub struct Any; -} +#[derive(Debug)] +pub struct SampleBuilderPut; +#[derive(Debug)] +pub struct SampleBuilderDelete; +#[derive(Debug)] +pub struct SampleBuilderAny; #[derive(Debug)] pub struct SampleBuilder { @@ -79,11 +77,11 @@ pub struct SampleBuilder { _t: PhantomData, } -impl SampleBuilder { +impl SampleBuilder { pub fn put( key_expr: IntoKeyExpr, payload: IntoPayload, - ) -> SampleBuilder + ) -> SampleBuilder where IntoKeyExpr: Into>, IntoPayload: Into, @@ -101,13 +99,13 @@ impl SampleBuilder { #[cfg(feature = "unstable")] attachment: None, }, - _t: PhantomData::, + _t: PhantomData::, } } } -impl SampleBuilder { - pub fn delete(key_expr: IntoKeyExpr) -> SampleBuilder +impl SampleBuilder { + pub fn delete(key_expr: IntoKeyExpr) -> SampleBuilder where IntoKeyExpr: Into>, { @@ -124,7 +122,7 @@ impl 
SampleBuilder { #[cfg(feature = "unstable")] attachment: None, }, - _t: PhantomData::, + _t: PhantomData::, } } } @@ -216,14 +214,14 @@ impl QoSBuilderTrait for SampleBuilder { } } -impl ValueBuilderTrait for SampleBuilder { +impl ValueBuilderTrait for SampleBuilder { fn encoding>(self, encoding: T) -> Self { Self { sample: Sample { encoding: encoding.into(), ..self.sample }, - _t: PhantomData::, + _t: PhantomData::, } } fn payload>(self, payload: T) -> Self { @@ -232,7 +230,7 @@ impl ValueBuilderTrait for SampleBuilder { payload: payload.into(), ..self.sample }, - _t: PhantomData::, + _t: PhantomData::, } } fn value>(self, value: T) -> Self { @@ -243,21 +241,21 @@ impl ValueBuilderTrait for SampleBuilder { encoding, ..self.sample }, - _t: PhantomData::, + _t: PhantomData::, } } } -impl From for SampleBuilder { +impl From for SampleBuilder { fn from(sample: Sample) -> Self { SampleBuilder { sample, - _t: PhantomData::, + _t: PhantomData::, } } } -impl TryFrom for SampleBuilder { +impl TryFrom for SampleBuilder { type Error = zresult::Error; fn try_from(sample: Sample) -> Result { if sample.kind != SampleKind::Put { @@ -265,12 +263,12 @@ impl TryFrom for SampleBuilder { } Ok(SampleBuilder { sample, - _t: PhantomData::, + _t: PhantomData::, }) } } -impl TryFrom for SampleBuilder { +impl TryFrom for SampleBuilder { type Error = zresult::Error; fn try_from(sample: Sample) -> Result { if sample.kind != SampleKind::Delete { @@ -278,7 +276,7 @@ impl TryFrom for SampleBuilder { } Ok(SampleBuilder { sample, - _t: PhantomData::, + _t: PhantomData::, }) } } From d631f761620b377cd9460f275c4f6deeef61e996 Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Mon, 1 Apr 2024 13:25:59 +0200 Subject: [PATCH 091/124] publication builder shortened --- zenoh/src/publication.rs | 145 ++++++++---------------------------- zenoh/src/sample/builder.rs | 8 +- zenoh/src/session.rs | 15 ++-- 3 files changed, 46 insertions(+), 122 deletions(-) diff --git a/zenoh/src/publication.rs 
b/zenoh/src/publication.rs index 1d62375cdd..69715a0867 100644 --- a/zenoh/src/publication.rs +++ b/zenoh/src/publication.rs @@ -41,33 +41,15 @@ use zenoh_result::ZResult; /// The kind of congestion control. pub use zenoh_protocol::core::CongestionControl; -/// A builder for initializing a [`delete`](crate::Session::delete) operation. -/// -/// # Examples -/// ``` -/// # #[tokio::main] -/// # async fn main() { -/// use zenoh::prelude::r#async::*; -/// use zenoh::publication::CongestionControl; -/// -/// let session = zenoh::open(config::peer()).res().await.unwrap(); -/// session -/// .delete("key/expression") -/// .res() -/// .await -/// .unwrap(); -/// # } -/// ``` -pub struct DeleteBuilder<'a, 'b> { - pub(crate) publisher: PublisherBuilder<'a, 'b>, - pub(crate) timestamp: Option, - #[cfg(feature = "unstable")] - pub(crate) source_info: SourceInfo, - #[cfg(feature = "unstable")] - pub(crate) attachment: Option, +#[derive(Debug, Clone)] +pub struct PublicationBuilderPut { + pub(crate) payload: Payload, + pub(crate) encoding: Encoding, } +#[derive(Debug, Clone)] +pub struct PublicationBuilderDelete; -/// A builder for initializing a [`put`](crate::Session::put) operation. 
+/// A builder for initializing a [`put`](crate::Session::put) and [`delete`](crate::Session::delete) operations /// /// # Examples /// ``` @@ -89,10 +71,9 @@ pub struct DeleteBuilder<'a, 'b> { /// ``` #[must_use = "Resolvables do nothing unless you resolve them using the `res` method from either `SyncResolve` or `AsyncResolve`"] #[derive(Debug, Clone)] -pub struct PutBuilder<'a, 'b> { +pub struct PublicationBuilder<'a, 'b, T> { pub(crate) publisher: PublisherBuilder<'a, 'b>, - pub(crate) payload: Payload, - pub(crate) encoding: Encoding, + pub(crate) kind: T, pub(crate) timestamp: Option, #[cfg(feature = "unstable")] pub(crate) source_info: SourceInfo, @@ -100,7 +81,7 @@ pub struct PutBuilder<'a, 'b> { pub(crate) attachment: Option, } -impl QoSBuilderTrait for PutBuilder<'_, '_> { +impl QoSBuilderTrait for PublicationBuilder<'_, '_, T> { #[inline] fn congestion_control(self, congestion_control: CongestionControl) -> Self { Self { @@ -124,58 +105,8 @@ impl QoSBuilderTrait for PutBuilder<'_, '_> { } } -impl QoSBuilderTrait for DeleteBuilder<'_, '_> { - #[inline] - fn congestion_control(self, congestion_control: CongestionControl) -> Self { - Self { - publisher: self.publisher.congestion_control(congestion_control), - ..self - } - } - #[inline] - fn priority(self, priority: Priority) -> Self { - Self { - publisher: self.publisher.priority(priority), - ..self - } - } - #[inline] - fn express(self, is_express: bool) -> Self { - Self { - publisher: self.publisher.express(is_express), - ..self - } - } -} - -impl TimestampBuilderTrait for PutBuilder<'_, '_> { - fn timestamp>>(self, timestamp: T) -> Self { - Self { - timestamp: timestamp.into(), - ..self - } - } -} - -impl SampleBuilderTrait for PutBuilder<'_, '_> { - #[cfg(feature = "unstable")] - fn source_info(self, source_info: SourceInfo) -> Self { - Self { - source_info, - ..self - } - } - #[cfg(feature = "unstable")] - fn attachment>>(self, attachment: T) -> Self { - Self { - attachment: attachment.into(), - ..self 
- } - } -} - -impl TimestampBuilderTrait for DeleteBuilder<'_, '_> { - fn timestamp>>(self, timestamp: T) -> Self { +impl TimestampBuilderTrait for PublicationBuilder<'_, '_, T> { + fn timestamp>>(self, timestamp: TS) -> Self { Self { timestamp: timestamp.into(), ..self @@ -183,7 +114,7 @@ impl TimestampBuilderTrait for DeleteBuilder<'_, '_> { } } -impl SampleBuilderTrait for DeleteBuilder<'_, '_> { +impl SampleBuilderTrait for PublicationBuilder<'_, '_, T> { #[cfg(feature = "unstable")] fn source_info(self, source_info: SourceInfo) -> Self { Self { @@ -192,7 +123,7 @@ impl SampleBuilderTrait for DeleteBuilder<'_, '_> { } } #[cfg(feature = "unstable")] - fn attachment>>(self, attachment: T) -> Self { + fn attachment>>(self, attachment: TA) -> Self { Self { attachment: attachment.into(), ..self @@ -200,10 +131,13 @@ impl SampleBuilderTrait for DeleteBuilder<'_, '_> { } } -impl ValueBuilderTrait for PutBuilder<'_, '_> { +impl ValueBuilderTrait for PublicationBuilder<'_, '_, PublicationBuilderPut> { fn encoding>(self, encoding: T) -> Self { Self { - encoding: encoding.into(), + kind: PublicationBuilderPut { + encoding: encoding.into(), + ..self.kind + }, ..self } } @@ -213,32 +147,23 @@ impl ValueBuilderTrait for PutBuilder<'_, '_> { IntoPayload: Into, { Self { - payload: payload.into(), + kind: PublicationBuilderPut { + payload: payload.into(), + ..self.kind + }, ..self } } fn value>(self, value: T) -> Self { let Value { payload, encoding } = value.into(); Self { - payload, - encoding, + kind: PublicationBuilderPut { payload, encoding }, ..self } } } -impl PutBuilder<'_, '_> { - /// Restrict the matching subscribers that will receive the published data - /// to the ones that have the given [`Locality`](crate::prelude::Locality). 
- #[zenoh_macros::unstable] - #[inline] - pub fn allowed_destination(mut self, destination: Locality) -> Self { - self.publisher = self.publisher.allowed_destination(destination); - self - } -} - -impl DeleteBuilder<'_, '_> { +impl PublicationBuilder<'_, '_, T> { /// Restrict the matching subscribers that will receive the published data /// to the ones that have the given [`Locality`](crate::prelude::Locality). #[zenoh_macros::unstable] @@ -249,23 +174,19 @@ impl DeleteBuilder<'_, '_> { } } -impl Resolvable for PutBuilder<'_, '_> { - type To = ZResult<()>; -} - -impl Resolvable for DeleteBuilder<'_, '_> { +impl Resolvable for PublicationBuilder<'_, '_, T> { type To = ZResult<()>; } -impl SyncResolve for PutBuilder<'_, '_> { +impl SyncResolve for PublicationBuilder<'_, '_, PublicationBuilderPut> { #[inline] fn res_sync(self) -> ::To { let publisher = self.publisher.create_one_shot_publisher()?; resolve_put( &publisher, - self.payload, + self.kind.payload, SampleKind::Put, - self.encoding, + self.kind.encoding, self.timestamp, #[cfg(feature = "unstable")] self.source_info, @@ -275,7 +196,7 @@ impl SyncResolve for PutBuilder<'_, '_> { } } -impl SyncResolve for DeleteBuilder<'_, '_> { +impl SyncResolve for PublicationBuilder<'_, '_, PublicationBuilderDelete> { #[inline] fn res_sync(self) -> ::To { let publisher = self.publisher.create_one_shot_publisher()?; @@ -293,7 +214,7 @@ impl SyncResolve for DeleteBuilder<'_, '_> { } } -impl AsyncResolve for PutBuilder<'_, '_> { +impl AsyncResolve for PublicationBuilder<'_, '_, PublicationBuilderPut> { type Future = Ready; fn res_async(self) -> Self::Future { @@ -301,7 +222,7 @@ impl AsyncResolve for PutBuilder<'_, '_> { } } -impl AsyncResolve for DeleteBuilder<'_, '_> { +impl AsyncResolve for PublicationBuilder<'_, '_, PublicationBuilderDelete> { type Future = Ready; fn res_async(self) -> Self::Future { @@ -1038,7 +959,7 @@ impl<'a, 'b> PublisherBuilder<'a, 'b> { self } - // internal function for `PutBuilder` and `DeleteBuilder` 
+ // internal function for perfroming the publication fn create_one_shot_publisher(self) -> ZResult> { Ok(Publisher { session: self.session, diff --git a/zenoh/src/sample/builder.rs b/zenoh/src/sample/builder.rs index fd697e942a..295451abc1 100644 --- a/zenoh/src/sample/builder.rs +++ b/zenoh/src/sample/builder.rs @@ -64,14 +64,14 @@ pub trait ValueBuilderTrait { fn value>(self, value: T) -> Self; } -#[derive(Debug)] +#[derive(Clone, Debug)] pub struct SampleBuilderPut; -#[derive(Debug)] +#[derive(Clone, Debug)] pub struct SampleBuilderDelete; -#[derive(Debug)] +#[derive(Clone, Debug)] pub struct SampleBuilderAny; -#[derive(Debug)] +#[derive(Clone, Debug)] pub struct SampleBuilder { sample: Sample, _t: PhantomData, diff --git a/zenoh/src/session.rs b/zenoh/src/session.rs index 4d71f58ffa..b1b059d163 100644 --- a/zenoh/src/session.rs +++ b/zenoh/src/session.rs @@ -705,17 +705,19 @@ impl Session { &'a self, key_expr: TryIntoKeyExpr, payload: IntoPayload, - ) -> PutBuilder<'a, 'b> + ) -> PublicationBuilder<'a, 'b, PublicationBuilderPut> where TryIntoKeyExpr: TryInto>, >>::Error: Into, IntoPayload: Into, { - PutBuilder { + PublicationBuilder { publisher: self.declare_publisher(key_expr), - payload: payload.into(), + kind: PublicationBuilderPut { + payload: payload.into(), + encoding: Encoding::default(), + }, timestamp: None, - encoding: Encoding::default(), #[cfg(feature = "unstable")] attachment: None, #[cfg(feature = "unstable")] @@ -743,13 +745,14 @@ impl Session { pub fn delete<'a, 'b: 'a, TryIntoKeyExpr>( &'a self, key_expr: TryIntoKeyExpr, - ) -> DeleteBuilder<'a, 'b> + ) -> PublicationBuilder<'a, 'b, PublicationBuilderDelete> where TryIntoKeyExpr: TryInto>, >>::Error: Into, { - DeleteBuilder { + PublicationBuilder { publisher: self.declare_publisher(key_expr), + kind: PublicationBuilderDelete, timestamp: None, #[cfg(feature = "unstable")] attachment: None, From 9b8aaa69d190547a65084cb5c0be605aa706b67f Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Mon, 1 
Apr 2024 13:45:01 +0200 Subject: [PATCH 092/124] parametrized publication builder --- zenoh/src/publication.rs | 24 ++++++++++++------------ zenoh/src/session.rs | 4 ++-- 2 files changed, 14 insertions(+), 14 deletions(-) diff --git a/zenoh/src/publication.rs b/zenoh/src/publication.rs index 69715a0867..dd1818d842 100644 --- a/zenoh/src/publication.rs +++ b/zenoh/src/publication.rs @@ -71,8 +71,8 @@ pub struct PublicationBuilderDelete; /// ``` #[must_use = "Resolvables do nothing unless you resolve them using the `res` method from either `SyncResolve` or `AsyncResolve`"] #[derive(Debug, Clone)] -pub struct PublicationBuilder<'a, 'b, T> { - pub(crate) publisher: PublisherBuilder<'a, 'b>, +pub struct PublicationBuilder { + pub(crate) publisher: P, pub(crate) kind: T, pub(crate) timestamp: Option, #[cfg(feature = "unstable")] @@ -81,7 +81,7 @@ pub struct PublicationBuilder<'a, 'b, T> { pub(crate) attachment: Option, } -impl QoSBuilderTrait for PublicationBuilder<'_, '_, T> { +impl QoSBuilderTrait for PublicationBuilder, T> { #[inline] fn congestion_control(self, congestion_control: CongestionControl) -> Self { Self { @@ -105,7 +105,7 @@ impl QoSBuilderTrait for PublicationBuilder<'_, '_, T> { } } -impl TimestampBuilderTrait for PublicationBuilder<'_, '_, T> { +impl TimestampBuilderTrait for PublicationBuilder { fn timestamp>>(self, timestamp: TS) -> Self { Self { timestamp: timestamp.into(), @@ -114,7 +114,7 @@ impl TimestampBuilderTrait for PublicationBuilder<'_, '_, T> { } } -impl SampleBuilderTrait for PublicationBuilder<'_, '_, T> { +impl SampleBuilderTrait for PublicationBuilder { #[cfg(feature = "unstable")] fn source_info(self, source_info: SourceInfo) -> Self { Self { @@ -131,7 +131,7 @@ impl SampleBuilderTrait for PublicationBuilder<'_, '_, T> { } } -impl ValueBuilderTrait for PublicationBuilder<'_, '_, PublicationBuilderPut> { +impl

ValueBuilderTrait for PublicationBuilder { fn encoding>(self, encoding: T) -> Self { Self { kind: PublicationBuilderPut { @@ -163,7 +163,7 @@ impl ValueBuilderTrait for PublicationBuilder<'_, '_, PublicationBuilderPut> { } } -impl PublicationBuilder<'_, '_, T> { +impl PublicationBuilder, T> { /// Restrict the matching subscribers that will receive the published data /// to the ones that have the given [`Locality`](crate::prelude::Locality). #[zenoh_macros::unstable] @@ -174,11 +174,11 @@ impl PublicationBuilder<'_, '_, T> { } } -impl Resolvable for PublicationBuilder<'_, '_, T> { +impl Resolvable for PublicationBuilder { type To = ZResult<()>; } -impl SyncResolve for PublicationBuilder<'_, '_, PublicationBuilderPut> { +impl SyncResolve for PublicationBuilder, PublicationBuilderPut> { #[inline] fn res_sync(self) -> ::To { let publisher = self.publisher.create_one_shot_publisher()?; @@ -196,7 +196,7 @@ impl SyncResolve for PublicationBuilder<'_, '_, PublicationBuilderPut> { } } -impl SyncResolve for PublicationBuilder<'_, '_, PublicationBuilderDelete> { +impl SyncResolve for PublicationBuilder, PublicationBuilderDelete> { #[inline] fn res_sync(self) -> ::To { let publisher = self.publisher.create_one_shot_publisher()?; @@ -214,7 +214,7 @@ impl SyncResolve for PublicationBuilder<'_, '_, PublicationBuilderDelete> { } } -impl AsyncResolve for PublicationBuilder<'_, '_, PublicationBuilderPut> { +impl AsyncResolve for PublicationBuilder, PublicationBuilderPut> { type Future = Ready; fn res_async(self) -> Self::Future { @@ -222,7 +222,7 @@ impl AsyncResolve for PublicationBuilder<'_, '_, PublicationBuilderPut> { } } -impl AsyncResolve for PublicationBuilder<'_, '_, PublicationBuilderDelete> { +impl AsyncResolve for PublicationBuilder, PublicationBuilderDelete> { type Future = Ready; fn res_async(self) -> Self::Future { diff --git a/zenoh/src/session.rs b/zenoh/src/session.rs index b1b059d163..63cc0bb7fa 100644 --- a/zenoh/src/session.rs +++ b/zenoh/src/session.rs @@ -705,7 
+705,7 @@ impl Session { &'a self, key_expr: TryIntoKeyExpr, payload: IntoPayload, - ) -> PublicationBuilder<'a, 'b, PublicationBuilderPut> + ) -> PublicationBuilder, PublicationBuilderPut> where TryIntoKeyExpr: TryInto>, >>::Error: Into, @@ -745,7 +745,7 @@ impl Session { pub fn delete<'a, 'b: 'a, TryIntoKeyExpr>( &'a self, key_expr: TryIntoKeyExpr, - ) -> PublicationBuilder<'a, 'b, PublicationBuilderDelete> + ) -> PublicationBuilder, PublicationBuilderDelete> where TryIntoKeyExpr: TryInto>, >>::Error: Into, From bbe07f78294418e5f1d2aa95499987f827e3510c Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Mon, 1 Apr 2024 14:03:02 +0200 Subject: [PATCH 093/124] removed PutPublication, DeletePublication --- zenoh/src/publication.rs | 144 +++++---------------------------------- 1 file changed, 18 insertions(+), 126 deletions(-) diff --git a/zenoh/src/publication.rs b/zenoh/src/publication.rs index dd1818d842..41e2b0fa04 100644 --- a/zenoh/src/publication.rs +++ b/zenoh/src/publication.rs @@ -408,14 +408,19 @@ impl<'a> Publisher<'a> { /// # } /// ``` #[inline] - pub fn put(&self, payload: IntoPayload) -> PutPublication + pub fn put( + &self, + payload: IntoPayload, + ) -> PublicationBuilder<&Publisher<'_>, PublicationBuilderPut> where IntoPayload: Into, { - PutPublication { + PublicationBuilder { publisher: self, - payload: payload.into(), - encoding: Encoding::ZENOH_BYTES, + kind: PublicationBuilderPut { + payload: payload.into(), + encoding: Encoding::ZENOH_BYTES, + }, timestamp: None, #[cfg(feature = "unstable")] source_info: SourceInfo::empty(), @@ -437,9 +442,10 @@ impl<'a> Publisher<'a> { /// publisher.delete().res().await.unwrap(); /// # } /// ``` - pub fn delete(&self) -> DeletePublication { - DeletePublication { + pub fn delete(&self) -> PublicationBuilder<&Publisher<'_>, PublicationBuilderDelete> { + PublicationBuilder { publisher: self, + kind: PublicationBuilderDelete, timestamp: None, #[cfg(feature = "unstable")] source_info: SourceInfo::empty(), @@ 
-674,127 +680,13 @@ impl Drop for Publisher<'_> { } } -/// A [`Resolvable`] returned by [`Publisher::put()`](Publisher::put), -#[must_use = "Resolvables do nothing unless you resolve them using the `res` method from either `SyncResolve` or `AsyncResolve`"] -pub struct PutPublication<'a> { - publisher: &'a Publisher<'a>, - payload: Payload, - encoding: Encoding, - timestamp: Option, - #[cfg(feature = "unstable")] - pub(crate) source_info: SourceInfo, - #[cfg(feature = "unstable")] - pub(crate) attachment: Option, -} - -/// A [`Resolvable`] returned by [`Publisher::delete()`](Publisher::delete) -#[must_use = "Resolvables do nothing unless you resolve them using the `res` method from either `SyncResolve` or `AsyncResolve`"] -pub struct DeletePublication<'a> { - publisher: &'a Publisher<'a>, - timestamp: Option, - #[cfg(feature = "unstable")] - pub(crate) source_info: SourceInfo, - #[cfg(feature = "unstable")] - pub(crate) attachment: Option, -} - -impl TimestampBuilderTrait for PutPublication<'_> { - fn timestamp>>(self, timestamp: T) -> Self { - Self { - timestamp: timestamp.into(), - ..self - } - } -} - -impl SampleBuilderTrait for PutPublication<'_> { - #[cfg(feature = "unstable")] - fn source_info(self, source_info: SourceInfo) -> Self { - Self { - source_info, - ..self - } - } - - #[cfg(feature = "unstable")] - fn attachment>>(self, attachment: T) -> Self { - Self { - attachment: attachment.into(), - ..self - } - } -} - -impl ValueBuilderTrait for PutPublication<'_> { - fn encoding>(self, encoding: T) -> Self { - Self { - encoding: encoding.into(), - ..self - } - } - - fn payload(self, payload: IntoPayload) -> Self - where - IntoPayload: Into, - { - Self { - payload: payload.into(), - ..self - } - } - - fn value>(self, value: T) -> Self { - let Value { payload, encoding } = value.into(); - Self { - payload, - encoding, - ..self - } - } -} - -impl TimestampBuilderTrait for DeletePublication<'_> { - fn timestamp>>(self, timestamp: T) -> Self { - Self { - timestamp: 
timestamp.into(), - ..self - } - } -} - -impl SampleBuilderTrait for DeletePublication<'_> { - #[cfg(feature = "unstable")] - fn source_info(self, source_info: SourceInfo) -> Self { - Self { - source_info, - ..self - } - } - - #[cfg(feature = "unstable")] - fn attachment>>(self, attachment: T) -> Self { - Self { - attachment: attachment.into(), - ..self - } - } -} - -impl Resolvable for PutPublication<'_> { - type To = ZResult<()>; -} - -impl Resolvable for DeletePublication<'_> { - type To = ZResult<()>; -} - -impl SyncResolve for PutPublication<'_> { +impl SyncResolve for PublicationBuilder<&Publisher<'_>, PublicationBuilderPut> { fn res_sync(self) -> ::To { resolve_put( self.publisher, - self.payload, + self.kind.payload, SampleKind::Put, - self.encoding, + self.kind.encoding, self.timestamp, #[cfg(feature = "unstable")] self.source_info, @@ -804,7 +696,7 @@ impl SyncResolve for PutPublication<'_> { } } -impl SyncResolve for DeletePublication<'_> { +impl SyncResolve for PublicationBuilder<&Publisher<'_>, PublicationBuilderDelete> { fn res_sync(self) -> ::To { resolve_put( self.publisher, @@ -820,7 +712,7 @@ impl SyncResolve for DeletePublication<'_> { } } -impl AsyncResolve for PutPublication<'_> { +impl AsyncResolve for PublicationBuilder<&Publisher<'_>, PublicationBuilderPut> { type Future = Ready; fn res_async(self) -> Self::Future { @@ -828,7 +720,7 @@ impl AsyncResolve for PutPublication<'_> { } } -impl AsyncResolve for DeletePublication<'_> { +impl AsyncResolve for PublicationBuilder<&Publisher<'_>, PublicationBuilderDelete> { type Future = Ready; fn res_async(self) -> Self::Future { From 4d0f6e52d07c9c0208430b454f8982044f2e0409 Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Mon, 1 Apr 2024 15:32:30 +0200 Subject: [PATCH 094/124] removed extra uses --- examples/examples/z_ping.rs | 1 - examples/examples/z_pong.rs | 1 - examples/examples/z_pub.rs | 1 - examples/examples/z_pub_shm_thr.rs | 1 - examples/examples/z_pub_thr.rs | 1 - 
.../zenoh-plugin-rest/examples/z_serve_sse.rs | 1 - .../src/replica/align_queryable.rs | 2 - .../src/replica/aligner.rs | 2 +- .../src/replica/storage.rs | 53 +++----- zenoh-ext/src/group.rs | 1 - zenoh-ext/src/querying_subscriber.rs | 2 +- zenoh/src/publication.rs | 65 +++++---- zenoh/src/queryable.rs | 126 +++++++++--------- zenoh/tests/session.rs | 1 - zenoh/tests/unicity.rs | 1 - 15 files changed, 115 insertions(+), 144 deletions(-) diff --git a/examples/examples/z_ping.rs b/examples/examples/z_ping.rs index 59bcaddadc..a57c937e48 100644 --- a/examples/examples/z_ping.rs +++ b/examples/examples/z_ping.rs @@ -16,7 +16,6 @@ use std::time::{Duration, Instant}; use zenoh::config::Config; use zenoh::prelude::sync::*; use zenoh::publication::CongestionControl; -use zenoh::sample::builder::QoSBuilderTrait; use zenoh_examples::CommonArgs; fn main() { diff --git a/examples/examples/z_pong.rs b/examples/examples/z_pong.rs index e0fa079629..baa5683f62 100644 --- a/examples/examples/z_pong.rs +++ b/examples/examples/z_pong.rs @@ -15,7 +15,6 @@ use clap::Parser; use zenoh::config::Config; use zenoh::prelude::sync::*; use zenoh::publication::CongestionControl; -use zenoh::sample::builder::QoSBuilderTrait; use zenoh_examples::CommonArgs; fn main() { diff --git a/examples/examples/z_pub.rs b/examples/examples/z_pub.rs index b6a1ddc0d8..8cd3c4edba 100644 --- a/examples/examples/z_pub.rs +++ b/examples/examples/z_pub.rs @@ -15,7 +15,6 @@ use clap::Parser; use std::time::Duration; use zenoh::config::Config; use zenoh::prelude::r#async::*; -use zenoh::sample::builder::SampleBuilderTrait; use zenoh_examples::CommonArgs; #[tokio::main] diff --git a/examples/examples/z_pub_shm_thr.rs b/examples/examples/z_pub_shm_thr.rs index 86429e8ab7..c8a33f98fa 100644 --- a/examples/examples/z_pub_shm_thr.rs +++ b/examples/examples/z_pub_shm_thr.rs @@ -15,7 +15,6 @@ use clap::Parser; use zenoh::config::Config; use zenoh::prelude::r#async::*; use zenoh::publication::CongestionControl; -use 
zenoh::sample::builder::QoSBuilderTrait; use zenoh::shm::SharedMemoryManager; use zenoh_examples::CommonArgs; diff --git a/examples/examples/z_pub_thr.rs b/examples/examples/z_pub_thr.rs index 78d54111a8..4354ad2e68 100644 --- a/examples/examples/z_pub_thr.rs +++ b/examples/examples/z_pub_thr.rs @@ -16,7 +16,6 @@ use clap::Parser; use std::convert::TryInto; use zenoh::prelude::sync::*; use zenoh::publication::CongestionControl; -use zenoh::sample::builder::QoSBuilderTrait; use zenoh_examples::CommonArgs; fn main() { diff --git a/plugins/zenoh-plugin-rest/examples/z_serve_sse.rs b/plugins/zenoh-plugin-rest/examples/z_serve_sse.rs index c353826fab..bb76005d6e 100644 --- a/plugins/zenoh-plugin-rest/examples/z_serve_sse.rs +++ b/plugins/zenoh-plugin-rest/examples/z_serve_sse.rs @@ -15,7 +15,6 @@ use clap::{arg, Command}; use std::time::Duration; use zenoh::prelude::r#async::*; use zenoh::publication::CongestionControl; -use zenoh::sample::builder::QoSBuilderTrait; use zenoh::{config::Config, key_expr::keyexpr}; const HTML: &str = r#" diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/align_queryable.rs b/plugins/zenoh-plugin-storage-manager/src/replica/align_queryable.rs index 729572601c..1ce6a1cb16 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/align_queryable.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/align_queryable.rs @@ -20,8 +20,6 @@ use std::str; use std::str::FromStr; use zenoh::payload::StringOrBase64; use zenoh::prelude::r#async::*; -use zenoh::sample::builder::TimestampBuilderTrait; -use zenoh::sample::builder::ValueBuilderTrait; use zenoh::time::Timestamp; use zenoh::Session; diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs b/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs index 5121f0b445..64d5cfa1cd 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/aligner.rs @@ -21,7 +21,7 @@ use std::str; use 
zenoh::key_expr::{KeyExpr, OwnedKeyExpr}; use zenoh::payload::StringOrBase64; use zenoh::prelude::r#async::*; -use zenoh::sample::builder::{SampleBuilder, TimestampBuilderTrait, ValueBuilderTrait}; +use zenoh::sample::builder::SampleBuilder; use zenoh::time::Timestamp; use zenoh::Session; diff --git a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs index feebfb588a..06c5882408 100644 --- a/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs +++ b/plugins/zenoh-plugin-storage-manager/src/replica/storage.rs @@ -23,14 +23,13 @@ use std::str::{self, FromStr}; use std::time::{SystemTime, UNIX_EPOCH}; use zenoh::buffers::buffer::SplitBuffer; use zenoh::buffers::ZBuf; -use zenoh::key_expr::KeyExpr; use zenoh::prelude::r#async::*; use zenoh::query::{ConsolidationMode, QueryTarget}; -use zenoh::sample::builder::{SampleBuilder, TimestampBuilderTrait, ValueBuilderTrait}; +use zenoh::sample::builder::SampleBuilder; use zenoh::sample::{Sample, SampleKind}; use zenoh::time::{new_reception_timestamp, Timestamp, NTP64}; use zenoh::value::Value; -use zenoh::{Result as ZResult, Session, SessionDeclarations}; +use zenoh::{Result as ZResult, Session}; use zenoh_backend_traits::config::{GarbageCollectionConfig, StorageConfig}; use zenoh_backend_traits::{Capability, History, Persistence, StorageInsertionResult, StoredData}; use zenoh_keyexpr::key_expr::OwnedKeyExpr; @@ -296,31 +295,25 @@ impl StorageService { ); // there might be the case that the actual update was outdated due to a wild card update, but not stored yet in the storage. // get the relevant wild card entry and use that value and timestamp to update the storage - let sample_to_store: Sample = match self + let sample_to_store: Sample = if let Some(update) = self .ovderriding_wild_update(&k, sample.timestamp().unwrap()) .await { - Some(Update { - kind: SampleKind::Put, - data, - }) => { - let Value { - payload, encoding, .. 
- } = data.value; - SampleBuilder::put(KeyExpr::from(k.clone()), payload) - .encoding(encoding) - .timestamp(data.timestamp) - .into() + match update.kind { + SampleKind::Put => { + SampleBuilder::put(KeyExpr::from(k.clone()), update.data.value.payload) + .encoding(update.data.value.encoding) + .timestamp(update.data.timestamp) + .into() + } + SampleKind::Delete => SampleBuilder::delete(KeyExpr::from(k.clone())) + .timestamp(update.data.timestamp) + .into(), } - Some(Update { - kind: SampleKind::Delete, - data, - }) => SampleBuilder::delete(KeyExpr::from(k.clone())) - .timestamp(data.timestamp) - .into(), - None => SampleBuilder::from(sample.clone()) + } else { + SampleBuilder::from(sample.clone()) .keyexpr(k.clone()) - .into(), + .into() }; let stripped_key = match self.strip_prefix(sample_to_store.key_expr()) { @@ -520,12 +513,9 @@ impl StorageService { match storage.get(stripped_key, q.parameters()).await { Ok(stored_data) => { for entry in stored_data { - let Value { - payload, encoding, .. - } = entry.value; if let Err(e) = q - .reply(key.clone(), payload) - .encoding(encoding) + .reply(key.clone(), entry.value.payload) + .encoding(entry.value.encoding) .timestamp(entry.timestamp) .res() .await @@ -555,12 +545,9 @@ impl StorageService { match storage.get(stripped_key, q.parameters()).await { Ok(stored_data) => { for entry in stored_data { - let Value { - payload, encoding, .. 
- } = entry.value; if let Err(e) = q - .reply(q.key_expr().clone(), payload) - .encoding(encoding) + .reply(q.key_expr().clone(), entry.value.payload) + .encoding(entry.value.encoding) .timestamp(entry.timestamp) .res() .await diff --git a/zenoh-ext/src/group.rs b/zenoh-ext/src/group.rs index 2075ea9472..8a7823ed72 100644 --- a/zenoh-ext/src/group.rs +++ b/zenoh-ext/src/group.rs @@ -29,7 +29,6 @@ use zenoh::payload::PayloadReader; use zenoh::prelude::r#async::*; use zenoh::publication::Publisher; use zenoh::query::ConsolidationMode; -use zenoh::sample::builder::QoSBuilderTrait; use zenoh::Error as ZError; use zenoh::Result as ZResult; use zenoh::Session; diff --git a/zenoh-ext/src/querying_subscriber.rs b/zenoh-ext/src/querying_subscriber.rs index 4e9b46854d..d749a94ed9 100644 --- a/zenoh-ext/src/querying_subscriber.rs +++ b/zenoh-ext/src/querying_subscriber.rs @@ -20,7 +20,7 @@ use std::time::Duration; use zenoh::handlers::{locked, DefaultHandler}; use zenoh::prelude::r#async::*; use zenoh::query::{QueryConsolidation, QueryTarget, ReplyKeyExpr}; -use zenoh::sample::builder::{SampleBuilder, TimestampBuilderTrait}; +use zenoh::sample::builder::SampleBuilder; use zenoh::subscriber::{Reliability, Subscriber}; use zenoh::time::{new_reception_timestamp, Timestamp}; use zenoh::Result as ZResult; diff --git a/zenoh/src/publication.rs b/zenoh/src/publication.rs index 41e2b0fa04..64fa5b49c6 100644 --- a/zenoh/src/publication.rs +++ b/zenoh/src/publication.rs @@ -15,9 +15,6 @@ //! Publishing primitives. 
use crate::net::primitives::Primitives; use crate::prelude::*; -use crate::sample::builder::{ - QoSBuilderTrait, SampleBuilderTrait, TimestampBuilderTrait, ValueBuilderTrait, -}; #[zenoh_macros::unstable] use crate::sample::Attachment; use crate::sample::{DataInfo, QoS, Sample, SampleFields, SampleKind}; @@ -105,29 +102,14 @@ impl QoSBuilderTrait for PublicationBuilder, T> { } } -impl TimestampBuilderTrait for PublicationBuilder { - fn timestamp>>(self, timestamp: TS) -> Self { - Self { - timestamp: timestamp.into(), - ..self - } - } -} - -impl SampleBuilderTrait for PublicationBuilder { - #[cfg(feature = "unstable")] - fn source_info(self, source_info: SourceInfo) -> Self { - Self { - source_info, - ..self - } - } - #[cfg(feature = "unstable")] - fn attachment>>(self, attachment: TA) -> Self { - Self { - attachment: attachment.into(), - ..self - } +impl PublicationBuilder, T> { + /// Restrict the matching subscribers that will receive the published data + /// to the ones that have the given [`Locality`](crate::prelude::Locality). + #[zenoh_macros::unstable] + #[inline] + pub fn allowed_destination(mut self, destination: Locality) -> Self { + self.publisher = self.publisher.allowed_destination(destination); + self } } @@ -163,14 +145,29 @@ impl

ValueBuilderTrait for PublicationBuilder { } } -impl PublicationBuilder, T> { - /// Restrict the matching subscribers that will receive the published data - /// to the ones that have the given [`Locality`](crate::prelude::Locality). - #[zenoh_macros::unstable] - #[inline] - pub fn allowed_destination(mut self, destination: Locality) -> Self { - self.publisher = self.publisher.allowed_destination(destination); - self +impl SampleBuilderTrait for PublicationBuilder { + #[cfg(feature = "unstable")] + fn source_info(self, source_info: SourceInfo) -> Self { + Self { + source_info, + ..self + } + } + #[cfg(feature = "unstable")] + fn attachment>>(self, attachment: TA) -> Self { + Self { + attachment: attachment.into(), + ..self + } + } +} + +impl TimestampBuilderTrait for PublicationBuilder { + fn timestamp>>(self, timestamp: TS) -> Self { + Self { + timestamp: timestamp.into(), + ..self + } } } diff --git a/zenoh/src/queryable.rs b/zenoh/src/queryable.rs index 37c3a2303a..0696fcbe33 100644 --- a/zenoh/src/queryable.rs +++ b/zenoh/src/queryable.rs @@ -18,9 +18,7 @@ use crate::encoding::Encoding; use crate::handlers::{locked, DefaultHandler}; use crate::net::primitives::Primitives; use crate::prelude::*; -use crate::sample::builder::{ - QoSBuilderTrait, SampleBuilder, SampleBuilderTrait, TimestampBuilderTrait, ValueBuilderTrait, -}; +use crate::sample::builder::SampleBuilder; use crate::sample::{QoSBuilder, SourceInfo}; use crate::Id; use crate::SessionRef; @@ -287,17 +285,17 @@ impl TimestampBuilderTrait for ReplyBuilder<'_, '_, T> { impl SampleBuilderTrait for ReplyBuilder<'_, '_, T> { #[cfg(feature = "unstable")] - fn source_info(self, source_info: SourceInfo) -> Self { + fn attachment>>(self, attachment: U) -> Self { Self { - source_info, + attachment: attachment.into(), ..self } } #[cfg(feature = "unstable")] - fn attachment>>(self, attachment: U) -> Self { + fn source_info(self, source_info: SourceInfo) -> Self { Self { - attachment: attachment.into(), + 
source_info, ..self } } @@ -382,6 +380,63 @@ impl SyncResolve for ReplyBuilder<'_, '_, ReplyBuilderDelete> { } } +impl Query { + fn _reply_sample(&self, sample: Sample) -> ZResult<()> { + if !self._accepts_any_replies().unwrap_or(false) + && !self.key_expr().intersects(&sample.key_expr) + { + bail!("Attempted to reply on `{}`, which does not intersect with query `{}`, despite query only allowing replies on matching key expressions", sample.key_expr, self.key_expr()) + } + #[cfg(not(feature = "unstable"))] + let ext_sinfo = None; + #[cfg(feature = "unstable")] + let ext_sinfo = sample.source_info.into(); + self.inner.primitives.send_response(Response { + rid: self.inner.qid, + wire_expr: WireExpr { + scope: 0, + suffix: std::borrow::Cow::Owned(sample.key_expr.into()), + mapping: Mapping::Sender, + }, + payload: ResponseBody::Reply(zenoh::Reply { + consolidation: zenoh::Consolidation::DEFAULT, + ext_unknown: vec![], + payload: match sample.kind { + SampleKind::Put => ReplyBody::Put(Put { + timestamp: sample.timestamp, + encoding: sample.encoding.into(), + ext_sinfo, + #[cfg(feature = "shared-memory")] + ext_shm: None, + #[cfg(feature = "unstable")] + ext_attachment: sample.attachment.map(|a| a.into()), + #[cfg(not(feature = "unstable"))] + ext_attachment: None, + ext_unknown: vec![], + payload: sample.payload.into(), + }), + SampleKind::Delete => ReplyBody::Del(Del { + timestamp: sample.timestamp, + ext_sinfo, + #[cfg(feature = "unstable")] + ext_attachment: sample.attachment.map(|a| a.into()), + #[cfg(not(feature = "unstable"))] + ext_attachment: None, + ext_unknown: vec![], + }), + }, + }), + ext_qos: sample.qos.into(), + ext_tstamp: None, + ext_respid: Some(response::ext::ResponderIdType { + zid: self.inner.zid, + eid: self.eid, + }), + }); + Ok(()) + } +} + impl AsyncResolve for ReplyBuilder<'_, '_, ReplyBuilderPut> { type Future = Ready; @@ -467,63 +522,6 @@ impl<'a> AsyncResolve for ReplyErrBuilder<'a> { } } -impl Query { - fn _reply_sample(&self, sample: 
Sample) -> ZResult<()> { - if !self._accepts_any_replies().unwrap_or(false) - && !self.key_expr().intersects(&sample.key_expr) - { - bail!("Attempted to reply on `{}`, which does not intersect with query `{}`, despite query only allowing replies on matching key expressions", sample.key_expr, self.key_expr()) - } - #[cfg(not(feature = "unstable"))] - let ext_sinfo = None; - #[cfg(feature = "unstable")] - let ext_sinfo = sample.source_info.into(); - self.inner.primitives.send_response(Response { - rid: self.inner.qid, - wire_expr: WireExpr { - scope: 0, - suffix: std::borrow::Cow::Owned(sample.key_expr.into()), - mapping: Mapping::Sender, - }, - payload: ResponseBody::Reply(zenoh::Reply { - consolidation: zenoh::Consolidation::DEFAULT, - ext_unknown: vec![], - payload: match sample.kind { - SampleKind::Put => ReplyBody::Put(Put { - timestamp: sample.timestamp, - encoding: sample.encoding.into(), - ext_sinfo, - #[cfg(feature = "shared-memory")] - ext_shm: None, - #[cfg(feature = "unstable")] - ext_attachment: sample.attachment.map(|a| a.into()), - #[cfg(not(feature = "unstable"))] - ext_attachment: None, - ext_unknown: vec![], - payload: sample.payload.into(), - }), - SampleKind::Delete => ReplyBody::Del(Del { - timestamp: sample.timestamp, - ext_sinfo, - #[cfg(feature = "unstable")] - ext_attachment: sample.attachment.map(|a| a.into()), - #[cfg(not(feature = "unstable"))] - ext_attachment: None, - ext_unknown: vec![], - }), - }, - }), - ext_qos: sample.qos.into(), - ext_tstamp: None, - ext_respid: Some(response::ext::ResponderIdType { - zid: self.inner.zid, - eid: self.eid, - }), - }); - Ok(()) - } -} - pub(crate) struct QueryableState { pub(crate) id: Id, pub(crate) key_expr: WireExpr<'static>, diff --git a/zenoh/tests/session.rs b/zenoh/tests/session.rs index 0518316be9..8c2d2e9937 100644 --- a/zenoh/tests/session.rs +++ b/zenoh/tests/session.rs @@ -15,7 +15,6 @@ use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::Arc; use std::time::Duration; use 
zenoh::prelude::r#async::*; -use zenoh::sample::builder::QoSBuilderTrait; use zenoh_core::ztimeout; const TIMEOUT: Duration = Duration::from_secs(60); diff --git a/zenoh/tests/unicity.rs b/zenoh/tests/unicity.rs index a71a0a8034..f34704fb7e 100644 --- a/zenoh/tests/unicity.rs +++ b/zenoh/tests/unicity.rs @@ -16,7 +16,6 @@ use std::sync::Arc; use std::time::Duration; use tokio::runtime::Handle; use zenoh::prelude::r#async::*; -use zenoh::sample::builder::QoSBuilderTrait; use zenoh_core::ztimeout; const TIMEOUT: Duration = Duration::from_secs(60); From 23931f92d5f9c321d8a0247a1379cc76b0275def Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Mon, 1 Apr 2024 15:48:02 +0200 Subject: [PATCH 095/124] more cleanup --- plugins/zenoh-plugin-rest/src/lib.rs | 1 - zenoh/src/query.rs | 1 - zenoh/src/sample/builder.rs | 2 ++ zenoh/src/sample/mod.rs | 8 +++++++- zenoh/src/session.rs | 1 + zenoh/tests/attachments.rs | 2 +- 6 files changed, 11 insertions(+), 4 deletions(-) diff --git a/plugins/zenoh-plugin-rest/src/lib.rs b/plugins/zenoh-plugin-rest/src/lib.rs index f78c541eff..43c3f33776 100644 --- a/plugins/zenoh-plugin-rest/src/lib.rs +++ b/plugins/zenoh-plugin-rest/src/lib.rs @@ -34,7 +34,6 @@ use zenoh::plugins::{RunningPluginTrait, ZenohPlugin}; use zenoh::prelude::r#async::*; use zenoh::query::{QueryConsolidation, Reply}; use zenoh::runtime::Runtime; -use zenoh::sample::builder::ValueBuilderTrait; use zenoh::selector::TIME_RANGE_KEY; use zenoh::Session; use zenoh_plugin_trait::{plugin_long_version, plugin_version, Plugin, PluginControl}; diff --git a/zenoh/src/query.rs b/zenoh/src/query.rs index 9f96db4f4b..cb1116130d 100644 --- a/zenoh/src/query.rs +++ b/zenoh/src/query.rs @@ -15,7 +15,6 @@ //! Query primitives. 
use crate::handlers::{locked, Callback, DefaultHandler}; use crate::prelude::*; -use crate::sample::builder::{QoSBuilderTrait, SampleBuilderTrait, ValueBuilderTrait}; #[zenoh_macros::unstable] use crate::sample::Attachment; use crate::sample::QoSBuilder; diff --git a/zenoh/src/sample/builder.rs b/zenoh/src/sample/builder.rs index 295451abc1..5fab36617d 100644 --- a/zenoh/src/sample/builder.rs +++ b/zenoh/src/sample/builder.rs @@ -14,9 +14,11 @@ use std::marker::PhantomData; +#[cfg(feature = "unstable")] use crate::sample::Attachment; use crate::sample::QoS; use crate::sample::QoSBuilder; +#[cfg(feature = "unstable")] use crate::sample::SourceInfo; use crate::Encoding; use crate::KeyExpr; diff --git a/zenoh/src/sample/mod.rs b/zenoh/src/sample/mod.rs index be80f8277e..6e457578a3 100644 --- a/zenoh/src/sample/mod.rs +++ b/zenoh/src/sample/mod.rs @@ -117,7 +117,12 @@ impl DataInfoIntoSample for Option { IntoPayload: Into, { if let Some(data_info) = self { - data_info.into_sample(key_expr, payload, attachment) + data_info.into_sample( + key_expr, + payload, + #[cfg(feature = "unstable")] + attachment, + ) } else { Sample { key_expr: key_expr.into(), @@ -172,6 +177,7 @@ impl SourceInfo { } } +#[zenoh_macros::unstable] impl From for Option { fn from(source_info: SourceInfo) -> Option { if source_info.is_empty() { diff --git a/zenoh/src/session.rs b/zenoh/src/session.rs index 63cc0bb7fa..c44cb4f817 100644 --- a/zenoh/src/session.rs +++ b/zenoh/src/session.rs @@ -809,6 +809,7 @@ impl Session { #[cfg(feature = "unstable")] attachment: None, handler: DefaultHandler, + #[cfg(feature = "unstable")] source_info: SourceInfo::empty(), } } diff --git a/zenoh/tests/attachments.rs b/zenoh/tests/attachments.rs index 2725351ab0..9fb99b7cc0 100644 --- a/zenoh/tests/attachments.rs +++ b/zenoh/tests/attachments.rs @@ -14,7 +14,7 @@ #[cfg(feature = "unstable")] #[test] fn pubsub() { - use zenoh::{prelude::sync::*, sample::builder::SampleBuilderTrait}; + use zenoh::prelude::sync::*; let 
zenoh = zenoh::open(Config::default()).res().unwrap(); let _sub = zenoh From e816f4efdc8591ce4db6ea9c88cc2109e1c037d4 Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Thu, 4 Apr 2024 11:36:02 +0200 Subject: [PATCH 096/124] Add unicast open/close time tests (#898) * Add unicast open/close time * Remove unused import * Add print to tests --- io/zenoh-transport/tests/unicast_time.rs | 521 +++++++++++++++++++++++ 1 file changed, 521 insertions(+) create mode 100644 io/zenoh-transport/tests/unicast_time.rs diff --git a/io/zenoh-transport/tests/unicast_time.rs b/io/zenoh-transport/tests/unicast_time.rs new file mode 100644 index 0000000000..75d3ae1d98 --- /dev/null +++ b/io/zenoh-transport/tests/unicast_time.rs @@ -0,0 +1,521 @@ +// +// Copyright (c) 2023 ZettaScale Technology +// +// This program and the accompanying materials are made available under the +// terms of the Eclipse Public License 2.0 which is available at +// http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 +// which is available at https://www.apache.org/licenses/LICENSE-2.0. +// +// SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 +// +// Contributors: +// ZettaScale Zenoh Team, +// +use std::{ + convert::TryFrom, + sync::Arc, + time::{Duration, Instant}, +}; +use zenoh_core::ztimeout; +use zenoh_link::EndPoint; +use zenoh_protocol::core::{WhatAmI, ZenohId}; +use zenoh_result::ZResult; +use zenoh_transport::{ + multicast::TransportMulticast, + unicast::{test_helpers::make_transport_manager_builder, TransportUnicast}, + DummyTransportPeerEventHandler, TransportEventHandler, TransportManager, + TransportMulticastEventHandler, TransportPeer, TransportPeerEventHandler, +}; + +const TIMEOUT: Duration = Duration::from_secs(60); +const TIMEOUT_EXPECTED: Duration = Duration::from_secs(5); +const SLEEP: Duration = Duration::from_millis(100); + +macro_rules! 
ztimeout_expected { + ($f:expr) => { + tokio::time::timeout(TIMEOUT_EXPECTED, $f).await.unwrap() + }; +} + +#[cfg(test)] +#[derive(Default)] +struct SHRouterOpenClose; + +impl TransportEventHandler for SHRouterOpenClose { + fn new_unicast( + &self, + _peer: TransportPeer, + _transport: TransportUnicast, + ) -> ZResult> { + Ok(Arc::new(DummyTransportPeerEventHandler)) + } + + fn new_multicast( + &self, + _transport: TransportMulticast, + ) -> ZResult> { + panic!(); + } +} + +// Transport Handler for the client +struct SHClientOpenClose {} + +impl SHClientOpenClose { + fn new() -> Self { + Self {} + } +} + +impl TransportEventHandler for SHClientOpenClose { + fn new_unicast( + &self, + _peer: TransportPeer, + _transport: TransportUnicast, + ) -> ZResult> { + Ok(Arc::new(DummyTransportPeerEventHandler)) + } + + fn new_multicast( + &self, + _transport: TransportMulticast, + ) -> ZResult> { + panic!(); + } +} + +async fn time_transport( + listen_endpoint: &EndPoint, + connect_endpoint: &EndPoint, + lowlatency_transport: bool, +) { + if lowlatency_transport { + println!(">>> Low latency transport"); + } else { + println!(">>> Universal transport"); + } + /* [ROUTER] */ + let router_id = ZenohId::try_from([1]).unwrap(); + + let router_handler = Arc::new(SHRouterOpenClose); + // Create the router transport manager + let unicast = make_transport_manager_builder( + #[cfg(feature = "transport_multilink")] + 1, + #[cfg(feature = "shared-memory")] + false, + lowlatency_transport, + ) + .max_sessions(1); + let router_manager = TransportManager::builder() + .whatami(WhatAmI::Router) + .zid(router_id) + .unicast(unicast) + .build(router_handler.clone()) + .unwrap(); + + /* [CLIENT] */ + let client01_id = ZenohId::try_from([2]).unwrap(); + + // Create the transport transport manager for the first client + let unicast = make_transport_manager_builder( + #[cfg(feature = "transport_multilink")] + 1, + #[cfg(feature = "shared-memory")] + false, + lowlatency_transport, + ) + 
.max_sessions(1); + let client01_manager = TransportManager::builder() + .whatami(WhatAmI::Client) + .zid(client01_id) + .unicast(unicast) + .build(Arc::new(SHClientOpenClose::new())) + .unwrap(); + + /* [1] */ + // Add the locator on the router + let start = Instant::now(); + ztimeout!(router_manager.add_listener(listen_endpoint.clone())).unwrap(); + println!("Add listener {}: {:#?}", listen_endpoint, start.elapsed()); + + // Open a transport from the client to the router + let start = Instant::now(); + let c_ses1 = + ztimeout_expected!(client01_manager.open_transport_unicast(connect_endpoint.clone())) + .unwrap(); + println!( + "Open transport {}: {:#?}", + connect_endpoint, + start.elapsed() + ); + + // Verify that the transport has been open on the router + ztimeout!(async { + loop { + let transports = ztimeout!(router_manager.get_transports_unicast()); + let s = transports + .iter() + .find(|s| s.get_zid().unwrap() == client01_id); + + match s { + Some(s) => { + let links = s.get_links().unwrap(); + assert_eq!(links.len(), 1); + break; + } + None => tokio::time::sleep(SLEEP).await, + } + } + }); + + /* [2] */ + // Close the open transport on the client + let start = Instant::now(); + ztimeout!(c_ses1.close()).unwrap(); + println!( + "Close transport {}: {:#?}", + connect_endpoint, + start.elapsed() + ); + + // Verify that the transport has been closed also on the router + ztimeout!(async { + loop { + let transports = ztimeout!(router_manager.get_transports_unicast()); + let index = transports + .iter() + .find(|s| s.get_zid().unwrap() == client01_id); + if index.is_none() { + break; + } + tokio::time::sleep(SLEEP).await; + } + }); + + /* [3] */ + let start = Instant::now(); + ztimeout!(router_manager.del_listener(listen_endpoint)).unwrap(); + println!( + "Delete listener {}: {:#?}", + listen_endpoint, + start.elapsed() + ); + + ztimeout!(async { + while !router_manager.get_listeners().await.is_empty() { + tokio::time::sleep(SLEEP).await; + } + }); + + // Wait 
a little bit + tokio::time::sleep(SLEEP).await; + + ztimeout!(router_manager.close()); + ztimeout!(client01_manager.close()); + + // Wait a little bit + tokio::time::sleep(SLEEP).await; +} + +async fn time_universal_transport(endpoint: &EndPoint) { + time_transport(endpoint, endpoint, false).await +} + +async fn time_lowlatency_transport(endpoint: &EndPoint) { + time_transport(endpoint, endpoint, true).await +} + +#[cfg(feature = "transport_tcp")] +#[tokio::test(flavor = "multi_thread", worker_threads = 4)] +#[ignore] +async fn time_tcp_only() { + let _ = env_logger::try_init(); + let endpoint: EndPoint = format!("tcp/127.0.0.1:{}", 13000).parse().unwrap(); + time_universal_transport(&endpoint).await; +} + +#[cfg(feature = "transport_tcp")] +#[tokio::test(flavor = "multi_thread", worker_threads = 4)] +#[ignore] +async fn time_tcp_only_with_lowlatency_transport() { + let _ = env_logger::try_init(); + let endpoint: EndPoint = format!("tcp/127.0.0.1:{}", 13100).parse().unwrap(); + time_lowlatency_transport(&endpoint).await; +} + +#[cfg(feature = "transport_udp")] +#[tokio::test(flavor = "multi_thread", worker_threads = 4)] +#[ignore] +async fn time_udp_only() { + let _ = env_logger::try_init(); + let endpoint: EndPoint = format!("udp/127.0.0.1:{}", 13010).parse().unwrap(); + time_universal_transport(&endpoint).await; +} + +#[cfg(feature = "transport_udp")] +#[tokio::test(flavor = "multi_thread", worker_threads = 4)] +#[ignore] +async fn time_udp_only_with_lowlatency_transport() { + let _ = env_logger::try_init(); + let endpoint: EndPoint = format!("udp/127.0.0.1:{}", 13110).parse().unwrap(); + time_lowlatency_transport(&endpoint).await; +} + +#[cfg(feature = "transport_ws")] +#[tokio::test(flavor = "multi_thread", worker_threads = 4)] +#[ignore] +async fn time_ws_only() { + let _ = env_logger::try_init(); + let endpoint: EndPoint = format!("ws/127.0.0.1:{}", 13020).parse().unwrap(); + time_universal_transport(&endpoint).await; +} + +#[cfg(feature = "transport_ws")] 
+#[tokio::test(flavor = "multi_thread", worker_threads = 4)] +#[ignore] +async fn time_ws_only_with_lowlatency_transport() { + let _ = env_logger::try_init(); + let endpoint: EndPoint = format!("ws/127.0.0.1:{}", 13120).parse().unwrap(); + time_lowlatency_transport(&endpoint).await; +} + +#[cfg(feature = "transport_unixpipe")] +#[tokio::test(flavor = "multi_thread", worker_threads = 4)] +#[ignore] +async fn time_unixpipe_only() { + let _ = env_logger::try_init(); + let endpoint: EndPoint = "unixpipe/time_unixpipe_only".parse().unwrap(); + time_universal_transport(&endpoint).await; +} + +#[cfg(feature = "transport_unixpipe")] +#[tokio::test(flavor = "multi_thread", worker_threads = 4)] +#[ignore] +async fn time_unixpipe_only_with_lowlatency_transport() { + let _ = env_logger::try_init(); + let endpoint: EndPoint = "unixpipe/time_unixpipe_only_with_lowlatency_transport" + .parse() + .unwrap(); + time_lowlatency_transport(&endpoint).await; +} + +#[cfg(all(feature = "transport_unixsock-stream", target_family = "unix"))] +#[tokio::test(flavor = "multi_thread", worker_threads = 4)] +#[ignore] +async fn time_unix_only() { + let _ = env_logger::try_init(); + let f1 = "zenoh-test-unix-socket-9.sock"; + let _ = std::fs::remove_file(f1); + let endpoint: EndPoint = format!("unixsock-stream/{f1}").parse().unwrap(); + time_universal_transport(&endpoint).await; + let _ = std::fs::remove_file(f1); + let _ = std::fs::remove_file(format!("{f1}.lock")); +} + +#[cfg(feature = "transport_tls")] +#[tokio::test(flavor = "multi_thread", worker_threads = 4)] +#[ignore] +async fn time_tls_only() { + use zenoh_link::tls::config::*; + + let _ = env_logger::try_init(); + // NOTE: this an auto-generated pair of certificate and key. + // The target domain is localhost, so it has no real + // mapping to any existing domain. 
The certificate and key + // have been generated using: https://github.com/jsha/minica + let key = "-----BEGIN RSA PRIVATE KEY----- +MIIEpAIBAAKCAQEAsfqAuhElN4HnyeqLovSd4Qe+nNv5AwCjSO+HFiF30x3vQ1Hi +qRA0UmyFlSqBnFH3TUHm4Jcad40QfrX8f11NKGZdpvKHsMYqYjZnYkRFGS2s4fQy +aDbV5M06s3UDX8ETPgY41Y8fCKTSVdi9iHkwcVrXMxUu4IBBx0C1r2GSo3gkIBnU +cELdFdaUOSbdCipJhbnkwixEr2h7PXxwba7SIZgZtRaQWak1VE9b716qe3iMuMha +Efo/UoFmeZCPu5spfwaOZsnCsxRPk2IjbzlsHTJ09lM9wmbEFHBMVAXejLTk++Sr +Xt8jASZhNen/2GzyLQNAquGn98lCMQ6SsE9vLQIDAQABAoIBAGQkKggHm6Q20L+4 +2+bNsoOqguLplpvM4RMpyx11qWE9h6GeUmWD+5yg+SysJQ9aw0ZSHWEjRD4ePji9 +lxvm2IIxzuIftp+NcM2gBN2ywhpfq9XbO/2NVR6PJ0dQQJzBG12bzKDFDdYkP0EU +WdiPL+WoEkvo0F57bAd77n6G7SZSgxYekBF+5S6rjbu5I1cEKW+r2vLehD4uFCVX +Q0Tu7TyIOE1KJ2anRb7ZXVUaguNj0/Er7EDT1+wN8KJKvQ1tYGIq/UUBtkP9nkOI +9XJd25k6m5AQPDddzd4W6/5+M7kjyVPi3CsQcpBPss6ueyecZOMaKqdWAHeEyaak +r67TofUCgYEA6GBa+YkRvp0Ept8cd5mh4gCRM8wUuhtzTQnhubCPivy/QqMWScdn +qD0OiARLAsqeoIfkAVgyqebVnxwTrKTvWe0JwpGylEVWQtpGz3oHgjST47yZxIiY +CSAaimi2CYnJZ+QB2oBkFVwNCuXdPEGX6LgnOGva19UKrm6ONsy6V9MCgYEAxBJu +fu4dGXZreARKEHa/7SQjI9ayAFuACFlON/EgSlICzQyG/pumv1FsMEiFrv6w7PRj +4AGqzyzGKXWVDRMrUNVeGPSKJSmlPGNqXfPaXRpVEeB7UQhAs5wyMrWDl8jEW7Ih +XcWhMLn1f/NOAKyrSDSEaEM+Nuu+xTifoAghvP8CgYEAlta9Fw+nihDIjT10cBo0 +38w4dOP7bFcXQCGy+WMnujOYPzw34opiue1wOlB3FIfL8i5jjY/fyzPA5PhHuSCT +Ec9xL3B9+AsOFHU108XFi/pvKTwqoE1+SyYgtEmGKKjdKOfzYA9JaCgJe1J8inmV +jwXCx7gTJVjwBwxSmjXIm+sCgYBQF8NhQD1M0G3YCdCDZy7BXRippCL0OGxVfL2R +5oKtOVEBl9NxH/3+evE5y/Yn5Mw7Dx3ZPHUcygpslyZ6v9Da5T3Z7dKcmaVwxJ+H +n3wcugv0EIHvOPLNK8npovINR6rGVj6BAqD0uZHKYYYEioQxK5rGyGkaoDQ+dgHm +qku12wKBgQDem5FvNp5iW7mufkPZMqf3sEGtu612QeqejIPFM1z7VkUgetsgPBXD +tYsqC2FtWzY51VOEKNpnfH7zH5n+bjoI9nAEAW63TK9ZKkr2hRGsDhJdGzmLfQ7v +F6/CuIw9EsAq6qIB8O88FXQqald+BZOx6AzB8Oedsz/WtMmIEmr/+Q== +-----END RSA PRIVATE KEY-----"; + + let cert = "-----BEGIN CERTIFICATE----- +MIIDLjCCAhagAwIBAgIIeUtmIdFQznMwDQYJKoZIhvcNAQELBQAwIDEeMBwGA1UE +AxMVbWluaWNhIHJvb3QgY2EgMDc4ZGE3MCAXDTIzMDMwNjE2MDMxOFoYDzIxMjMw 
+MzA2MTYwMzE4WjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqGSIb3DQEB +AQUAA4IBDwAwggEKAoIBAQCx+oC6ESU3gefJ6oui9J3hB76c2/kDAKNI74cWIXfT +He9DUeKpEDRSbIWVKoGcUfdNQebglxp3jRB+tfx/XU0oZl2m8oewxipiNmdiREUZ +Lazh9DJoNtXkzTqzdQNfwRM+BjjVjx8IpNJV2L2IeTBxWtczFS7ggEHHQLWvYZKj +eCQgGdRwQt0V1pQ5Jt0KKkmFueTCLESvaHs9fHBtrtIhmBm1FpBZqTVUT1vvXqp7 +eIy4yFoR+j9SgWZ5kI+7myl/Bo5mycKzFE+TYiNvOWwdMnT2Uz3CZsQUcExUBd6M +tOT75Kte3yMBJmE16f/YbPItA0Cq4af3yUIxDpKwT28tAgMBAAGjdjB0MA4GA1Ud +DwEB/wQEAwIFoDAdBgNVHSUEFjAUBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0T +AQH/BAIwADAfBgNVHSMEGDAWgBTWfAmQ/BUIQm/9/llJJs2jUMWzGzAUBgNVHREE +DTALgglsb2NhbGhvc3QwDQYJKoZIhvcNAQELBQADggEBAG/POnBob0S7iYwsbtI2 +3LTTbRnmseIErtJuJmI9yYzgVIm6sUSKhlIUfAIm4rfRuzE94KFeWR2w9RabxOJD +wjYLLKvQ6rFY5g2AV/J0TwDjYuq0absdaDPZ8MKJ+/lpGYK3Te+CTOfq5FJRFt1q +GOkXAxnNpGg0obeRWRKFiAMHbcw6a8LIMfRjCooo3+uSQGsbVzGxSB4CYo720KcC +9vB1K9XALwzoqCewP4aiQsMY1GWpAmzXJftY3w+lka0e9dBYcdEdOqxSoZb5OBBZ +p5e60QweRuJsb60aUaCG8HoICevXYK2fFqCQdlb5sIqQqXyN2K6HuKAFywsjsGyJ +abY= +-----END CERTIFICATE-----"; + + // Configure the client + let ca = "-----BEGIN CERTIFICATE----- +MIIDSzCCAjOgAwIBAgIIB42n1ZIkOakwDQYJKoZIhvcNAQELBQAwIDEeMBwGA1UE +AxMVbWluaWNhIHJvb3QgY2EgMDc4ZGE3MCAXDTIzMDMwNjE2MDMwN1oYDzIxMjMw +MzA2MTYwMzA3WjAgMR4wHAYDVQQDExVtaW5pY2Egcm9vdCBjYSAwNzhkYTcwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDIuCq24O4P4Aep5vAVlrIQ7P8+ +uWWgcHIFYa02TmhBUB/hjo0JANCQvAtpVNuQ8NyKPlqnnq1cttePbSYVeA0rrnOs +DcfySAiyGBEY9zMjFfHJtH1wtrPcJEU8XIEY3xUlrAJE2CEuV9dVYgfEEydnvgLc +8Ug0WXSiARjqbnMW3l8jh6bYCp/UpL/gSM4mxdKrgpfyPoweGhlOWXc3RTS7cqM9 +T25acURGOSI6/g8GF0sNE4VZmUvHggSTmsbLeXMJzxDWO+xVehRmbQx3IkG7u++b +QdRwGIJcDNn7zHlDMHtQ0Z1DBV94fZNBwCULhCBB5g20XTGw//S7Fj2FPwyhAgMB +AAGjgYYwgYMwDgYDVR0PAQH/BAQDAgKEMB0GA1UdJQQWMBQGCCsGAQUFBwMBBggr +BgEFBQcDAjASBgNVHRMBAf8ECDAGAQH/AgEAMB0GA1UdDgQWBBTWfAmQ/BUIQm/9 +/llJJs2jUMWzGzAfBgNVHSMEGDAWgBTWfAmQ/BUIQm/9/llJJs2jUMWzGzANBgkq +hkiG9w0BAQsFAAOCAQEAvtcZFAELKiTuOiAeYts6zeKxc+nnHCzayDeD/BDCbxGJ 
+e1n+xdHjLtWGd+/Anc+fvftSYBPTFQqCi84lPiUIln5z/rUxE+ke81hNPIfw2obc +yIg87xCabQpVyEh8s+MV+7YPQ1+fH4FuSi2Fck1FejxkVqN2uOZPvOYUmSTsaVr1 +8SfRnwJNZ9UMRPM2bD4Jkvj0VcL42JM3QkOClOzYW4j/vll2cSs4kx7er27cIoo1 +Ck0v2xSPAiVjg6w65rUQeW6uB5m0T2wyj+wm0At8vzhZPlgS1fKhcmT2dzOq3+oN +R+IdLiXcyIkg0m9N8I17p0ljCSkbrgGMD3bbePRTfg== +-----END CERTIFICATE-----"; + + let mut endpoint: EndPoint = format!("tls/localhost:{}", 13030).parse().unwrap(); + endpoint + .config_mut() + .extend( + [ + (TLS_ROOT_CA_CERTIFICATE_RAW, ca), + (TLS_SERVER_PRIVATE_KEY_RAW, key), + (TLS_SERVER_CERTIFICATE_RAW, cert), + ] + .iter() + .map(|(k, v)| ((*k).to_owned(), (*v).to_owned())), + ) + .unwrap(); + + time_universal_transport(&endpoint).await; +} + +#[cfg(feature = "transport_quic")] +#[tokio::test(flavor = "multi_thread", worker_threads = 4)] +#[ignore] +async fn time_quic_only() { + use zenoh_link::quic::config::*; + + // NOTE: this an auto-generated pair of certificate and key. + // The target domain is localhost, so it has no real + // mapping to any existing domain. 
The certificate and key + // have been generated using: https://github.com/jsha/minica + let key = "-----BEGIN RSA PRIVATE KEY----- +MIIEpAIBAAKCAQEAsfqAuhElN4HnyeqLovSd4Qe+nNv5AwCjSO+HFiF30x3vQ1Hi +qRA0UmyFlSqBnFH3TUHm4Jcad40QfrX8f11NKGZdpvKHsMYqYjZnYkRFGS2s4fQy +aDbV5M06s3UDX8ETPgY41Y8fCKTSVdi9iHkwcVrXMxUu4IBBx0C1r2GSo3gkIBnU +cELdFdaUOSbdCipJhbnkwixEr2h7PXxwba7SIZgZtRaQWak1VE9b716qe3iMuMha +Efo/UoFmeZCPu5spfwaOZsnCsxRPk2IjbzlsHTJ09lM9wmbEFHBMVAXejLTk++Sr +Xt8jASZhNen/2GzyLQNAquGn98lCMQ6SsE9vLQIDAQABAoIBAGQkKggHm6Q20L+4 +2+bNsoOqguLplpvM4RMpyx11qWE9h6GeUmWD+5yg+SysJQ9aw0ZSHWEjRD4ePji9 +lxvm2IIxzuIftp+NcM2gBN2ywhpfq9XbO/2NVR6PJ0dQQJzBG12bzKDFDdYkP0EU +WdiPL+WoEkvo0F57bAd77n6G7SZSgxYekBF+5S6rjbu5I1cEKW+r2vLehD4uFCVX +Q0Tu7TyIOE1KJ2anRb7ZXVUaguNj0/Er7EDT1+wN8KJKvQ1tYGIq/UUBtkP9nkOI +9XJd25k6m5AQPDddzd4W6/5+M7kjyVPi3CsQcpBPss6ueyecZOMaKqdWAHeEyaak +r67TofUCgYEA6GBa+YkRvp0Ept8cd5mh4gCRM8wUuhtzTQnhubCPivy/QqMWScdn +qD0OiARLAsqeoIfkAVgyqebVnxwTrKTvWe0JwpGylEVWQtpGz3oHgjST47yZxIiY +CSAaimi2CYnJZ+QB2oBkFVwNCuXdPEGX6LgnOGva19UKrm6ONsy6V9MCgYEAxBJu +fu4dGXZreARKEHa/7SQjI9ayAFuACFlON/EgSlICzQyG/pumv1FsMEiFrv6w7PRj +4AGqzyzGKXWVDRMrUNVeGPSKJSmlPGNqXfPaXRpVEeB7UQhAs5wyMrWDl8jEW7Ih +XcWhMLn1f/NOAKyrSDSEaEM+Nuu+xTifoAghvP8CgYEAlta9Fw+nihDIjT10cBo0 +38w4dOP7bFcXQCGy+WMnujOYPzw34opiue1wOlB3FIfL8i5jjY/fyzPA5PhHuSCT +Ec9xL3B9+AsOFHU108XFi/pvKTwqoE1+SyYgtEmGKKjdKOfzYA9JaCgJe1J8inmV +jwXCx7gTJVjwBwxSmjXIm+sCgYBQF8NhQD1M0G3YCdCDZy7BXRippCL0OGxVfL2R +5oKtOVEBl9NxH/3+evE5y/Yn5Mw7Dx3ZPHUcygpslyZ6v9Da5T3Z7dKcmaVwxJ+H +n3wcugv0EIHvOPLNK8npovINR6rGVj6BAqD0uZHKYYYEioQxK5rGyGkaoDQ+dgHm +qku12wKBgQDem5FvNp5iW7mufkPZMqf3sEGtu612QeqejIPFM1z7VkUgetsgPBXD +tYsqC2FtWzY51VOEKNpnfH7zH5n+bjoI9nAEAW63TK9ZKkr2hRGsDhJdGzmLfQ7v +F6/CuIw9EsAq6qIB8O88FXQqald+BZOx6AzB8Oedsz/WtMmIEmr/+Q== +-----END RSA PRIVATE KEY-----"; + + let cert = "-----BEGIN CERTIFICATE----- +MIIDLjCCAhagAwIBAgIIeUtmIdFQznMwDQYJKoZIhvcNAQELBQAwIDEeMBwGA1UE +AxMVbWluaWNhIHJvb3QgY2EgMDc4ZGE3MCAXDTIzMDMwNjE2MDMxOFoYDzIxMjMw 
+MzA2MTYwMzE4WjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqGSIb3DQEB +AQUAA4IBDwAwggEKAoIBAQCx+oC6ESU3gefJ6oui9J3hB76c2/kDAKNI74cWIXfT +He9DUeKpEDRSbIWVKoGcUfdNQebglxp3jRB+tfx/XU0oZl2m8oewxipiNmdiREUZ +Lazh9DJoNtXkzTqzdQNfwRM+BjjVjx8IpNJV2L2IeTBxWtczFS7ggEHHQLWvYZKj +eCQgGdRwQt0V1pQ5Jt0KKkmFueTCLESvaHs9fHBtrtIhmBm1FpBZqTVUT1vvXqp7 +eIy4yFoR+j9SgWZ5kI+7myl/Bo5mycKzFE+TYiNvOWwdMnT2Uz3CZsQUcExUBd6M +tOT75Kte3yMBJmE16f/YbPItA0Cq4af3yUIxDpKwT28tAgMBAAGjdjB0MA4GA1Ud +DwEB/wQEAwIFoDAdBgNVHSUEFjAUBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0T +AQH/BAIwADAfBgNVHSMEGDAWgBTWfAmQ/BUIQm/9/llJJs2jUMWzGzAUBgNVHREE +DTALgglsb2NhbGhvc3QwDQYJKoZIhvcNAQELBQADggEBAG/POnBob0S7iYwsbtI2 +3LTTbRnmseIErtJuJmI9yYzgVIm6sUSKhlIUfAIm4rfRuzE94KFeWR2w9RabxOJD +wjYLLKvQ6rFY5g2AV/J0TwDjYuq0absdaDPZ8MKJ+/lpGYK3Te+CTOfq5FJRFt1q +GOkXAxnNpGg0obeRWRKFiAMHbcw6a8LIMfRjCooo3+uSQGsbVzGxSB4CYo720KcC +9vB1K9XALwzoqCewP4aiQsMY1GWpAmzXJftY3w+lka0e9dBYcdEdOqxSoZb5OBBZ +p5e60QweRuJsb60aUaCG8HoICevXYK2fFqCQdlb5sIqQqXyN2K6HuKAFywsjsGyJ +abY= +-----END CERTIFICATE-----"; + + // Configure the client + let ca = "-----BEGIN CERTIFICATE----- +MIIDSzCCAjOgAwIBAgIIB42n1ZIkOakwDQYJKoZIhvcNAQELBQAwIDEeMBwGA1UE +AxMVbWluaWNhIHJvb3QgY2EgMDc4ZGE3MCAXDTIzMDMwNjE2MDMwN1oYDzIxMjMw +MzA2MTYwMzA3WjAgMR4wHAYDVQQDExVtaW5pY2Egcm9vdCBjYSAwNzhkYTcwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDIuCq24O4P4Aep5vAVlrIQ7P8+ +uWWgcHIFYa02TmhBUB/hjo0JANCQvAtpVNuQ8NyKPlqnnq1cttePbSYVeA0rrnOs +DcfySAiyGBEY9zMjFfHJtH1wtrPcJEU8XIEY3xUlrAJE2CEuV9dVYgfEEydnvgLc +8Ug0WXSiARjqbnMW3l8jh6bYCp/UpL/gSM4mxdKrgpfyPoweGhlOWXc3RTS7cqM9 +T25acURGOSI6/g8GF0sNE4VZmUvHggSTmsbLeXMJzxDWO+xVehRmbQx3IkG7u++b +QdRwGIJcDNn7zHlDMHtQ0Z1DBV94fZNBwCULhCBB5g20XTGw//S7Fj2FPwyhAgMB +AAGjgYYwgYMwDgYDVR0PAQH/BAQDAgKEMB0GA1UdJQQWMBQGCCsGAQUFBwMBBggr +BgEFBQcDAjASBgNVHRMBAf8ECDAGAQH/AgEAMB0GA1UdDgQWBBTWfAmQ/BUIQm/9 +/llJJs2jUMWzGzAfBgNVHSMEGDAWgBTWfAmQ/BUIQm/9/llJJs2jUMWzGzANBgkq +hkiG9w0BAQsFAAOCAQEAvtcZFAELKiTuOiAeYts6zeKxc+nnHCzayDeD/BDCbxGJ 
+e1n+xdHjLtWGd+/Anc+fvftSYBPTFQqCi84lPiUIln5z/rUxE+ke81hNPIfw2obc +yIg87xCabQpVyEh8s+MV+7YPQ1+fH4FuSi2Fck1FejxkVqN2uOZPvOYUmSTsaVr1 +8SfRnwJNZ9UMRPM2bD4Jkvj0VcL42JM3QkOClOzYW4j/vll2cSs4kx7er27cIoo1 +Ck0v2xSPAiVjg6w65rUQeW6uB5m0T2wyj+wm0At8vzhZPlgS1fKhcmT2dzOq3+oN +R+IdLiXcyIkg0m9N8I17p0ljCSkbrgGMD3bbePRTfg== +-----END CERTIFICATE-----"; + + // Define the locator + let mut endpoint: EndPoint = format!("quic/localhost:{}", 13040).parse().unwrap(); + endpoint + .config_mut() + .extend( + [ + (TLS_ROOT_CA_CERTIFICATE_RAW, ca), + (TLS_SERVER_PRIVATE_KEY_RAW, key), + (TLS_SERVER_CERTIFICATE_RAW, cert), + ] + .iter() + .map(|(k, v)| ((*k).to_owned(), (*v).to_owned())), + ) + .unwrap(); + + time_universal_transport(&endpoint).await; +} + +#[cfg(all(feature = "transport_vsock", target_os = "linux"))] +#[tokio::test(flavor = "multi_thread", worker_threads = 4)] +#[ignore] +async fn time_vsock_only() { + let _ = env_logger::try_init(); + let endpoint: EndPoint = "vsock/VMADDR_CID_LOCAL:17000".parse().unwrap(); + time_lowlatency_transport(&endpoint).await; +} From 2da0aeb0c59a5634b1975fad1200fb92256ec733 Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Fri, 5 Apr 2024 11:06:54 +0200 Subject: [PATCH 097/124] Declare message can be Push/Request/RequestContinuous/Response (#902) * Declare message can be Push/Request/RequestContinuous/Response * Address review comments * Remove F: Future flag from DeclareInterest * cargo fmt --all --- commons/zenoh-codec/src/network/declare.rs | 236 +++++++----------- commons/zenoh-codec/tests/codec.rs | 16 ++ commons/zenoh-protocol/src/network/declare.rs | 215 ++++++++-------- commons/zenoh-protocol/src/network/mod.rs | 6 +- zenoh/src/key_expr.rs | 4 +- zenoh/src/net/routing/dispatcher/face.rs | 3 +- zenoh/src/net/routing/dispatcher/resource.rs | 4 +- zenoh/src/net/routing/hat/client/pubsub.rs | 10 +- zenoh/src/net/routing/hat/client/queries.rs | 8 +- .../net/routing/hat/linkstate_peer/pubsub.rs | 14 +- 
.../net/routing/hat/linkstate_peer/queries.rs | 14 +- zenoh/src/net/routing/hat/p2p_peer/pubsub.rs | 10 +- zenoh/src/net/routing/hat/p2p_peer/queries.rs | 8 +- zenoh/src/net/routing/hat/router/pubsub.rs | 22 +- zenoh/src/net/routing/hat/router/queries.rs | 22 +- zenoh/src/net/routing/mod.rs | 3 +- zenoh/src/net/runtime/adminspace.rs | 8 +- zenoh/src/net/tests/tables.rs | 12 +- zenoh/src/session.rs | 19 +- 19 files changed, 283 insertions(+), 351 deletions(-) diff --git a/commons/zenoh-codec/src/network/declare.rs b/commons/zenoh-codec/src/network/declare.rs index d7a25ea0a9..173fbe5e4a 100644 --- a/commons/zenoh-codec/src/network/declare.rs +++ b/commons/zenoh-codec/src/network/declare.rs @@ -19,12 +19,16 @@ use zenoh_buffers::{ ZBuf, }; use zenoh_protocol::{ - common::{iext, imsg, ZExtZ64}, + common::{ + iext, + imsg::{self, HEADER_BITS}, + ZExtZ64, + }, core::{ExprId, ExprLen, WireExpr}, network::{ declare::{ self, common, interest, keyexpr, queryable, subscriber, token, Declare, DeclareBody, - Interest, + DeclareMode, Interest, }, id, Mapping, }, @@ -48,8 +52,7 @@ where DeclareBody::DeclareToken(r) => self.write(&mut *writer, r)?, DeclareBody::UndeclareToken(r) => self.write(&mut *writer, r)?, DeclareBody::DeclareInterest(r) => self.write(&mut *writer, r)?, - DeclareBody::FinalInterest(r) => self.write(&mut *writer, r)?, - DeclareBody::UndeclareInterest(r) => self.write(&mut *writer, r)?, + DeclareBody::DeclareFinal(r) => self.write(&mut *writer, r)?, } Ok(()) @@ -77,8 +80,7 @@ where D_TOKEN => DeclareBody::DeclareToken(codec.read(&mut *reader)?), U_TOKEN => DeclareBody::UndeclareToken(codec.read(&mut *reader)?), D_INTEREST => DeclareBody::DeclareInterest(codec.read(&mut *reader)?), - F_INTEREST => DeclareBody::FinalInterest(codec.read(&mut *reader)?), - U_INTEREST => DeclareBody::UndeclareInterest(codec.read(&mut *reader)?), + D_FINAL => DeclareBody::DeclareFinal(codec.read(&mut *reader)?), _ => return Err(DidntRead), }; @@ -95,7 +97,7 @@ where fn write(self, 
writer: &mut W, x: &Declare) -> Self::Output { let Declare { - interest_id, + mode, ext_qos, ext_tstamp, ext_nodeid, @@ -104,9 +106,13 @@ where // Header let mut header = id::DECLARE; - if x.interest_id.is_some() { - header |= declare::flag::I; - } + header |= match mode { + DeclareMode::Push => 0b00, + DeclareMode::Response(_) => 0b01, + DeclareMode::Request(_) => 0b10, + DeclareMode::RequestContinuous(_) => 0b11, + } << HEADER_BITS; + let mut n_exts = ((ext_qos != &declare::ext::QoSType::DEFAULT) as u8) + (ext_tstamp.is_some() as u8) + ((ext_nodeid != &declare::ext::NodeIdType::DEFAULT) as u8); @@ -116,8 +122,11 @@ where self.write(&mut *writer, header)?; // Body - if let Some(interest_id) = interest_id { - self.write(&mut *writer, interest_id)?; + if let DeclareMode::Request(rid) + | DeclareMode::RequestContinuous(rid) + | DeclareMode::Response(rid) = mode + { + self.write(&mut *writer, rid)?; } // Extensions @@ -166,10 +175,14 @@ where return Err(DidntRead); } - let mut interest_id = None; - if imsg::has_flag(self.header, declare::flag::I) { - interest_id = Some(self.codec.read(&mut *reader)?); - } + // Body + let mode = match (self.header >> HEADER_BITS) & 0b11 { + 0b00 => DeclareMode::Push, + 0b01 => DeclareMode::Response(self.codec.read(&mut *reader)?), + 0b10 => DeclareMode::Request(self.codec.read(&mut *reader)?), + 0b11 => DeclareMode::RequestContinuous(self.codec.read(&mut *reader)?), + _ => return Err(DidntRead), + }; // Extensions let mut ext_qos = declare::ext::QoSType::DEFAULT; @@ -206,7 +219,7 @@ where let body: DeclareBody = self.codec.read(&mut *reader)?; Ok(Declare { - interest_id, + mode, ext_qos, ext_tstamp, ext_nodeid, @@ -215,6 +228,59 @@ where } } +// Final +impl WCodec<&common::DeclareFinal, &mut W> for Zenoh080 +where + W: Writer, +{ + type Output = Result<(), DidntWrite>; + + fn write(self, writer: &mut W, x: &common::DeclareFinal) -> Self::Output { + let common::DeclareFinal = x; + + // Header + let header = declare::id::D_FINAL; + 
self.write(&mut *writer, header)?; + + Ok(()) + } +} + +impl RCodec for Zenoh080 +where + R: Reader, +{ + type Error = DidntRead; + + fn read(self, reader: &mut R) -> Result { + let header: u8 = self.read(&mut *reader)?; + let codec = Zenoh080Header::new(header); + + codec.read(reader) + } +} + +impl RCodec for Zenoh080Header +where + R: Reader, +{ + type Error = DidntRead; + + fn read(self, reader: &mut R) -> Result { + if imsg::mid(self.header) != declare::id::D_FINAL { + return Err(DidntRead); + } + + // Extensions + let has_ext = imsg::has_flag(self.header, token::flag::Z); + if has_ext { + extension::skip_all(reader, "Final")?; + } + + Ok(common::DeclareFinal) + } +} + // DeclareKeyExpr impl WCodec<&keyexpr::DeclareKeyExpr, &mut W> for Zenoh080 where @@ -907,7 +973,7 @@ where } = x; // Header - let header = declare::id::D_INTEREST | x.flags(); + let header = declare::id::D_INTEREST; self.write(&mut *writer, header)?; // Body @@ -976,140 +1042,6 @@ where } } -// FinalInterest -impl WCodec<&interest::FinalInterest, &mut W> for Zenoh080 -where - W: Writer, -{ - type Output = Result<(), DidntWrite>; - - fn write(self, writer: &mut W, x: &interest::FinalInterest) -> Self::Output { - let interest::FinalInterest { id } = x; - - // Header - let header = declare::id::F_INTEREST; - self.write(&mut *writer, header)?; - - // Body - self.write(&mut *writer, id)?; - - Ok(()) - } -} - -impl RCodec for Zenoh080 -where - R: Reader, -{ - type Error = DidntRead; - - fn read(self, reader: &mut R) -> Result { - let header: u8 = self.read(&mut *reader)?; - let codec = Zenoh080Header::new(header); - - codec.read(reader) - } -} - -impl RCodec for Zenoh080Header -where - R: Reader, -{ - type Error = DidntRead; - - fn read(self, reader: &mut R) -> Result { - if imsg::mid(self.header) != declare::id::F_INTEREST { - return Err(DidntRead); - } - - // Body - let id: interest::InterestId = self.codec.read(&mut *reader)?; - - // Extensions - let has_ext = imsg::has_flag(self.header, 
token::flag::Z); - if has_ext { - extension::skip_all(reader, "FinalInterest")?; - } - - Ok(interest::FinalInterest { id }) - } -} - -// UndeclareInterest -impl WCodec<&interest::UndeclareInterest, &mut W> for Zenoh080 -where - W: Writer, -{ - type Output = Result<(), DidntWrite>; - - fn write(self, writer: &mut W, x: &interest::UndeclareInterest) -> Self::Output { - let interest::UndeclareInterest { id, ext_wire_expr } = x; - - // Header - let header = declare::id::U_INTEREST | interest::flag::Z; - self.write(&mut *writer, header)?; - - // Body - self.write(&mut *writer, id)?; - - // Extension - self.write(&mut *writer, (ext_wire_expr, false))?; - - Ok(()) - } -} - -impl RCodec for Zenoh080 -where - R: Reader, -{ - type Error = DidntRead; - - fn read(self, reader: &mut R) -> Result { - let header: u8 = self.read(&mut *reader)?; - let codec = Zenoh080Header::new(header); - - codec.read(reader) - } -} - -impl RCodec for Zenoh080Header -where - R: Reader, -{ - type Error = DidntRead; - - fn read(self, reader: &mut R) -> Result { - if imsg::mid(self.header) != declare::id::U_INTEREST { - return Err(DidntRead); - } - - // Body - let id: interest::InterestId = self.codec.read(&mut *reader)?; - - // Extensions - let mut ext_wire_expr = common::ext::WireExprType::null(); - - let mut has_ext = imsg::has_flag(self.header, interest::flag::Z); - while has_ext { - let ext: u8 = self.codec.read(&mut *reader)?; - let eodec = Zenoh080Header::new(ext); - match iext::eid(ext) { - common::ext::WireExprExt::ID => { - let (we, ext): (common::ext::WireExprType, bool) = eodec.read(&mut *reader)?; - ext_wire_expr = we; - has_ext = ext; - } - _ => { - has_ext = extension::skip(reader, "UndeclareInterest", ext)?; - } - } - } - - Ok(interest::UndeclareInterest { id, ext_wire_expr }) - } -} - // WARNING: this is a temporary extension used for undeclarations impl WCodec<(&common::ext::WireExprType, bool), &mut W> for Zenoh080 where diff --git a/commons/zenoh-codec/tests/codec.rs 
b/commons/zenoh-codec/tests/codec.rs index 2f0e870c4f..d28ba9a4d3 100644 --- a/commons/zenoh-codec/tests/codec.rs +++ b/commons/zenoh-codec/tests/codec.rs @@ -31,6 +31,22 @@ use zenoh_protocol::{ zenoh, zextunit, zextz64, zextzbuf, }; +#[test] +fn zbuf_test() { + let mut buffer = vec![0u8; 64]; + + let zbuf = ZBuf::empty(); + let mut writer = buffer.writer(); + + let codec = Zenoh080::new(); + codec.write(&mut writer, &zbuf).unwrap(); + println!("Buffer: {:?}", buffer); + + let mut reader = buffer.reader(); + let ret: ZBuf = codec.read(&mut reader).unwrap(); + assert_eq!(ret, zbuf); +} + const NUM_ITER: usize = 100; const MAX_PAYLOAD_SIZE: usize = 256; diff --git a/commons/zenoh-protocol/src/network/declare.rs b/commons/zenoh-protocol/src/network/declare.rs index 10027259c2..996e7768ee 100644 --- a/commons/zenoh-protocol/src/network/declare.rs +++ b/commons/zenoh-protocol/src/network/declare.rs @@ -18,6 +18,8 @@ use crate::{ zextz64, zextzbuf, }; use alloc::borrow::Cow; +pub use common::*; +use core::sync::atomic::AtomicU32; pub use interest::*; pub use keyexpr::*; pub use queryable::*; @@ -31,24 +33,59 @@ pub mod flag { } /// Flags: -/// - I: Interest If I==1 then the declare is in a response to an Interest with future==false -/// - X: Reserved +/// - |: Mode The mode of the the declaration* +/// -/ /// - Z: Extension If Z==1 then at least one extension is present /// /// 7 6 5 4 3 2 1 0 /// +-+-+-+-+-+-+-+-+ -/// |Z|X|I| DECLARE | +/// |Z|Mod| DECLARE | /// +-+-+-+---------+ -/// ~interest_id:z32~ if I==1 +/// ~ rid:z32 ~ if Mode != Push /// +---------------+ /// ~ [decl_exts] ~ if Z==1 /// +---------------+ /// ~ declaration ~ /// +---------------+ /// +/// *Mode of declaration: +/// - Mode 0b00: Push +/// - Mode 0b01: Response +/// - Mode 0b10: Request +/// - Mode 0b11: RequestContinuous + +/// The resolution of a RequestId +pub type DeclareRequestId = u32; +pub type AtomicDeclareRequestId = AtomicU32; + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum 
DeclareMode { + Push, + Request(DeclareRequestId), + RequestContinuous(DeclareRequestId), + Response(DeclareRequestId), +} + +impl DeclareMode { + #[cfg(feature = "test")] + pub fn rand() -> Self { + use rand::Rng; + + let mut rng = rand::thread_rng(); + + match rng.gen_range(0..4) { + 0 => DeclareMode::Push, + 1 => DeclareMode::Request(rng.gen()), + 2 => DeclareMode::RequestContinuous(rng.gen()), + 3 => DeclareMode::Response(rng.gen()), + _ => unreachable!(), + } + } +} + #[derive(Debug, Clone, PartialEq, Eq)] pub struct Declare { - pub interest_id: Option, + pub mode: DeclareMode, pub ext_qos: ext::QoSType, pub ext_tstamp: Option, pub ext_nodeid: ext::NodeIdType, @@ -85,8 +122,8 @@ pub mod id { pub const U_TOKEN: u8 = 0x07; pub const D_INTEREST: u8 = 0x08; - pub const F_INTEREST: u8 = 0x09; - pub const U_INTEREST: u8 = 0x0A; + + pub const D_FINAL: u8 = 0x1A; } #[derive(Debug, Clone, PartialEq, Eq)] @@ -100,8 +137,7 @@ pub enum DeclareBody { DeclareToken(DeclareToken), UndeclareToken(UndeclareToken), DeclareInterest(DeclareInterest), - FinalInterest(FinalInterest), - UndeclareInterest(UndeclareInterest), + DeclareFinal(DeclareFinal), } impl DeclareBody { @@ -111,7 +147,7 @@ impl DeclareBody { let mut rng = rand::thread_rng(); - match rng.gen_range(0..11) { + match rng.gen_range(0..10) { 0 => DeclareBody::DeclareKeyExpr(DeclareKeyExpr::rand()), 1 => DeclareBody::UndeclareKeyExpr(UndeclareKeyExpr::rand()), 2 => DeclareBody::DeclareSubscriber(DeclareSubscriber::rand()), @@ -121,8 +157,7 @@ impl DeclareBody { 6 => DeclareBody::DeclareToken(DeclareToken::rand()), 7 => DeclareBody::UndeclareToken(UndeclareToken::rand()), 8 => DeclareBody::DeclareInterest(DeclareInterest::rand()), - 9 => DeclareBody::FinalInterest(FinalInterest::rand()), - 10 => DeclareBody::UndeclareInterest(UndeclareInterest::rand()), + 9 => DeclareBody::DeclareFinal(DeclareFinal::rand()), _ => unreachable!(), } } @@ -135,14 +170,14 @@ impl Declare { let mut rng = rand::thread_rng(); - let interest_id 
= rng.gen_bool(0.5).then_some(rng.gen::()); + let mode = DeclareMode::rand(); let ext_qos = ext::QoSType::rand(); let ext_tstamp = rng.gen_bool(0.5).then(ext::TimestampType::rand); let ext_nodeid = ext::NodeIdType::rand(); let body = DeclareBody::rand(); Self { - interest_id, + mode, ext_qos, ext_tstamp, ext_nodeid, @@ -154,6 +189,29 @@ impl Declare { pub mod common { use super::*; + /// ```text + /// Flags: + /// - X: Reserved + /// - X: Reserved + /// - Z: Extension If Z==1 then at least one extension is present + /// + /// 7 6 5 4 3 2 1 0 + /// +-+-+-+-+-+-+-+-+ + /// |Z|x|x| D_FINAL | + /// +---------------+ + /// ~ [final_exts] ~ if Z==1 + /// +---------------+ + /// ``` + #[derive(Debug, Clone, PartialEq, Eq)] + pub struct DeclareFinal; + + impl DeclareFinal { + #[cfg(feature = "test")] + pub fn rand() -> Self { + Self + } + } + pub mod ext { use super::*; @@ -545,7 +603,7 @@ pub mod queryable { /// /// 7 6 5 4 3 2 1 0 /// +-+-+-+-+-+-+-+-+ - /// |Z|0_2| U_QBL | + /// |Z|X|X| U_QBL | /// +---------------+ /// ~ qbls_id:z32 ~ /// +---------------+ @@ -668,44 +726,51 @@ pub mod interest { pub type InterestId = u32; pub mod flag { - pub const C: u8 = 1 << 5; // 0x20 Current if C==1 then the interest refers to the current declarations. - pub const F: u8 = 1 << 6; // 0x40 Future if F==1 then the interest refers to the future declarations. + // pub const X: u8 = 1 << 5; // 0x20 Reserved + // pub const X: u8 = 1 << 6; // 0x40 Reserved pub const Z: u8 = 1 << 7; // 0x80 Extensions if Z==1 then an extension will follow } /// # DeclareInterest message /// - /// The DECLARE INTEREST message is sent to request the transmission of existing and future - /// declarations of a given kind matching a target keyexpr. E.g., a declare interest could be sent to - /// request the transmisison of all existing subscriptions matching `a/*`. A FINAL INTEREST is used to - /// mark the end of the transmission of exisiting matching declarations. 
+ /// The DECLARE INTEREST message is sent to request the transmission of current and/or future + /// declarations of a given kind matching a target keyexpr. E.g., a declare interest could be + /// sent to request the transmisison of all current subscriptions matching `a/*`. + /// + /// The behaviour of a DECLARE INTEREST depends on the DECLARE MODE in the DECLARE MESSAGE: + /// - Push: only future declarations + /// - Request: only current declarations + /// - RequestContinous: current and future declarations + /// - Response: invalid /// - /// E.g., the [`DeclareInterest`]/[`FinalInterest`]/[`UndeclareInterest`] message flow is the following: + /// E.g., the [`DeclareInterest`] message flow is the following: /// /// ```text /// A B /// | DECL INTEREST | - /// |------------------>| -- This is a DeclareInterest e.g. for subscriber declarations/undeclarations. + /// |------------------>| -- Sent in Declare::RequestContinuous. + /// | | This is a DeclareInterest e.g. for subscriber declarations/undeclarations. /// | | /// | DECL SUBSCRIBER | - /// |<------------------| + /// |<------------------| -- Sent in Declare::Response /// | DECL SUBSCRIBER | - /// |<------------------| + /// |<------------------| -- Sent in Declare::Response /// | DECL SUBSCRIBER | - /// |<------------------| + /// |<------------------| -- Sent in Declare::Response /// | | - /// | FINAL INTEREST | - /// |<------------------| -- The FinalInterest signals that all known subscribers have been transmitted. + /// | FINAL | + /// |<------------------| -- Sent in Declare::Response /// | | /// | DECL SUBSCRIBER | - /// |<------------------| -- This is a new subscriber declaration. + /// |<------------------| -- Sent in Declare::Push. This is a new subscriber declaration. /// | UNDECL SUBSCRIBER | - /// |<------------------| -- This is a new subscriber undeclaration. + /// |<------------------| -- Sent in Declare::Push. This is a new subscriber undeclaration. /// | | /// | ... 
| /// | | - /// | UNDECL INTEREST | - /// |------------------>| -- This is an UndeclareInterest to stop receiving subscriber declarations/undeclarations. + /// | FINAL | + /// |------------------>| -- Sent in Declare::RequestContinuous. + /// | | This stops the transmission of subscriber declarations/undeclarations. /// | | /// ``` /// @@ -713,15 +778,13 @@ pub mod interest { /// /// ```text /// Flags: - /// - C: Current if C==1 then the interest refers to the current declarations. - /// - F: Future if F==1 then the interest refers to the future declarations. Note that if F==0 then: - /// - Declarations SHOULD NOT be sent after the FinalInterest; - /// - UndeclareInterest SHOULD NOT be sent after the FinalInterest. + /// - X: Reserved + /// - X: Reserved /// - Z: Extension If Z==1 then at least one extension is present /// /// 7 6 5 4 3 2 1 0 /// +-+-+-+-+-+-+-+-+ - /// |Z|F|C| D_INT | + /// |Z|F|X| D_INT | /// +---------------+ /// ~ intst_id:z32 ~ /// +---------------+ @@ -752,17 +815,6 @@ pub mod interest { } impl DeclareInterest { - pub fn flags(&self) -> u8 { - let mut interest = self.interest; - if self.interest.current() { - interest += Interest::CURRENT; - } - if self.interest.future() { - interest += Interest::FUTURE; - } - interest.flags - } - pub fn options(&self) -> u8 { let mut interest = self.interest; if let Some(we) = self.wire_expr.as_ref() { @@ -801,9 +853,6 @@ pub mod interest { } impl Interest { - // Header - pub const CURRENT: Interest = Interest::flags(interest::flag::C); - pub const FUTURE: Interest = Interest::flags(interest::flag::F); // Flags pub const KEYEXPRS: Interest = Interest::options(1); pub const SUBSCRIBERS: Interest = Interest::options(1 << 1); @@ -820,10 +869,6 @@ pub mod interest { | Interest::TOKENS.options, ); - const fn flags(flags: u8) -> Self { - Self { flags, options: 0 } - } - const fn options(options: u8) -> Self { Self { flags: 0, options } } @@ -835,14 +880,6 @@ pub mod interest { } } - pub const fn current(&self) -> 
bool { - imsg::has_flag(self.flags, Self::CURRENT.flags) - } - - pub const fn future(&self) -> bool { - imsg::has_flag(self.flags, Self::FUTURE.flags) - } - pub const fn keyexprs(&self) -> bool { imsg::has_flag(self.options, Self::KEYEXPRS.options) } @@ -881,12 +918,6 @@ pub mod interest { let mut rng = rand::thread_rng(); let mut s = Self::empty(); - if rng.gen_bool(0.5) { - s += Interest::CURRENT; - } - if rng.gen_bool(0.5) { - s += Interest::FUTURE; - } if rng.gen_bool(0.5) { s += Interest::KEYEXPRS; } @@ -905,9 +936,7 @@ pub mod interest { impl PartialEq for Interest { fn eq(&self, other: &Self) -> bool { - self.current() == other.current() - && self.future() == other.future() - && self.keyexprs() == other.keyexprs() + self.keyexprs() == other.keyexprs() && self.subscribers() == other.subscribers() && self.queryables() == other.queryables() && self.tokens() == other.tokens() @@ -918,16 +947,6 @@ pub mod interest { impl Debug for Interest { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "Interest {{ ")?; - if self.current() { - write!(f, "C:Y, ")?; - } else { - write!(f, "C:N, ")?; - } - if self.future() { - write!(f, "F:Y, ")?; - } else { - write!(f, "F:N, ")?; - } if self.keyexprs() { write!(f, "K:Y, ")?; } else { @@ -1003,38 +1022,6 @@ pub mod interest { } } - /// ```text - /// Flags: - /// - X: Reserved - /// - X: Reserved - /// - Z: Extension If Z==1 then at least one extension is present - /// - /// 7 6 5 4 3 2 1 0 - /// +-+-+-+-+-+-+-+-+ - /// |Z|X|X| F_INT | - /// +---------------+ - /// ~ intst_id:z32 ~ - /// +---------------+ - /// ~ [decl_exts] ~ if Z==1 - /// +---------------+ - /// ``` - #[derive(Debug, Clone, PartialEq, Eq)] - pub struct FinalInterest { - pub id: InterestId, - } - - impl FinalInterest { - #[cfg(feature = "test")] - pub fn rand() -> Self { - use rand::Rng; - let mut rng = rand::thread_rng(); - - let id: InterestId = rng.gen(); - - Self { id } - } - } - /// ```text /// Flags: /// - X: Reserved diff --git 
a/commons/zenoh-protocol/src/network/mod.rs b/commons/zenoh-protocol/src/network/mod.rs index 0e198ddf0f..cbf9894aef 100644 --- a/commons/zenoh-protocol/src/network/mod.rs +++ b/commons/zenoh-protocol/src/network/mod.rs @@ -20,9 +20,9 @@ pub mod response; use core::fmt; pub use declare::{ - Declare, DeclareBody, DeclareInterest, DeclareKeyExpr, DeclareQueryable, DeclareSubscriber, - DeclareToken, UndeclareInterest, UndeclareKeyExpr, UndeclareQueryable, UndeclareSubscriber, - UndeclareToken, + Declare, DeclareBody, DeclareFinal, DeclareInterest, DeclareKeyExpr, DeclareMode, + DeclareQueryable, DeclareSubscriber, DeclareToken, UndeclareInterest, UndeclareKeyExpr, + UndeclareQueryable, UndeclareSubscriber, UndeclareToken, }; pub use oam::Oam; pub use push::Push; diff --git a/zenoh/src/key_expr.rs b/zenoh/src/key_expr.rs index aaa1d13724..17aa0425b6 100644 --- a/zenoh/src/key_expr.rs +++ b/zenoh/src/key_expr.rs @@ -53,7 +53,7 @@ pub use zenoh_keyexpr::*; pub use zenoh_macros::{kedefine, keformat, kewrite}; use zenoh_protocol::{ core::{key_expr::canon::Canonizable, ExprId, WireExpr}, - network::{declare, DeclareBody, Mapping, UndeclareKeyExpr}, + network::{declare, DeclareBody, DeclareMode, Mapping, UndeclareKeyExpr}, }; use zenoh_result::ZResult; @@ -664,7 +664,7 @@ impl SyncResolve for KeyExprUndeclaration<'_> { let primitives = state.primitives.as_ref().unwrap().clone(); drop(state); primitives.send_declare(zenoh_protocol::network::Declare { - interest_id: None, + mode: DeclareMode::Push, ext_qos: declare::ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: declare::ext::NodeIdType::DEFAULT, diff --git a/zenoh/src/net/routing/dispatcher/face.rs b/zenoh/src/net/routing/dispatcher/face.rs index cb565053c9..3531dd2d88 100644 --- a/zenoh/src/net/routing/dispatcher/face.rs +++ b/zenoh/src/net/routing/dispatcher/face.rs @@ -211,8 +211,7 @@ impl Primitives for Face { zenoh_protocol::network::DeclareBody::DeclareToken(_m) => todo!(), 
zenoh_protocol::network::DeclareBody::UndeclareToken(_m) => todo!(), zenoh_protocol::network::DeclareBody::DeclareInterest(_m) => todo!(), - zenoh_protocol::network::DeclareBody::FinalInterest(_m) => todo!(), - zenoh_protocol::network::DeclareBody::UndeclareInterest(_m) => todo!(), + zenoh_protocol::network::DeclareBody::DeclareFinal(_m) => todo!(), } drop(ctrl_lock); } diff --git a/zenoh/src/net/routing/dispatcher/resource.rs b/zenoh/src/net/routing/dispatcher/resource.rs index 194b97fca8..941b37f916 100644 --- a/zenoh/src/net/routing/dispatcher/resource.rs +++ b/zenoh/src/net/routing/dispatcher/resource.rs @@ -27,7 +27,7 @@ use zenoh_protocol::{ network::{ declare::{ ext, queryable::ext::QueryableInfoType, subscriber::ext::SubscriberInfo, Declare, - DeclareBody, DeclareKeyExpr, + DeclareBody, DeclareKeyExpr, DeclareMode, }, Mapping, }, @@ -452,7 +452,7 @@ impl Resource { .insert(expr_id, nonwild_prefix.clone()); face.primitives.send_declare(RoutingContext::with_expr( Declare { - interest_id: None, + mode: DeclareMode::Push, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, diff --git a/zenoh/src/net/routing/hat/client/pubsub.rs b/zenoh/src/net/routing/hat/client/pubsub.rs index e85bb77bf9..6c689d3336 100644 --- a/zenoh/src/net/routing/hat/client/pubsub.rs +++ b/zenoh/src/net/routing/hat/client/pubsub.rs @@ -30,7 +30,7 @@ use zenoh_protocol::{ core::{Reliability, WhatAmI}, network::declare::{ common::ext::WireExprType, ext, subscriber::ext::SubscriberInfo, Declare, DeclareBody, - DeclareSubscriber, UndeclareSubscriber, + DeclareMode, DeclareSubscriber, UndeclareSubscriber, }, }; use zenoh_sync::get_mut_unchecked; @@ -53,7 +53,7 @@ fn propagate_simple_subscription_to( let key_expr = Resource::decl_key(res, dst_face); dst_face.primitives.send_declare(RoutingContext::with_expr( Declare { - interest_id: None, + mode: DeclareMode::Push, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ 
-137,7 +137,7 @@ fn declare_client_subscription( .primitives .send_declare(RoutingContext::with_expr( Declare { - interest_id: None, + mode: DeclareMode::Push, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -171,7 +171,7 @@ fn propagate_forget_simple_subscription(tables: &mut Tables, res: &Arc if let Some(id) = face_hat_mut!(face).local_subs.remove(res) { face.primitives.send_declare(RoutingContext::with_expr( Declare { - interest_id: None, + mode: DeclareMode::Push, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -206,7 +206,7 @@ pub(super) fn undeclare_client_subscription( if let Some(id) = face_hat_mut!(face).local_subs.remove(res) { face.primitives.send_declare(RoutingContext::with_expr( Declare { - interest_id: None, + mode: DeclareMode::Push, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, diff --git a/zenoh/src/net/routing/hat/client/queries.rs b/zenoh/src/net/routing/hat/client/queries.rs index 5c0bc5349b..28e1d75460 100644 --- a/zenoh/src/net/routing/hat/client/queries.rs +++ b/zenoh/src/net/routing/hat/client/queries.rs @@ -33,7 +33,7 @@ use zenoh_protocol::{ core::{WhatAmI, WireExpr}, network::declare::{ common::ext::WireExprType, ext, queryable::ext::QueryableInfoType, Declare, DeclareBody, - DeclareQueryable, UndeclareQueryable, + DeclareMode, DeclareQueryable, UndeclareQueryable, }, }; use zenoh_sync::get_mut_unchecked; @@ -93,7 +93,7 @@ fn propagate_simple_queryable( let key_expr = Resource::decl_key(res, &mut dst_face); dst_face.primitives.send_declare(RoutingContext::with_expr( Declare { - interest_id: None, + mode: DeclareMode::Push, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -165,7 +165,7 @@ fn propagate_forget_simple_queryable(tables: &mut Tables, res: &mut Arc if let Some(id) = face_hat_mut!(face).local_subs.remove(res) { 
face.primitives.send_declare(RoutingContext::with_expr( Declare { - interest_id: None, + mode: DeclareMode::Push, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -418,7 +418,7 @@ pub(super) fn undeclare_client_subscription( if let Some(id) = face_hat_mut!(face).local_subs.remove(res) { face.primitives.send_declare(RoutingContext::with_expr( Declare { - interest_id: None, + mode: DeclareMode::Push, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -460,7 +460,7 @@ pub(super) fn pubsub_new_face(tables: &mut Tables, face: &mut Arc) { let key_expr = Resource::decl_key(sub, face); face.primitives.send_declare(RoutingContext::with_expr( Declare { - interest_id: None, + mode: DeclareMode::Push, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, diff --git a/zenoh/src/net/routing/hat/linkstate_peer/queries.rs b/zenoh/src/net/routing/hat/linkstate_peer/queries.rs index 150c12a632..356793e3a3 100644 --- a/zenoh/src/net/routing/hat/linkstate_peer/queries.rs +++ b/zenoh/src/net/routing/hat/linkstate_peer/queries.rs @@ -36,7 +36,7 @@ use zenoh_protocol::{ core::{WhatAmI, WireExpr, ZenohId}, network::declare::{ common::ext::WireExprType, ext, queryable::ext::QueryableInfoType, Declare, DeclareBody, - DeclareQueryable, UndeclareQueryable, + DeclareMode, DeclareQueryable, UndeclareQueryable, }, }; use zenoh_sync::get_mut_unchecked; @@ -126,7 +126,7 @@ fn send_sourced_queryable_to_net_childs( someface.primitives.send_declare(RoutingContext::with_expr( Declare { - interest_id: None, + mode: DeclareMode::Push, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType { @@ -170,7 +170,7 @@ fn propagate_simple_queryable( let key_expr = Resource::decl_key(res, &mut dst_face); dst_face.primitives.send_declare(RoutingContext::with_expr( Declare { - interest_id: None, + mode: DeclareMode::Push, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, 
ext_nodeid: ext::NodeIdType::DEFAULT, @@ -339,7 +339,7 @@ fn send_forget_sourced_queryable_to_net_childs( someface.primitives.send_declare(RoutingContext::with_expr( Declare { - interest_id: None, + mode: DeclareMode::Push, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType { @@ -365,7 +365,7 @@ fn propagate_forget_simple_queryable(tables: &mut Tables, res: &mut Arc) { let key_expr = Resource::decl_key(qabl, face); face.primitives.send_declare(RoutingContext::with_expr( Declare { - interest_id: None, + mode: DeclareMode::Push, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, diff --git a/zenoh/src/net/routing/hat/p2p_peer/pubsub.rs b/zenoh/src/net/routing/hat/p2p_peer/pubsub.rs index b495248788..5ac0b22846 100644 --- a/zenoh/src/net/routing/hat/p2p_peer/pubsub.rs +++ b/zenoh/src/net/routing/hat/p2p_peer/pubsub.rs @@ -30,7 +30,7 @@ use zenoh_protocol::{ core::{Reliability, WhatAmI}, network::declare::{ common::ext::WireExprType, ext, subscriber::ext::SubscriberInfo, Declare, DeclareBody, - DeclareSubscriber, UndeclareSubscriber, + DeclareMode, DeclareSubscriber, UndeclareSubscriber, }, }; use zenoh_sync::get_mut_unchecked; @@ -53,7 +53,7 @@ fn propagate_simple_subscription_to( let key_expr = Resource::decl_key(res, dst_face); dst_face.primitives.send_declare(RoutingContext::with_expr( Declare { - interest_id: None, + mode: DeclareMode::Push, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -137,7 +137,7 @@ fn declare_client_subscription( .primitives .send_declare(RoutingContext::with_expr( Declare { - interest_id: None, + mode: DeclareMode::Push, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -171,7 +171,7 @@ fn propagate_forget_simple_subscription(tables: &mut Tables, res: &Arc if let Some(id) = face_hat_mut!(face).local_subs.remove(res) { face.primitives.send_declare(RoutingContext::with_expr( Declare { - 
interest_id: None, + mode: DeclareMode::Push, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -206,7 +206,7 @@ pub(super) fn undeclare_client_subscription( if let Some(id) = face_hat_mut!(face).local_subs.remove(res) { face.primitives.send_declare(RoutingContext::with_expr( Declare { - interest_id: None, + mode: DeclareMode::Push, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, diff --git a/zenoh/src/net/routing/hat/p2p_peer/queries.rs b/zenoh/src/net/routing/hat/p2p_peer/queries.rs index 72c32b9217..c2d62c7658 100644 --- a/zenoh/src/net/routing/hat/p2p_peer/queries.rs +++ b/zenoh/src/net/routing/hat/p2p_peer/queries.rs @@ -33,7 +33,7 @@ use zenoh_protocol::{ core::{WhatAmI, WireExpr}, network::declare::{ common::ext::WireExprType, ext, queryable::ext::QueryableInfoType, Declare, DeclareBody, - DeclareQueryable, UndeclareQueryable, + DeclareMode, DeclareQueryable, UndeclareQueryable, }, }; use zenoh_sync::get_mut_unchecked; @@ -93,7 +93,7 @@ fn propagate_simple_queryable( let key_expr = Resource::decl_key(res, &mut dst_face); dst_face.primitives.send_declare(RoutingContext::with_expr( Declare { - interest_id: None, + mode: DeclareMode::Push, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -165,7 +165,7 @@ fn propagate_forget_simple_queryable(tables: &mut Tables, res: &mut Arc if let Some(id) = face_hat_mut!(face).local_subs.remove(res) { face.primitives.send_declare(RoutingContext::with_expr( Declare { - interest_id: None, + mode: DeclareMode::Push, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -412,7 +412,7 @@ fn propagate_forget_simple_subscription_to_peers(tables: &mut Tables, res: &Arc< if let Some(id) = face_hat_mut!(&mut face).local_subs.remove(res) { face.primitives.send_declare(RoutingContext::with_expr( Declare { - interest_id: None, + mode: DeclareMode::Push, ext_qos: 
ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -564,7 +564,7 @@ pub(super) fn undeclare_client_subscription( if let Some(id) = face_hat_mut!(face).local_subs.remove(res) { face.primitives.send_declare(RoutingContext::with_expr( Declare { - interest_id: None, + mode: DeclareMode::Push, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -606,7 +606,7 @@ pub(super) fn pubsub_new_face(tables: &mut Tables, face: &mut Arc) { let key_expr = Resource::decl_key(sub, face); face.primitives.send_declare(RoutingContext::with_expr( Declare { - interest_id: None, + mode: DeclareMode::Push, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -635,7 +635,7 @@ pub(super) fn pubsub_new_face(tables: &mut Tables, face: &mut Arc) { let key_expr = Resource::decl_key(sub, face); face.primitives.send_declare(RoutingContext::with_expr( Declare { - interest_id: None, + mode: DeclareMode::Push, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -774,7 +774,7 @@ pub(super) fn pubsub_linkstate_change(tables: &mut Tables, zid: &ZenohId, links: if forget { dst_face.primitives.send_declare(RoutingContext::with_expr( Declare { - interest_id: None, + mode: DeclareMode::Push, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -800,7 +800,7 @@ pub(super) fn pubsub_linkstate_change(tables: &mut Tables, zid: &ZenohId, links: }; dst_face.primitives.send_declare(RoutingContext::with_expr( Declare { - interest_id: None, + mode: DeclareMode::Push, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, diff --git a/zenoh/src/net/routing/hat/router/queries.rs b/zenoh/src/net/routing/hat/router/queries.rs index 99e787beb5..e647cf2dc7 100644 --- a/zenoh/src/net/routing/hat/router/queries.rs +++ b/zenoh/src/net/routing/hat/router/queries.rs @@ -36,7 +36,7 @@ use zenoh_protocol::{ 
core::{WhatAmI, WireExpr, ZenohId}, network::declare::{ common::ext::WireExprType, ext, queryable::ext::QueryableInfoType, Declare, DeclareBody, - DeclareQueryable, UndeclareQueryable, + DeclareMode, DeclareQueryable, UndeclareQueryable, }, }; use zenoh_sync::get_mut_unchecked; @@ -194,7 +194,7 @@ fn send_sourced_queryable_to_net_childs( someface.primitives.send_declare(RoutingContext::with_expr( Declare { - interest_id: None, + mode: DeclareMode::Push, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType { @@ -248,7 +248,7 @@ fn propagate_simple_queryable( let key_expr = Resource::decl_key(res, &mut dst_face); dst_face.primitives.send_declare(RoutingContext::with_expr( Declare { - interest_id: None, + mode: DeclareMode::Push, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -473,7 +473,7 @@ fn send_forget_sourced_queryable_to_net_childs( someface.primitives.send_declare(RoutingContext::with_expr( Declare { - interest_id: None, + mode: DeclareMode::Push, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType { @@ -499,7 +499,7 @@ fn propagate_forget_simple_queryable(tables: &mut Tables, res: &mut Arc) { let key_expr = Resource::decl_key(qabl, face); face.primitives.send_declare(RoutingContext::with_expr( Declare { - interest_id: None, + mode: DeclareMode::Push, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -775,7 +775,7 @@ pub(super) fn queries_new_face(tables: &mut Tables, face: &mut Arc) { let key_expr = Resource::decl_key(qabl, face); face.primitives.send_declare(RoutingContext::with_expr( Declare { - interest_id: None, + mode: DeclareMode::Push, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -874,7 +874,7 @@ pub(super) fn queries_linkstate_change(tables: &mut Tables, zid: &ZenohId, links if forget { dst_face.primitives.send_declare(RoutingContext::with_expr( Declare { - interest_id: 
None, + mode: DeclareMode::Push, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -900,7 +900,7 @@ pub(super) fn queries_linkstate_change(tables: &mut Tables, zid: &ZenohId, links let key_expr = Resource::decl_key(res, dst_face); dst_face.primitives.send_declare(RoutingContext::with_expr( Declare { - interest_id: None, + mode: DeclareMode::Push, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, diff --git a/zenoh/src/net/routing/mod.rs b/zenoh/src/net/routing/mod.rs index 0ddf12b82f..77f51c16b3 100644 --- a/zenoh/src/net/routing/mod.rs +++ b/zenoh/src/net/routing/mod.rs @@ -117,8 +117,7 @@ impl RoutingContext { DeclareBody::DeclareToken(m) => Some(&m.wire_expr), DeclareBody::UndeclareToken(m) => Some(&m.ext_wire_expr.wire_expr), DeclareBody::DeclareInterest(m) => m.wire_expr.as_ref(), - DeclareBody::FinalInterest(_) => None, - DeclareBody::UndeclareInterest(m) => Some(&m.ext_wire_expr.wire_expr), + DeclareBody::DeclareFinal(_) => None, }, NetworkBody::OAM(_) => None, } diff --git a/zenoh/src/net/runtime/adminspace.rs b/zenoh/src/net/runtime/adminspace.rs index d460ee3f1c..a5739d830c 100644 --- a/zenoh/src/net/runtime/adminspace.rs +++ b/zenoh/src/net/runtime/adminspace.rs @@ -39,8 +39,8 @@ use zenoh_protocol::{ }, network::{ declare::{queryable::ext::QueryableInfoType, subscriber::ext::SubscriberInfo}, - ext, Declare, DeclareBody, DeclareQueryable, DeclareSubscriber, Push, Request, Response, - ResponseFinal, + ext, Declare, DeclareBody, DeclareMode, DeclareQueryable, DeclareSubscriber, Push, Request, + Response, ResponseFinal, }, zenoh::{PushBody, RequestBody}, }; @@ -276,7 +276,7 @@ impl AdminSpace { zlock!(admin.primitives).replace(primitives.clone()); primitives.send_declare(Declare { - interest_id: None, + mode: DeclareMode::Push, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, @@ -289,7 +289,7 @@ impl AdminSpace { }); primitives.send_declare(Declare { - interest_id: None, + mode: 
DeclareMode::Push, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, diff --git a/zenoh/src/net/tests/tables.rs b/zenoh/src/net/tests/tables.rs index 4067f2ad8f..55ff9f0a4d 100644 --- a/zenoh/src/net/tests/tables.rs +++ b/zenoh/src/net/tests/tables.rs @@ -26,7 +26,7 @@ use zenoh_protocol::core::{ key_expr::keyexpr, ExprId, Reliability, WhatAmI, WireExpr, ZenohId, EMPTY_EXPR_ID, }; use zenoh_protocol::network::declare::subscriber::ext::SubscriberInfo; -use zenoh_protocol::network::{ext, Declare, DeclareBody, DeclareKeyExpr}; +use zenoh_protocol::network::{ext, Declare, DeclareBody, DeclareKeyExpr, DeclareMode}; use zenoh_protocol::zenoh::{PushBody, Put}; #[test] @@ -579,7 +579,7 @@ fn client_test() { Primitives::send_declare( primitives0.as_ref(), Declare { - interest_id: None, + mode: DeclareMode::Push, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -607,7 +607,7 @@ fn client_test() { Primitives::send_declare( primitives0.as_ref(), Declare { - interest_id: None, + mode: DeclareMode::Push, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -629,7 +629,7 @@ fn client_test() { Primitives::send_declare( primitives1.as_ref(), Declare { - interest_id: None, + mode: DeclareMode::Push, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -657,7 +657,7 @@ fn client_test() { Primitives::send_declare( primitives1.as_ref(), Declare { - interest_id: None, + mode: DeclareMode::Push, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -679,7 +679,7 @@ fn client_test() { Primitives::send_declare( primitives2.as_ref(), Declare { - interest_id: None, + mode: DeclareMode::Push, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, diff --git a/zenoh/src/session.rs b/zenoh/src/session.rs index addb757807..9bc6c9c331 100644 --- a/zenoh/src/session.rs +++ 
b/zenoh/src/session.rs @@ -71,7 +71,7 @@ use zenoh_protocol::{ network::{ declare::{ self, common::ext::WireExprType, queryable::ext::QueryableInfoType, - subscriber::ext::SubscriberInfo, Declare, DeclareBody, DeclareKeyExpr, + subscriber::ext::SubscriberInfo, Declare, DeclareBody, DeclareKeyExpr, DeclareMode, DeclareQueryable, DeclareSubscriber, UndeclareQueryable, UndeclareSubscriber, }, ext, @@ -872,7 +872,7 @@ impl Session { let primitives = state.primitives.as_ref().unwrap().clone(); drop(state); primitives.send_declare(Declare { - interest_id: None, + mode: DeclareMode::Push, ext_qos: declare::ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: declare::ext::NodeIdType::DEFAULT, @@ -1085,7 +1085,7 @@ impl Session { // }; primitives.send_declare(Declare { - interest_id: None, + mode: DeclareMode::Push, ext_qos: declare::ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: declare::ext::NodeIdType::DEFAULT, @@ -1142,7 +1142,7 @@ impl Session { let primitives = state.primitives.as_ref().unwrap().clone(); drop(state); primitives.send_declare(Declare { - interest_id: None, + mode: DeclareMode::Push, ext_qos: declare::ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: declare::ext::NodeIdType::DEFAULT, @@ -1194,7 +1194,7 @@ impl Session { distance: 0, }; primitives.send_declare(Declare { - interest_id: None, + mode: DeclareMode::Push, ext_qos: declare::ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: declare::ext::NodeIdType::DEFAULT, @@ -1216,7 +1216,7 @@ impl Session { let primitives = state.primitives.as_ref().unwrap().clone(); drop(state); primitives.send_declare(Declare { - interest_id: None, + mode: DeclareMode::Push, ext_qos: declare::ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: declare::ext::NodeIdType::DEFAULT, @@ -1252,7 +1252,7 @@ impl Session { let primitives = state.primitives.as_ref().unwrap().clone(); drop(state); primitives.send_declare(Declare { - interest_id: None, + mode: DeclareMode::Push, ext_qos: declare::ext::QoSType::DECLARE, 
ext_tstamp: None, ext_nodeid: declare::ext::NodeIdType::DEFAULT, @@ -1277,7 +1277,7 @@ impl Session { let primitives = state.primitives.as_ref().unwrap().clone(); drop(state); primitives.send_declare(Declare { - interest_id: None, + mode: DeclareMode::Push, ext_qos: ext::QoSType::DECLARE, ext_tstamp: None, ext_nodeid: ext::NodeIdType::DEFAULT, @@ -2047,8 +2047,7 @@ impl Primitives for Session { DeclareBody::DeclareToken(_) => todo!(), DeclareBody::UndeclareToken(_) => todo!(), DeclareBody::DeclareInterest(_) => todo!(), - DeclareBody::FinalInterest(_) => todo!(), - DeclareBody::UndeclareInterest(_) => todo!(), + DeclareBody::DeclareFinal(_) => todo!(), } } From 8f8eb2589a57c1074622c125f5111c4afde9a1e7 Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Fri, 5 Apr 2024 11:19:56 +0200 Subject: [PATCH 098/124] typedefs for complex builder types (#890) --- zenoh/src/publication.rs | 21 +++++++++++++++------ zenoh/src/queryable.rs | 8 ++++++-- zenoh/src/session.rs | 4 ++-- 3 files changed, 23 insertions(+), 10 deletions(-) diff --git a/zenoh/src/publication.rs b/zenoh/src/publication.rs index 64fa5b49c6..c176ad32e0 100644 --- a/zenoh/src/publication.rs +++ b/zenoh/src/publication.rs @@ -46,7 +46,8 @@ pub struct PublicationBuilderPut { #[derive(Debug, Clone)] pub struct PublicationBuilderDelete; -/// A builder for initializing a [`put`](crate::Session::put) and [`delete`](crate::Session::delete) operations +/// A builder for initializing [`Session::put`](crate::Session::put), [`Session::delete`](crate::Session::delete), +/// [`Publisher::put`](crate::Publisher::put), and [`Publisher::delete`](crate::Publisher::delete) operations. 
/// /// # Examples /// ``` @@ -78,6 +79,17 @@ pub struct PublicationBuilder { pub(crate) attachment: Option, } +pub type SessionPutBuilder<'a, 'b> = + PublicationBuilder, PublicationBuilderPut>; + +pub type SessionDeleteBuilder<'a, 'b> = + PublicationBuilder, PublicationBuilderDelete>; + +pub type PublisherPutBuilder<'a> = PublicationBuilder<&'a Publisher<'a>, PublicationBuilderPut>; + +pub type PublisherDeleteBuilder<'a> = + PublicationBuilder<&'a Publisher<'a>, PublicationBuilderDelete>; + impl QoSBuilderTrait for PublicationBuilder, T> { #[inline] fn congestion_control(self, congestion_control: CongestionControl) -> Self { @@ -405,10 +417,7 @@ impl<'a> Publisher<'a> { /// # } /// ``` #[inline] - pub fn put( - &self, - payload: IntoPayload, - ) -> PublicationBuilder<&Publisher<'_>, PublicationBuilderPut> + pub fn put(&self, payload: IntoPayload) -> PublisherPutBuilder<'_> where IntoPayload: Into, { @@ -439,7 +448,7 @@ impl<'a> Publisher<'a> { /// publisher.delete().res().await.unwrap(); /// # } /// ``` - pub fn delete(&self) -> PublicationBuilder<&Publisher<'_>, PublicationBuilderDelete> { + pub fn delete(&self) -> PublisherDeleteBuilder<'_> { PublicationBuilder { publisher: self, kind: PublicationBuilderDelete, diff --git a/zenoh/src/queryable.rs b/zenoh/src/queryable.rs index 0696fcbe33..c2fd67fcf4 100644 --- a/zenoh/src/queryable.rs +++ b/zenoh/src/queryable.rs @@ -129,7 +129,7 @@ impl Query { &self, key_expr: TryIntoKeyExpr, payload: IntoPayload, - ) -> ReplyBuilder<'_, 'b, ReplyBuilderPut> + ) -> ReplyPutBuilder<'_, 'b> where TryIntoKeyExpr: TryInto>, >>::Error: Into, @@ -171,7 +171,7 @@ impl Query { pub fn reply_del<'b, TryIntoKeyExpr>( &self, key_expr: TryIntoKeyExpr, - ) -> ReplyBuilder<'_, 'b, ReplyBuilderDelete> + ) -> ReplyDeleteBuilder<'_, 'b> where TryIntoKeyExpr: TryInto>, >>::Error: Into, @@ -274,6 +274,10 @@ pub struct ReplyBuilder<'a, 'b, T> { attachment: Option, } +pub type ReplyPutBuilder<'a, 'b> = ReplyBuilder<'a, 'b, ReplyBuilderPut>; + +pub 
type ReplyDeleteBuilder<'a, 'b> = ReplyBuilder<'a, 'b, ReplyBuilderDelete>; + impl TimestampBuilderTrait for ReplyBuilder<'_, '_, T> { fn timestamp>>(self, timestamp: U) -> Self { Self { diff --git a/zenoh/src/session.rs b/zenoh/src/session.rs index c44cb4f817..d9427f270c 100644 --- a/zenoh/src/session.rs +++ b/zenoh/src/session.rs @@ -705,7 +705,7 @@ impl Session { &'a self, key_expr: TryIntoKeyExpr, payload: IntoPayload, - ) -> PublicationBuilder, PublicationBuilderPut> + ) -> SessionPutBuilder<'a, 'b> where TryIntoKeyExpr: TryInto>, >>::Error: Into, @@ -745,7 +745,7 @@ impl Session { pub fn delete<'a, 'b: 'a, TryIntoKeyExpr>( &'a self, key_expr: TryIntoKeyExpr, - ) -> PublicationBuilder, PublicationBuilderDelete> + ) -> SessionDeleteBuilder<'a, 'b> where TryIntoKeyExpr: TryInto>, >>::Error: Into, From 71a9423738076d07ff6a83a043cee250cb06350d Mon Sep 17 00:00:00 2001 From: OlivierHecart Date: Fri, 5 Apr 2024 14:19:38 +0200 Subject: [PATCH 099/124] Declare message can be Push/Request/RequestContinuous/Response 2 (#906) * Declare message can be Push/Request/RequestContinuous/Response * Address review comments * Remove F: Future flag from DeclareInterest * cargo fmt --all * Remove unused Interest flags field * Update doc * Remove unneeded interest_id field * Update commons/zenoh-protocol/src/network/declare.rs --------- Co-authored-by: Luca Cominardi --- commons/zenoh-codec/src/network/declare.rs | 6 +- commons/zenoh-protocol/src/network/declare.rs | 58 +++++++++++-------- 2 files changed, 34 insertions(+), 30 deletions(-) diff --git a/commons/zenoh-codec/src/network/declare.rs b/commons/zenoh-codec/src/network/declare.rs index 173fbe5e4a..6e9dad12ce 100644 --- a/commons/zenoh-codec/src/network/declare.rs +++ b/commons/zenoh-codec/src/network/declare.rs @@ -967,7 +967,6 @@ where fn write(self, writer: &mut W, x: &interest::DeclareInterest) -> Self::Output { let interest::DeclareInterest { - id, interest: _, wire_expr, } = x; @@ -977,7 +976,6 @@ where self.write(&mut 
*writer, header)?; // Body - self.write(&mut *writer, id)?; self.write(&mut *writer, x.options())?; if let Some(we) = wire_expr.as_ref() { self.write(&mut *writer, we)?; @@ -1012,9 +1010,8 @@ where } // Body - let id: interest::InterestId = self.codec.read(&mut *reader)?; let options: u8 = self.codec.read(&mut *reader)?; - let interest = Interest::from((imsg::flags(self.header), options)); + let interest = Interest::from(options); let mut wire_expr = None; if interest.restricted() { @@ -1035,7 +1032,6 @@ where } Ok(interest::DeclareInterest { - id, interest, wire_expr, }) diff --git a/commons/zenoh-protocol/src/network/declare.rs b/commons/zenoh-protocol/src/network/declare.rs index 996e7768ee..6cd2b2200f 100644 --- a/commons/zenoh-protocol/src/network/declare.rs +++ b/commons/zenoh-protocol/src/network/declare.rs @@ -733,23 +733,23 @@ pub mod interest { /// # DeclareInterest message /// - /// The DECLARE INTEREST message is sent to request the transmission of current and/or future + /// The DECLARE INTEREST message is sent to request the transmission of current and optionally future /// declarations of a given kind matching a target keyexpr. E.g., a declare interest could be /// sent to request the transmisison of all current subscriptions matching `a/*`. /// /// The behaviour of a DECLARE INTEREST depends on the DECLARE MODE in the DECLARE MESSAGE: - /// - Push: only future declarations + /// - Push: invalid /// - Request: only current declarations /// - RequestContinous: current and future declarations /// - Response: invalid /// - /// E.g., the [`DeclareInterest`] message flow is the following: + /// E.g., the [`DeclareInterest`] message flow is the following for a Request: /// /// ```text /// A B /// | DECL INTEREST | - /// |------------------>| -- Sent in Declare::RequestContinuous. - /// | | This is a DeclareInterest e.g. for subscriber declarations/undeclarations. + /// |------------------>| -- Sent in Declare::Request. 
+ /// | | This is a DeclareInterest e.g. for subscriber declarations. /// | | /// | DECL SUBSCRIBER | /// |<------------------| -- Sent in Declare::Response @@ -760,6 +760,26 @@ pub mod interest { /// | | /// | FINAL | /// |<------------------| -- Sent in Declare::Response + /// ``` + /// + /// + /// And the [`DeclareInterest`] message flow is the following for a RequestContinuous: + /// + /// ```text + /// A B + /// | DECL INTEREST | + /// |------------------>| -- Sent in Declare::RequestContinuous. + /// | | This is a DeclareInterest e.g. for subscriber declarations/undeclarations. + /// | | + /// | DECL SUBSCRIBER | + /// |<------------------| -- Sent in Declare::Push + /// | DECL SUBSCRIBER | + /// |<------------------| -- Sent in Declare::Push + /// | DECL SUBSCRIBER | + /// |<------------------| -- Sent in Declare::Push + /// | | + /// | FINAL | + /// |<------------------| -- Sent in Declare::Response /// | | /// | DECL SUBSCRIBER | /// |<------------------| -- Sent in Declare::Push. This is a new subscriber declaration. 
@@ -784,9 +804,7 @@ pub mod interest { /// /// 7 6 5 4 3 2 1 0 /// +-+-+-+-+-+-+-+-+ - /// |Z|F|X| D_INT | - /// +---------------+ - /// ~ intst_id:z32 ~ + /// |Z|X|X| D_INT | /// +---------------+ /// |A|M|N|R|T|Q|S|K| (*) /// +---------------+ @@ -809,7 +827,6 @@ pub mod interest { /// ``` #[derive(Debug, Clone, PartialEq, Eq)] pub struct DeclareInterest { - pub id: InterestId, pub interest: Interest, pub wire_expr: Option>, } @@ -834,12 +851,10 @@ pub mod interest { use rand::Rng; let mut rng = rand::thread_rng(); - let id: InterestId = rng.gen(); let wire_expr = rng.gen_bool(0.5).then_some(WireExpr::rand()); let interest = Interest::rand(); Self { - id, wire_expr, interest, } @@ -848,7 +863,6 @@ pub mod interest { #[derive(Clone, Copy)] pub struct Interest { - flags: u8, options: u8, } @@ -870,14 +884,11 @@ pub mod interest { ); const fn options(options: u8) -> Self { - Self { flags: 0, options } + Self { options } } pub const fn empty() -> Self { - Self { - flags: 0, - options: 0, - } + Self { options: 0 } } pub const fn keyexprs(&self) -> bool { @@ -982,17 +993,17 @@ pub mod interest { impl Add for Interest { type Output = Self; + #[allow(clippy::suspicious_arithmetic_impl)] // Allows to implement Add & Sub for Interest fn add(self, rhs: Self) -> Self::Output { Self { - flags: self.flags | rhs.flags, options: self.options | rhs.options, } } } impl AddAssign for Interest { + #[allow(clippy::suspicious_op_assign_impl)] // Allows to implement Add & Sub for Interest fn add_assign(&mut self, rhs: Self) { - self.flags |= rhs.flags; self.options |= rhs.options; } } @@ -1002,7 +1013,6 @@ pub mod interest { fn sub(self, rhs: Self) -> Self::Output { Self { - flags: self.flags & !rhs.flags, options: self.options & !rhs.options, } } @@ -1010,15 +1020,13 @@ pub mod interest { impl SubAssign for Interest { fn sub_assign(&mut self, rhs: Self) { - self.flags &= !rhs.flags; self.options &= !rhs.options; } } - impl From<(u8, u8)> for Interest { - fn from(value: (u8, u8)) -> 
Self { - let (flags, options) = value; - Self { flags, options } + impl From for Interest { + fn from(options: u8) -> Self { + Self { options } } } From eb1a80ac9ddc7c15942238e477993825f559cd17 Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Fri, 5 Apr 2024 14:47:11 +0200 Subject: [PATCH 100/124] Fix use and unstable visibility --- zenoh/src/prelude.rs | 5 ++++- zenoh/src/queryable.rs | 3 +-- zenoh/src/sample/builder.rs | 8 +++----- zenoh/src/session.rs | 1 + zenoh/tests/qos.rs | 1 - zenoh/tests/routing.rs | 1 - 6 files changed, 9 insertions(+), 10 deletions(-) diff --git a/zenoh/src/prelude.rs b/zenoh/src/prelude.rs index 850148f506..2e95e8d908 100644 --- a/zenoh/src/prelude.rs +++ b/zenoh/src/prelude.rs @@ -62,8 +62,11 @@ pub(crate) mod common { pub use zenoh_protocol::core::{CongestionControl, Reliability, WhatAmI}; pub use crate::sample::builder::{ - QoSBuilderTrait, SampleBuilderTrait, TimestampBuilderTrait, ValueBuilderTrait, + QoSBuilderTrait, TimestampBuilderTrait, ValueBuilderTrait, }; + + #[zenoh_macros::unstable] + pub use crate::sample::builder::SampleBuilderTrait; } /// Prelude to import when using Zenoh's sync API. 
diff --git a/zenoh/src/queryable.rs b/zenoh/src/queryable.rs index c2fd67fcf4..794ff3a504 100644 --- a/zenoh/src/queryable.rs +++ b/zenoh/src/queryable.rs @@ -18,13 +18,12 @@ use crate::encoding::Encoding; use crate::handlers::{locked, DefaultHandler}; use crate::net::primitives::Primitives; use crate::prelude::*; -use crate::sample::builder::SampleBuilder; use crate::sample::{QoSBuilder, SourceInfo}; use crate::Id; use crate::SessionRef; use crate::Undeclarable; #[cfg(feature = "unstable")] -use crate::{query::ReplyKeyExpr, sample::Attachment}; +use crate::{query::ReplyKeyExpr, sample::Attachment, sample::builder::SampleBuilder}; use std::fmt; use std::future::Ready; use std::ops::Deref; diff --git a/zenoh/src/sample/builder.rs b/zenoh/src/sample/builder.rs index 5fab36617d..fca55edd09 100644 --- a/zenoh/src/sample/builder.rs +++ b/zenoh/src/sample/builder.rs @@ -15,11 +15,8 @@ use std::marker::PhantomData; #[cfg(feature = "unstable")] -use crate::sample::Attachment; -use crate::sample::QoS; -use crate::sample::QoSBuilder; -#[cfg(feature = "unstable")] -use crate::sample::SourceInfo; +use crate::sample::{Attachment, SourceInfo}; +use crate::sample::{QoS, QoSBuilder}; use crate::Encoding; use crate::KeyExpr; use crate::Payload; @@ -47,6 +44,7 @@ pub trait TimestampBuilderTrait { fn timestamp>>(self, timestamp: T) -> Self; } +#[zenoh_macros::unstable] pub trait SampleBuilderTrait { /// Attach source information #[zenoh_macros::unstable] diff --git a/zenoh/src/session.rs b/zenoh/src/session.rs index d9427f270c..67bec5f488 100644 --- a/zenoh/src/session.rs +++ b/zenoh/src/session.rs @@ -41,6 +41,7 @@ use crate::Priority; use crate::Sample; use crate::SampleKind; use crate::Selector; +#[cfg(feature = "unstable")] use crate::SourceInfo; use crate::Value; use log::{error, trace, warn}; diff --git a/zenoh/tests/qos.rs b/zenoh/tests/qos.rs index 5fd3edd985..1885c316ea 100644 --- a/zenoh/tests/qos.rs +++ b/zenoh/tests/qos.rs @@ -13,7 +13,6 @@ // use std::time::Duration; use 
zenoh::prelude::r#async::*; -use zenoh::sample::builder::QoSBuilderTrait; use zenoh::{publication::Priority, SessionDeclarations}; use zenoh_core::ztimeout; diff --git a/zenoh/tests/routing.rs b/zenoh/tests/routing.rs index 1031630a68..56bacd7fdd 100644 --- a/zenoh/tests/routing.rs +++ b/zenoh/tests/routing.rs @@ -18,7 +18,6 @@ use std::time::Duration; use tokio_util::{sync::CancellationToken, task::TaskTracker}; use zenoh::config::{Config, ModeDependentValue}; use zenoh::prelude::r#async::*; -use zenoh::sample::builder::QoSBuilderTrait; use zenoh::Result; use zenoh_core::ztimeout; use zenoh_protocol::core::{WhatAmI, WhatAmIMatcher}; From a43e4518875f1c4be314943e78fdc483ae6f9844 Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Fri, 5 Apr 2024 14:50:16 +0200 Subject: [PATCH 101/124] Add payload and encoding accessors for Query --- zenoh/src/queryable.rs | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/zenoh/src/queryable.rs b/zenoh/src/queryable.rs index 794ff3a504..6fbb4e9090 100644 --- a/zenoh/src/queryable.rs +++ b/zenoh/src/queryable.rs @@ -23,7 +23,7 @@ use crate::Id; use crate::SessionRef; use crate::Undeclarable; #[cfg(feature = "unstable")] -use crate::{query::ReplyKeyExpr, sample::Attachment, sample::builder::SampleBuilder}; +use crate::{query::ReplyKeyExpr, sample::builder::SampleBuilder, sample::Attachment}; use std::fmt; use std::future::Ready; use std::ops::Deref; @@ -97,6 +97,18 @@ impl Query { self.inner.value.as_ref() } + /// This Query's payload. + #[inline(always)] + pub fn payload(&self) -> Option<&Payload> { + self.inner.value.as_ref().map(|v| &v.payload) + } + + /// This Query's encoding. 
+ #[inline(always)] + pub fn encoding(&self) -> Option<&Encoding> { + self.inner.value.as_ref().map(|v| &v.encoding) + } + #[zenoh_macros::unstable] pub fn attachment(&self) -> Option<&Attachment> { self.inner.attachment.as_ref() From 1ad8c84c8b3f2f0f93f5dadb3a190af198e4e289 Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Fri, 5 Apr 2024 14:52:08 +0200 Subject: [PATCH 102/124] cargo fmt --all --- zenoh/src/prelude.rs | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/zenoh/src/prelude.rs b/zenoh/src/prelude.rs index 2e95e8d908..e2327c0dcc 100644 --- a/zenoh/src/prelude.rs +++ b/zenoh/src/prelude.rs @@ -61,9 +61,7 @@ pub(crate) mod common { pub use crate::publication::PublisherDeclarations; pub use zenoh_protocol::core::{CongestionControl, Reliability, WhatAmI}; - pub use crate::sample::builder::{ - QoSBuilderTrait, TimestampBuilderTrait, ValueBuilderTrait, - }; + pub use crate::sample::builder::{QoSBuilderTrait, TimestampBuilderTrait, ValueBuilderTrait}; #[zenoh_macros::unstable] pub use crate::sample::builder::SampleBuilderTrait; From 5ee2bdb26780926b381d9bb93f0d52a262b06488 Mon Sep 17 00:00:00 2001 From: OlivierHecart Date: Fri, 5 Apr 2024 15:16:12 +0200 Subject: [PATCH 103/124] Declare message can be Push/Request/RequestContinuous/Response 3 (#908) * Declare message can be Push/Request/RequestContinuous/Response * Address review comments * Remove F: Future flag from DeclareInterest * cargo fmt --all * Remove unused Interest flags field * Update doc * Remove unneeded interest_id field * Update commons/zenoh-protocol/src/network/declare.rs * Remove unused UndeclareInterest --------- Co-authored-by: Luca Cominardi --- commons/zenoh-protocol/src/network/declare.rs | 34 ------------------- commons/zenoh-protocol/src/network/mod.rs | 4 +-- 2 files changed, 2 insertions(+), 36 deletions(-) diff --git a/commons/zenoh-protocol/src/network/declare.rs b/commons/zenoh-protocol/src/network/declare.rs index 6cd2b2200f..31e8adcc6e 100644 --- 
a/commons/zenoh-protocol/src/network/declare.rs +++ b/commons/zenoh-protocol/src/network/declare.rs @@ -1029,38 +1029,4 @@ pub mod interest { Self { options } } } - - /// ```text - /// Flags: - /// - X: Reserved - /// - X: Reserved - /// - Z: Extension If Z==1 then at least one extension is present - /// - /// 7 6 5 4 3 2 1 0 - /// +-+-+-+-+-+-+-+-+ - /// |Z|X|X| U_INT | - /// +---------------+ - /// ~ intst_id:z32 ~ - /// +---------------+ - /// ~ [decl_exts] ~ if Z==1 - /// +---------------+ - /// ``` - #[derive(Debug, Clone, PartialEq, Eq)] - pub struct UndeclareInterest { - pub id: InterestId, - pub ext_wire_expr: common::ext::WireExprType, - } - - impl UndeclareInterest { - #[cfg(feature = "test")] - pub fn rand() -> Self { - use rand::Rng; - let mut rng = rand::thread_rng(); - - let id: InterestId = rng.gen(); - let ext_wire_expr = common::ext::WireExprType::rand(); - - Self { id, ext_wire_expr } - } - } } diff --git a/commons/zenoh-protocol/src/network/mod.rs b/commons/zenoh-protocol/src/network/mod.rs index cbf9894aef..e60388f425 100644 --- a/commons/zenoh-protocol/src/network/mod.rs +++ b/commons/zenoh-protocol/src/network/mod.rs @@ -21,8 +21,8 @@ use core::fmt; pub use declare::{ Declare, DeclareBody, DeclareFinal, DeclareInterest, DeclareKeyExpr, DeclareMode, - DeclareQueryable, DeclareSubscriber, DeclareToken, UndeclareInterest, UndeclareKeyExpr, - UndeclareQueryable, UndeclareSubscriber, UndeclareToken, + DeclareQueryable, DeclareSubscriber, DeclareToken, UndeclareKeyExpr, UndeclareQueryable, + UndeclareSubscriber, UndeclareToken, }; pub use oam::Oam; pub use push::Push; From 5aedd2c09219073895ab1fcd105f2fb2b05a5d86 Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Mon, 8 Apr 2024 18:01:43 +0200 Subject: [PATCH 104/124] Payload tuple generic impl --- Cargo.lock | 7 + Cargo.toml | 1 + zenoh/Cargo.toml | 1 + zenoh/src/payload.rs | 1143 ++++++++++++++++++++++++++++++++++++--- zenoh/src/queryable.rs | 1 + zenoh/src/session.rs | 13 +- 
zenoh/src/subscriber.rs | 3 - 7 files changed, 1093 insertions(+), 76 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index d3ea8978b5..3f74af9ed1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3954,6 +3954,12 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" +[[package]] +name = "unwrap-infallible" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "151ac09978d3c2862c4e39b557f4eceee2cc72150bc4cb4f16abf061b6e381fb" + [[package]] name = "unzip-n" version = "0.1.2" @@ -4479,6 +4485,7 @@ dependencies = [ "tokio", "tokio-util", "uhlc", + "unwrap-infallible", "uuid", "vec_map", "zenoh-buffers", diff --git a/Cargo.toml b/Cargo.toml index 9210c96b70..d02f84eca8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -159,6 +159,7 @@ tokio-rustls = "0.25.0" console-subscriber = "0.2" typenum = "1.16.0" uhlc = { version = "0.7.0", default-features = false } # Default features are disabled due to usage in no_std crates +unwrap-infallible = "0.1.5" unzip-n = "0.1.2" url = "2.3.1" urlencoding = "2.1.2" diff --git a/zenoh/Cargo.toml b/zenoh/Cargo.toml index d20a4b914e..80cf8ba1bc 100644 --- a/zenoh/Cargo.toml +++ b/zenoh/Cargo.toml @@ -90,6 +90,7 @@ serde_yaml = { workspace = true } socket2 = { workspace = true } stop-token = { workspace = true } uhlc = { workspace = true, features = ["default"] } +unwrap-infallible = { workspace = true } uuid = { workspace = true, features = ["default"] } vec_map = { workspace = true } zenoh-buffers = { workspace = true, features = ["std"] } diff --git a/zenoh/src/payload.rs b/zenoh/src/payload.rs index ed2a58145c..db3126d93d 100644 --- a/zenoh/src/payload.rs +++ b/zenoh/src/payload.rs @@ -14,17 +14,38 @@ //! Payload primitives. 
use crate::buffers::ZBuf; +use std::marker::PhantomData; use std::{ borrow::Cow, convert::Infallible, fmt::Debug, ops::Deref, string::FromUtf8Error, sync::Arc, }; -use zenoh_buffers::buffer::Buffer; +use unwrap_infallible::UnwrapInfallible; use zenoh_buffers::{ - buffer::SplitBuffer, reader::HasReader, writer::HasWriter, ZBufReader, ZSlice, + buffer::{Buffer, SplitBuffer}, + reader::{HasReader, Reader}, + writer::HasWriter, + ZBufReader, ZSlice, }; -use zenoh_result::ZResult; +use zenoh_codec::{RCodec, WCodec, Zenoh080}; +use zenoh_result::{ZError, ZResult}; #[cfg(feature = "shared-memory")] use zenoh_shm::SharedMemoryBuf; +/// Trait to encode a type `T` into a [`Value`]. +pub trait Serialize { + type Output; + + /// The implementer should take care of serializing the type `T` and set the proper [`Encoding`]. + fn serialize(self, t: T) -> Self::Output; +} + +pub trait Deserialize<'a, T> { + type Error; + + /// The implementer should take care of deserializing the type `T` based on the [`Encoding`] information. + fn deserialize(self, t: &'a Payload) -> Result; +} + +/// A payload contains the serialized bytes of user data. #[repr(transparent)] #[derive(Clone, Debug, Default, PartialEq, Eq)] pub struct Payload(ZBuf); @@ -57,14 +78,17 @@ impl Payload { pub fn reader(&self) -> PayloadReader<'_> { PayloadReader(self.0.reader()) } -} - -/// A reader that implements [`std::io::Read`] trait to read from a [`Payload`]. -pub struct PayloadReader<'a>(ZBufReader<'a>); -impl std::io::Read for PayloadReader<'_> { - fn read(&mut self, buf: &mut [u8]) -> std::io::Result { - self.0.read(buf) + /// Get a [`PayloadReader`] implementing [`std::io::Read`] trait. + pub fn iter(&self) -> PayloadIterator<'_, T> + where + T: TryFrom, + ZSerde: for<'b> Deserialize<'b, T, Error = ZDeserializeError>, + { + PayloadIterator { + reader: self.0.reader(), + _t: PhantomData::, + } } } @@ -99,19 +123,45 @@ impl Payload { } } -/// Trait to encode a type `T` into a [`Value`]. 
-pub trait Serialize { - type Output; +/// A reader that implements [`std::io::Read`] trait to read from a [`Payload`]. +pub struct PayloadReader<'a>(ZBufReader<'a>); - /// The implementer should take care of serializing the type `T` and set the proper [`Encoding`]. - fn serialize(self, t: T) -> Self::Output; +impl std::io::Read for PayloadReader<'_> { + fn read(&mut self, buf: &mut [u8]) -> std::io::Result { + std::io::Read::read(&mut self.0, buf) + } } -pub trait Deserialize<'a, T> { - type Error; +/// An iterator that implements [`std::iter::Iterator`] trait to iterate on values `T` in a [`Payload`]. +/// Note that [`Payload`] contains a serialized version of `T` and iterating over a [`Payload`] performs lazy deserialization. +pub struct PayloadIterator<'a, T> +where + ZSerde: Deserialize<'a, T>, +{ + reader: ZBufReader<'a>, + _t: PhantomData, +} - /// The implementer should take care of deserializing the type `T` based on the [`Encoding`] information. - fn deserialize(self, t: &'a Payload) -> Result; +impl<'a, T> Iterator for PayloadIterator<'a, T> +where + ZSerde: for<'b> Deserialize<'b, T, Error = ZDeserializeError>, +{ + type Item = T; + + fn next(&mut self) -> Option { + let codec = Zenoh080::new(); + + let kbuf: ZBuf = codec.read(&mut self.reader).ok()?; + let kpld = Payload::new(kbuf); + + let t = ZSerde.deserialize(&kpld).ok()?; + Some(t) + } + + fn size_hint(&self) -> (usize, Option) { + let remaining = self.reader.remaining(); + (remaining, Some(remaining)) + } } /// The default serializer for Zenoh payload. It supports primitives types, such as: vec, int, uint, float, string, bool. 
@@ -122,7 +172,7 @@ pub struct ZSerde; #[derive(Debug, Clone, Copy)] pub struct ZDeserializeError; -// Bytes +// ZBuf impl Serialize for ZSerde { type Output = Payload; @@ -131,9 +181,9 @@ impl Serialize for ZSerde { } } -impl From for ZBuf { - fn from(value: Payload) -> Self { - value.0 +impl From for Payload { + fn from(t: ZBuf) -> Self { + ZSerde.serialize(t) } } @@ -141,16 +191,23 @@ impl Deserialize<'_, ZBuf> for ZSerde { type Error = Infallible; fn deserialize(self, v: &Payload) -> Result { - Ok(v.into()) + Ok(v.0.clone()) + } +} + +impl From for ZBuf { + fn from(value: Payload) -> Self { + value.0 } } impl From<&Payload> for ZBuf { fn from(value: &Payload) -> Self { - value.0.clone() + ZSerde.deserialize(value).unwrap_infallible() } } +// Vec impl Serialize> for ZSerde { type Output = Payload; @@ -159,11 +216,9 @@ impl Serialize> for ZSerde { } } -impl Serialize<&[u8]> for ZSerde { - type Output = Payload; - - fn serialize(self, t: &[u8]) -> Self::Output { - Payload::new(t.to_vec()) +impl From> for Payload { + fn from(t: Vec) -> Self { + ZSerde.serialize(t) } } @@ -171,16 +226,38 @@ impl Deserialize<'_, Vec> for ZSerde { type Error = Infallible; fn deserialize(self, v: &Payload) -> Result, Self::Error> { - Ok(Vec::from(v)) + Ok(v.0.contiguous().to_vec()) + } +} + +impl From for Vec { + fn from(value: Payload) -> Self { + ZSerde.deserialize(&value).unwrap_infallible() } } impl From<&Payload> for Vec { fn from(value: &Payload) -> Self { - Cow::from(value).to_vec() + ZSerde.deserialize(value).unwrap_infallible() } } +// &[u8] +impl Serialize<&[u8]> for ZSerde { + type Output = Payload; + + fn serialize(self, t: &[u8]) -> Self::Output { + Payload::new(t.to_vec()) + } +} + +impl From<&[u8]> for Payload { + fn from(t: &[u8]) -> Self { + ZSerde.serialize(t) + } +} + +// Cow<[u8]> impl<'a> Serialize> for ZSerde { type Output = Payload; @@ -189,6 +266,12 @@ impl<'a> Serialize> for ZSerde { } } +impl From> for Payload { + fn from(t: Cow<'_, [u8]>) -> Self { + 
ZSerde.serialize(t) + } +} + impl<'a> Deserialize<'a, Cow<'a, [u8]>> for ZSerde { type Error = Infallible; @@ -199,7 +282,7 @@ impl<'a> Deserialize<'a, Cow<'a, [u8]>> for ZSerde { impl<'a> From<&'a Payload> for Cow<'a, [u8]> { fn from(value: &'a Payload) -> Self { - value.0.contiguous() + ZSerde.deserialize(value).unwrap_infallible() } } @@ -212,11 +295,9 @@ impl Serialize for ZSerde { } } -impl Serialize<&str> for ZSerde { - type Output = Payload; - - fn serialize(self, s: &str) -> Self::Output { - Self.serialize(s.to_string()) +impl From for Payload { + fn from(t: String) -> Self { + ZSerde.serialize(t) } } @@ -224,7 +305,16 @@ impl Deserialize<'_, String> for ZSerde { type Error = FromUtf8Error; fn deserialize(self, v: &Payload) -> Result { - String::from_utf8(Vec::from(v)) + let v: Vec = ZSerde.deserialize(v).unwrap_infallible(); + String::from_utf8(v) + } +} + +impl TryFrom for String { + type Error = FromUtf8Error; + + fn try_from(value: Payload) -> Result { + ZSerde.deserialize(&value) } } @@ -236,14 +326,22 @@ impl TryFrom<&Payload> for String { } } -impl TryFrom for String { - type Error = FromUtf8Error; +// &str +impl Serialize<&str> for ZSerde { + type Output = Payload; - fn try_from(value: Payload) -> Result { - ZSerde.deserialize(&value) + fn serialize(self, s: &str) -> Self::Output { + Self.serialize(s.to_string()) } } +impl From<&str> for Payload { + fn from(t: &str) -> Self { + ZSerde.serialize(t) + } +} + +// Cow impl<'a> Serialize> for ZSerde { type Output = Payload; @@ -252,6 +350,12 @@ impl<'a> Serialize> for ZSerde { } } +impl From> for Payload { + fn from(t: Cow<'_, str>) -> Self { + ZSerde.serialize(t) + } +} + impl<'a> Deserialize<'a, Cow<'a, str>> for ZSerde { type Error = FromUtf8Error; @@ -277,7 +381,11 @@ macro_rules! 
impl_int { fn serialize(self, t: $t) -> Self::Output { let bs = t.to_le_bytes(); - let end = 1 + bs.iter().rposition(|b| *b != 0).unwrap_or(bs.len() - 1); + let end = if t == 0 as $t { + 0 + } else { + 1 + bs.iter().rposition(|b| *b != 0).unwrap_or(bs.len() - 1) + }; // SAFETY: // - 0 is a valid start index because bs is guaranteed to always have a length greater or equal than 1 // - end is a valid end index because is bounded between 0 and bs.len() @@ -285,6 +393,12 @@ macro_rules! impl_int { } } + impl From<$t> for Payload { + fn from(t: $t) -> Self { + ZSerde.serialize(t) + } + } + impl Serialize<&$t> for ZSerde { type Output = Payload; @@ -293,11 +407,23 @@ macro_rules! impl_int { } } + impl From<&$t> for Payload { + fn from(t: &$t) -> Self { + ZSerde.serialize(t) + } + } + impl Serialize<&mut $t> for ZSerde { type Output = Payload; fn serialize(self, t: &mut $t) -> Self::Output { - Self.serialize(*t) + ZSerde.serialize(*t) + } + } + + impl From<&mut $t> for Payload { + fn from(t: &mut $t) -> Self { + ZSerde.serialize(t) } } @@ -319,6 +445,14 @@ macro_rules! impl_int { } } + impl TryFrom for $t { + type Error = ZDeserializeError; + + fn try_from(value: Payload) -> Result { + ZSerde.deserialize(&value) + } + } + impl TryFrom<&Payload> for $t { type Error = ZDeserializeError; @@ -349,12 +483,18 @@ impl_int!(f64, ZSerde::ZENOH_FLOAT); // Zenoh bool impl Serialize for ZSerde { - type Output = ZBuf; + type Output = Payload; fn serialize(self, t: bool) -> Self::Output { // SAFETY: casting a bool into an integer is well-defined behaviour. 
// 0 is false, 1 is true: https://doc.rust-lang.org/std/primitive.bool.html - ZBuf::from((t as u8).to_le_bytes()) + Payload::new(ZBuf::from((t as u8).to_le_bytes())) + } +} + +impl From for Payload { + fn from(t: bool) -> Self { + ZSerde.serialize(t) } } @@ -391,6 +531,14 @@ impl Serialize<&serde_json::Value> for ZSerde { } } +impl TryFrom<&serde_json::Value> for Payload { + type Error = serde_json::Error; + + fn try_from(value: &serde_json::Value) -> Result { + ZSerde.serialize(value) + } +} + impl Serialize for ZSerde { type Output = Result; @@ -399,6 +547,14 @@ impl Serialize for ZSerde { } } +impl TryFrom for Payload { + type Error = serde_json::Error; + + fn try_from(value: serde_json::Value) -> Result { + ZSerde.serialize(value) + } +} + impl Deserialize<'_, serde_json::Value> for ZSerde { type Error = serde_json::Error; @@ -407,11 +563,11 @@ impl Deserialize<'_, serde_json::Value> for ZSerde { } } -impl TryFrom for Payload { +impl TryFrom<&Payload> for serde_json::Value { type Error = serde_json::Error; - fn try_from(value: serde_json::Value) -> Result { - ZSerde.serialize(value) + fn try_from(value: &Payload) -> Result { + ZSerde.deserialize(value) } } @@ -426,6 +582,14 @@ impl Serialize<&serde_yaml::Value> for ZSerde { } } +impl TryFrom<&serde_yaml::Value> for Payload { + type Error = serde_yaml::Error; + + fn try_from(value: &serde_yaml::Value) -> Result { + ZSerde.serialize(value) + } +} + impl Serialize for ZSerde { type Output = Result; @@ -434,6 +598,14 @@ impl Serialize for ZSerde { } } +impl TryFrom for Payload { + type Error = serde_yaml::Error; + + fn try_from(value: serde_yaml::Value) -> Result { + ZSerde.serialize(value) + } +} + impl Deserialize<'_, serde_yaml::Value> for ZSerde { type Error = serde_yaml::Error; @@ -442,11 +614,11 @@ impl Deserialize<'_, serde_yaml::Value> for ZSerde { } } -impl TryFrom for Payload { +impl TryFrom<&Payload> for serde_yaml::Value { type Error = serde_yaml::Error; - fn try_from(value: serde_yaml::Value) -> Result 
{ - ZSerde.serialize(value) + fn try_from(value: &Payload) -> Result { + ZSerde.deserialize(value) } } @@ -461,6 +633,14 @@ impl Serialize<&serde_cbor::Value> for ZSerde { } } +impl TryFrom<&serde_cbor::Value> for Payload { + type Error = serde_cbor::Error; + + fn try_from(value: &serde_cbor::Value) -> Result { + ZSerde.serialize(value) + } +} + impl Serialize for ZSerde { type Output = Result; @@ -469,6 +649,14 @@ impl Serialize for ZSerde { } } +impl TryFrom for Payload { + type Error = serde_cbor::Error; + + fn try_from(value: serde_cbor::Value) -> Result { + ZSerde.serialize(value) + } +} + impl Deserialize<'_, serde_cbor::Value> for ZSerde { type Error = serde_cbor::Error; @@ -477,11 +665,11 @@ impl Deserialize<'_, serde_cbor::Value> for ZSerde { } } -impl TryFrom for Payload { +impl TryFrom<&Payload> for serde_cbor::Value { type Error = serde_cbor::Error; - fn try_from(value: serde_cbor::Value) -> Result { - ZSerde.serialize(value) + fn try_from(value: &Payload) -> Result { + ZSerde.deserialize(value) } } @@ -500,6 +688,14 @@ impl Serialize<&serde_pickle::Value> for ZSerde { } } +impl TryFrom<&serde_pickle::Value> for Payload { + type Error = serde_pickle::Error; + + fn try_from(value: &serde_pickle::Value) -> Result { + ZSerde.serialize(value) + } +} + impl Serialize for ZSerde { type Output = Result; @@ -508,6 +704,14 @@ impl Serialize for ZSerde { } } +impl TryFrom for Payload { + type Error = serde_pickle::Error; + + fn try_from(value: serde_pickle::Value) -> Result { + ZSerde.serialize(value) + } +} + impl Deserialize<'_, serde_pickle::Value> for ZSerde { type Error = serde_pickle::Error; @@ -516,11 +720,11 @@ impl Deserialize<'_, serde_pickle::Value> for ZSerde { } } -impl TryFrom for Payload { +impl TryFrom<&Payload> for serde_pickle::Value { type Error = serde_pickle::Error; - fn try_from(value: serde_pickle::Value) -> Result { - ZSerde.serialize(value) + fn try_from(value: &Payload) -> Result { + ZSerde.deserialize(value) } } @@ -553,15 +757,86 @@ 
impl Serialize for ZSerde { } } -impl From for Payload +// Tuple +impl Serialize<(A, B)> for ZSerde where - ZSerde: Serialize, + A: Into, + B: Into, { - fn from(t: T) -> Self { - ZSerde.serialize(t) + type Output = Payload; + + fn serialize(self, t: (A, B)) -> Self::Output { + let (a, b) = t; + + let codec = Zenoh080::new(); + let mut buffer: ZBuf = ZBuf::empty(); + let mut writer = buffer.writer(); + let apld: Payload = a.into(); + let bpld: Payload = b.into(); + + // SAFETY: we are serializing slices on a ZBuf, so serialization will never + // fail unless we run out of memory. In that case, Rust memory allocator + // will panic before the serializer has any chance to fail. + unsafe { + codec.write(&mut writer, &apld.0).unwrap_unchecked(); + codec.write(&mut writer, &bpld.0).unwrap_unchecked(); + } + + Payload::new(buffer) + } +} + +impl<'a, A, B> Deserialize<'a, (A, B)> for ZSerde +where + A: TryFrom, + >::Error: Debug, + B: TryFrom, + >::Error: Debug, +{ + type Error = ZError; + + fn deserialize(self, payload: &'a Payload) -> Result<(A, B), Self::Error> { + let codec = Zenoh080::new(); + let mut reader = payload.0.reader(); + + let abuf: ZBuf = codec.read(&mut reader).map_err(|e| zerror!("{:?}", e))?; + let apld = Payload::new(abuf); + + let bbuf: ZBuf = codec.read(&mut reader).map_err(|e| zerror!("{:?}", e))?; + let bpld = Payload::new(bbuf); + + let a = A::try_from(apld).map_err(|e| zerror!("{:?}", e))?; + let b = B::try_from(bpld).map_err(|e| zerror!("{:?}", e))?; + Ok((a, b)) } } +// Iterator +// impl Serialize for ZSerde +// where +// I: Iterator, +// T: Into, +// { +// type Output = Payload; + +// fn serialize(self, iter: I) -> Self::Output { +// let codec = Zenoh080::new(); +// let mut buffer: ZBuf = ZBuf::empty(); +// let mut writer = buffer.writer(); +// for t in iter { +// let tpld: Payload = t.into(); +// // SAFETY: we are serializing slices on a ZBuf, so serialization will never +// // fail unless we run out of memory. 
In that case, Rust memory allocator +// // will panic before the serializer has any chance to fail. +// unsafe { +// codec.write(&mut writer, &tpld.0).unwrap_unchecked(); +// } +// } + +// Payload::new(buffer) +// } +// } + // For convenience to always convert a Value the examples #[derive(Debug, Clone, PartialEq, Eq)] pub enum StringOrBase64 { @@ -619,14 +894,18 @@ mod tests { ($t:ty, $in:expr) => { let i = $in; let t = i.clone(); + println!("Serialize:\t{:?}", t); let v = Payload::serialize(t); + println!("Deserialize:\t{:?}", v); let o: $t = v.deserialize().unwrap(); - assert_eq!(i, o) + assert_eq!(i, o); + println!(""); }; } let mut rng = rand::thread_rng(); + // unsigned integer serialize_deserialize!(u8, u8::MIN); serialize_deserialize!(u16, u16::MIN); serialize_deserialize!(u32, u32::MIN); @@ -647,6 +926,7 @@ mod tests { serialize_deserialize!(usize, rng.gen::()); } + // signed integer serialize_deserialize!(i8, i8::MIN); serialize_deserialize!(i16, i16::MIN); serialize_deserialize!(i32, i32::MIN); @@ -667,6 +947,7 @@ mod tests { serialize_deserialize!(isize, rng.gen::()); } + // float serialize_deserialize!(f32, f32::MIN); serialize_deserialize!(f64, f64::MIN); @@ -678,13 +959,747 @@ mod tests { serialize_deserialize!(f64, rng.gen::()); } + // String serialize_deserialize!(String, ""); serialize_deserialize!(String, String::from("abcdefghijklmnopqrstuvwxyz")); + // Vec serialize_deserialize!(Vec, vec![0u8; 0]); serialize_deserialize!(Vec, vec![0u8; 64]); + // ZBuf serialize_deserialize!(ZBuf, ZBuf::from(vec![0u8; 0])); serialize_deserialize!(ZBuf, ZBuf::from(vec![0u8; 64])); + + // Tuple + serialize_deserialize!((usize, usize), (0, 1)); + serialize_deserialize!((usize, String), (0, String::from("a"))); + serialize_deserialize!((String, String), (String::from("a"), String::from("b"))); + + // Iterator + // let mut hm = Vec::new(); + // hm.push(0); + // hm.push(1); + // Payload::serialize(hm.iter()); + + // let mut hm = HashMap::new(); + // hm.insert(0, 0); + 
// hm.insert(1, 1); + // Payload::serialize(hm.iter().map(|(k, v)| (k, v))); + // for (k, v) in sample.payload().iter::<(String, serde_json::Value)>() {} } } + +// macro_rules! impl_iterator_inner { +// ($iter:expr) => {{ +// let codec = Zenoh080::new(); +// let mut buffer: ZBuf = ZBuf::empty(); +// let mut writer = buffer.writer(); +// for t in $iter { +// let tpld = ZSerde.serialize(t); +// // SAFETY: we are serializing slices on a ZBuf, so serialization will never +// // fail unless we run out of memory. In that case, Rust memory allocator +// // will panic before the serializer has any chance to fail. +// unsafe { +// codec.write(&mut writer, &tpld.0).unwrap_unchecked(); +// } +// } + +// Payload::new(buffer) +// }}; +// } + +// impl<'a> Serialize> for ZSerde { +// type Output = Payload; + +// fn serialize(self, iter: std::slice::Iter<'_, i32>) -> Self::Output { +// impl_iterator_inner!(iter) +// } +// } + +// impl<'a> Serialize> for ZSerde { +// type Output = Payload; + +// fn serialize(self, iter: std::slice::IterMut<'_, i32>) -> Self::Output { +// impl_iterator_inner!(iter) +// } +// } + +// impl Serialize<&mut dyn Iterator> for ZSerde { +// type Output = Payload; + +// fn serialize(self, iter: &mut dyn Iterator) -> Self::Output { +// let codec = Zenoh080::new(); +// let mut buffer: ZBuf = ZBuf::empty(); +// let mut writer = buffer.writer(); +// for t in iter { +// let tpld = ZSerde.serialize(t); +// // SAFETY: we are serializing slices on a ZBuf, so serialization will never +// // fail unless we run out of memory. In that case, Rust memory allocator +// // will panic before the serializer has any chance to fail. 
+// unsafe { +// codec.write(&mut writer, &tpld.0).unwrap_unchecked(); +// } +// } + +// Payload::new(buffer) +// } +// } + +// impl Serialize<(A, B)> for ZSerde +// where +// ZSerde: Serialize, +// ZSerde: Serialize, +// { +// type Output = Payload; + +// fn serialize(self, t: (A, B)) -> Self::Output { +// let (a, b) = t; + +// let codec = Zenoh080::new(); +// let mut buffer: ZBuf = ZBuf::empty(); +// let mut writer = buffer.writer(); +// let apld = Payload::serialize::(a); +// let bpld = Payload::serialize::(b); + +// // SAFETY: we are serializing slices on a ZBuf, so serialization will never +// // fail unless we run out of memory. In that case, Rust memory allocator +// // will panic before the serializer has any chance to fail. +// unsafe { +// codec.write(&mut writer, &apld.0).unwrap_unchecked(); +// codec.write(&mut writer, &bpld.0).unwrap_unchecked(); +// } + +// Payload::new(buffer) +// } +// } + +// impl<'a, A, B> Deserialize<'a, (A, B)> for ZSerde +// where +// A: TryFrom, +// ZSerde: Deserialize<'a, A>, +// >::Error: Debug, +// B: TryFrom, +// ZSerde: Deserialize<'a, B>, +// >::Error: Debug, +// { +// type Error = ZError; + +// fn deserialize(self, payload: &'a Payload) -> Result<(A, B), Self::Error> { +// let codec = Zenoh080::new(); +// let mut reader = payload.0.reader(); + +// let abuf: ZBuf = codec.read(&mut reader).map_err(|e| zerror!("{:?}", e))?; +// let apld = Payload::new(abuf); + +// let bbuf: ZBuf = codec.read(&mut reader).map_err(|e| zerror!("{:?}", e))?; +// let bpld = Payload::new(bbuf); + +// let a = A::try_from(apld).map_err(|e| zerror!("{:?}", e))?; +// let b = B::try_from(bpld).map_err(|e| zerror!("{:?}", e))?; +// Ok((a, b)) +// } +// } + +// impl Serialize<&mut dyn Iterator> for ZSerde +// where +// ZSerde: Serialize, +// { +// type Output = Payload; + +// fn serialize(self, iter: &mut dyn Iterator) -> Self::Output { +// let codec = Zenoh080::new(); +// let mut buffer: ZBuf = ZBuf::empty(); +// let mut writer = buffer.writer(); +// 
for t in iter { +// let tpld = ZSerde.serialize(t); +// // SAFETY: we are serializing slices on a ZBuf, so serialization will never +// // fail unless we run out of memory. In that case, Rust memory allocator +// // will panic before the serializer has any chance to fail. +// unsafe { +// codec.write(&mut writer, &tpld.0).unwrap_unchecked(); +// } +// } + +// Payload::new(buffer) +// } +// } + +// Iterator +// macro_rules! impl_iterator_serialize { +// ($a:ty) => { +// impl Serialize<&mut dyn Iterator> for ZSerde +// { +// type Output = Payload; + +// fn serialize(self, iter: &mut dyn Iterator) -> Self::Output { +// let codec = Zenoh080::new(); +// let mut buffer: ZBuf = ZBuf::empty(); +// let mut writer = buffer.writer(); +// for t in iter { +// let tpld = ZSerde.serialize(t); +// // SAFETY: we are serializing slices on a ZBuf, so serialization will never +// // fail unless we run out of memory. In that case, Rust memory allocator +// // will panic before the serializer has any chance to fail. +// unsafe { +// codec.write(&mut writer, &tpld.0).unwrap_unchecked(); +// } +// } + +// Payload::new(buffer) +// } +// } +// }; +// } + +// Tuples +// macro_rules! impl_tuple_serialize { +// ($a:ty, $b:ty) => { +// impl Serialize<($a, $b)> for ZSerde +// { +// type Output = Payload; + +// fn serialize(self, t: ($a, $b)) -> Self::Output { +// let (a, b) = t; + +// let codec = Zenoh080::new(); +// let mut buffer: ZBuf = ZBuf::empty(); +// let mut writer = buffer.writer(); +// let apld = Payload::serialize::<$a>(a); +// let bpld = Payload::serialize::<$b>(b); + +// // SAFETY: we are serializing slices on a ZBuf, so serialization will never +// // fail unless we run out of memory. In that case, Rust memory allocator +// // will panic before the serializer has any chance to fail. 
+// unsafe { +// codec.write(&mut writer, &apld.0).unwrap_unchecked(); +// codec.write(&mut writer, &bpld.0).unwrap_unchecked(); +// } + +// Payload::new(buffer) +// } +// } +// } + +// } + +// macro_rules! impl_tuple_deserialize { +// ($a:ty, $b:ty) => { +// impl<'a> Deserialize<'a, ($a, $b)> for ZSerde { +// type Error = ZError; + +// fn deserialize(self, payload: &'a Payload) -> Result<($a, $b), Self::Error> { +// let codec = Zenoh080::new(); +// let mut reader = payload.0.reader(); + +// let abuf: ZBuf = codec.read(&mut reader).map_err(|e| zerror!("{:?}", e))?; +// let apld = Payload::new(abuf); + +// let bbuf: ZBuf = codec.read(&mut reader).map_err(|e| zerror!("{:?}", e))?; +// let bpld = Payload::new(bbuf); + +// let a = apld.deserialize::<$a>().map_err(|e| zerror!("{:?}", e))?; +// let b = bpld.deserialize::<$b>().map_err(|e| zerror!("{:?}", e))?; +// Ok((a, b)) +// } +// } +// }; +// } + +// impl_tuple_serialize!(u8, u8); +// impl_tuple_deserialize!(u8, u8); +// impl_tuple_serialize!(u8, u16); +// impl_tuple_deserialize!(u8, u16); +// impl_tuple_serialize!(u8, u32); +// impl_tuple_deserialize!(u8, u32); +// impl_tuple_serialize!(u8, u64); +// impl_tuple_deserialize!(u8, u64); +// impl_tuple_serialize!(u8, usize); +// impl_tuple_deserialize!(u8, usize); +// impl_tuple_serialize!(u8, i8); +// impl_tuple_deserialize!(u8, i8); +// impl_tuple_serialize!(u8, i16); +// impl_tuple_deserialize!(u8, i16); +// impl_tuple_serialize!(u8, i32); +// impl_tuple_deserialize!(u8, i32); +// impl_tuple_serialize!(u8, isize); +// impl_tuple_deserialize!(u8, isize); +// impl_tuple_serialize!(u8, f32); +// impl_tuple_deserialize!(u8, f32); +// impl_tuple_serialize!(u8, f64); +// impl_tuple_deserialize!(u8, f64); +// impl_tuple_serialize!(u8, bool); +// impl_tuple_deserialize!(u8, bool); +// impl_tuple_serialize!(u8, ZBuf); +// impl_tuple_deserialize!(u8, ZBuf); +// impl_tuple_serialize!(u8, Vec); +// impl_tuple_deserialize!(u8, Vec); +// impl_tuple_serialize!(u8, String); +// 
impl_tuple_deserialize!(u8, String); +// impl_tuple_serialize!(u8, &[u8]); +// impl_tuple_serialize!(u16, u8); +// impl_tuple_deserialize!(u16, u8); +// impl_tuple_serialize!(u16, u16); +// impl_tuple_deserialize!(u16, u16); +// impl_tuple_serialize!(u16, u32); +// impl_tuple_deserialize!(u16, u32); +// impl_tuple_serialize!(u16, u64); +// impl_tuple_deserialize!(u16, u64); +// impl_tuple_serialize!(u16, usize); +// impl_tuple_deserialize!(u16, usize); +// impl_tuple_serialize!(u16, i8); +// impl_tuple_deserialize!(u16, i8); +// impl_tuple_serialize!(u16, i16); +// impl_tuple_deserialize!(u16, i16); +// impl_tuple_serialize!(u16, i32); +// impl_tuple_deserialize!(u16, i32); +// impl_tuple_serialize!(u16, isize); +// impl_tuple_deserialize!(u16, isize); +// impl_tuple_serialize!(u16, f32); +// impl_tuple_deserialize!(u16, f32); +// impl_tuple_serialize!(u16, f64); +// impl_tuple_deserialize!(u16, f64); +// impl_tuple_serialize!(u16, bool); +// impl_tuple_deserialize!(u16, bool); +// impl_tuple_serialize!(u16, ZBuf); +// impl_tuple_deserialize!(u16, ZBuf); +// impl_tuple_serialize!(u16, Vec); +// impl_tuple_deserialize!(u16, Vec); +// impl_tuple_serialize!(u16, String); +// impl_tuple_deserialize!(u16, String); +// impl_tuple_serialize!(u16, &[u8]); +// impl_tuple_serialize!(u32, u8); +// impl_tuple_deserialize!(u32, u8); +// impl_tuple_serialize!(u32, u16); +// impl_tuple_deserialize!(u32, u16); +// impl_tuple_serialize!(u32, u32); +// impl_tuple_deserialize!(u32, u32); +// impl_tuple_serialize!(u32, u64); +// impl_tuple_deserialize!(u32, u64); +// impl_tuple_serialize!(u32, usize); +// impl_tuple_deserialize!(u32, usize); +// impl_tuple_serialize!(u32, i8); +// impl_tuple_deserialize!(u32, i8); +// impl_tuple_serialize!(u32, i16); +// impl_tuple_deserialize!(u32, i16); +// impl_tuple_serialize!(u32, i32); +// impl_tuple_deserialize!(u32, i32); +// impl_tuple_serialize!(u32, isize); +// impl_tuple_deserialize!(u32, isize); +// impl_tuple_serialize!(u32, f32); +// 
impl_tuple_deserialize!(u32, f32); +// impl_tuple_serialize!(u32, f64); +// impl_tuple_deserialize!(u32, f64); +// impl_tuple_serialize!(u32, bool); +// impl_tuple_deserialize!(u32, bool); +// impl_tuple_serialize!(u32, ZBuf); +// impl_tuple_deserialize!(u32, ZBuf); +// impl_tuple_serialize!(u32, Vec); +// impl_tuple_deserialize!(u32, Vec); +// impl_tuple_serialize!(u32, String); +// impl_tuple_deserialize!(u32, String); +// impl_tuple_serialize!(u32, &[u8]); +// impl_tuple_serialize!(u64, u8); +// impl_tuple_deserialize!(u64, u8); +// impl_tuple_serialize!(u64, u16); +// impl_tuple_deserialize!(u64, u16); +// impl_tuple_serialize!(u64, u32); +// impl_tuple_deserialize!(u64, u32); +// impl_tuple_serialize!(u64, u64); +// impl_tuple_deserialize!(u64, u64); +// impl_tuple_serialize!(u64, usize); +// impl_tuple_deserialize!(u64, usize); +// impl_tuple_serialize!(u64, i8); +// impl_tuple_deserialize!(u64, i8); +// impl_tuple_serialize!(u64, i16); +// impl_tuple_deserialize!(u64, i16); +// impl_tuple_serialize!(u64, i32); +// impl_tuple_deserialize!(u64, i32); +// impl_tuple_serialize!(u64, isize); +// impl_tuple_deserialize!(u64, isize); +// impl_tuple_serialize!(u64, f32); +// impl_tuple_deserialize!(u64, f32); +// impl_tuple_serialize!(u64, f64); +// impl_tuple_deserialize!(u64, f64); +// impl_tuple_serialize!(u64, bool); +// impl_tuple_deserialize!(u64, bool); +// impl_tuple_serialize!(u64, ZBuf); +// impl_tuple_deserialize!(u64, ZBuf); +// impl_tuple_serialize!(u64, Vec); +// impl_tuple_deserialize!(u64, Vec); +// impl_tuple_serialize!(u64, String); +// impl_tuple_deserialize!(u64, String); +// impl_tuple_serialize!(u64, &[u8]); +// impl_tuple_serialize!(usize, u8); +// impl_tuple_deserialize!(usize, u8); +// impl_tuple_serialize!(usize, u16); +// impl_tuple_deserialize!(usize, u16); +// impl_tuple_serialize!(usize, u32); +// impl_tuple_deserialize!(usize, u32); +// impl_tuple_serialize!(usize, u64); +// impl_tuple_deserialize!(usize, u64); +// 
impl_tuple_serialize!(usize, usize); +// impl_tuple_deserialize!(usize, usize); +// impl_tuple_serialize!(usize, i8); +// impl_tuple_deserialize!(usize, i8); +// impl_tuple_serialize!(usize, i16); +// impl_tuple_deserialize!(usize, i16); +// impl_tuple_serialize!(usize, i32); +// impl_tuple_deserialize!(usize, i32); +// impl_tuple_serialize!(usize, isize); +// impl_tuple_deserialize!(usize, isize); +// impl_tuple_serialize!(usize, f32); +// impl_tuple_deserialize!(usize, f32); +// impl_tuple_serialize!(usize, f64); +// impl_tuple_deserialize!(usize, f64); +// impl_tuple_serialize!(usize, bool); +// impl_tuple_deserialize!(usize, bool); +// impl_tuple_serialize!(usize, ZBuf); +// impl_tuple_deserialize!(usize, ZBuf); +// impl_tuple_serialize!(usize, Vec); +// impl_tuple_deserialize!(usize, Vec); +// impl_tuple_serialize!(usize, String); +// impl_tuple_deserialize!(usize, String); +// impl_tuple_serialize!(usize, &[u8]); +// impl_tuple_serialize!(i8, u8); +// impl_tuple_deserialize!(i8, u8); +// impl_tuple_serialize!(i8, u16); +// impl_tuple_deserialize!(i8, u16); +// impl_tuple_serialize!(i8, u32); +// impl_tuple_deserialize!(i8, u32); +// impl_tuple_serialize!(i8, u64); +// impl_tuple_deserialize!(i8, u64); +// impl_tuple_serialize!(i8, usize); +// impl_tuple_deserialize!(i8, usize); +// impl_tuple_serialize!(i8, i8); +// impl_tuple_deserialize!(i8, i8); +// impl_tuple_serialize!(i8, i16); +// impl_tuple_deserialize!(i8, i16); +// impl_tuple_serialize!(i8, i32); +// impl_tuple_deserialize!(i8, i32); +// impl_tuple_serialize!(i8, isize); +// impl_tuple_deserialize!(i8, isize); +// impl_tuple_serialize!(i8, f32); +// impl_tuple_deserialize!(i8, f32); +// impl_tuple_serialize!(i8, f64); +// impl_tuple_deserialize!(i8, f64); +// impl_tuple_serialize!(i8, bool); +// impl_tuple_deserialize!(i8, bool); +// impl_tuple_serialize!(i8, ZBuf); +// impl_tuple_deserialize!(i8, ZBuf); +// impl_tuple_serialize!(i8, Vec); +// impl_tuple_deserialize!(i8, Vec); +// 
impl_tuple_serialize!(i8, String); +// impl_tuple_deserialize!(i8, String); +// impl_tuple_serialize!(i8, &[u8]); +// impl_tuple_serialize!(i16, u8); +// impl_tuple_deserialize!(i16, u8); +// impl_tuple_serialize!(i16, u16); +// impl_tuple_deserialize!(i16, u16); +// impl_tuple_serialize!(i16, u32); +// impl_tuple_deserialize!(i16, u32); +// impl_tuple_serialize!(i16, u64); +// impl_tuple_deserialize!(i16, u64); +// impl_tuple_serialize!(i16, usize); +// impl_tuple_deserialize!(i16, usize); +// impl_tuple_serialize!(i16, i8); +// impl_tuple_deserialize!(i16, i8); +// impl_tuple_serialize!(i16, i16); +// impl_tuple_deserialize!(i16, i16); +// impl_tuple_serialize!(i16, i32); +// impl_tuple_deserialize!(i16, i32); +// impl_tuple_serialize!(i16, isize); +// impl_tuple_deserialize!(i16, isize); +// impl_tuple_serialize!(i16, f32); +// impl_tuple_deserialize!(i16, f32); +// impl_tuple_serialize!(i16, f64); +// impl_tuple_deserialize!(i16, f64); +// impl_tuple_serialize!(i16, bool); +// impl_tuple_deserialize!(i16, bool); +// impl_tuple_serialize!(i16, ZBuf); +// impl_tuple_deserialize!(i16, ZBuf); +// impl_tuple_serialize!(i16, Vec); +// impl_tuple_deserialize!(i16, Vec); +// impl_tuple_serialize!(i16, String); +// impl_tuple_deserialize!(i16, String); +// impl_tuple_serialize!(i16, &[u8]); +// impl_tuple_serialize!(i32, u8); +// impl_tuple_deserialize!(i32, u8); +// impl_tuple_serialize!(i32, u16); +// impl_tuple_deserialize!(i32, u16); +// impl_tuple_serialize!(i32, u32); +// impl_tuple_deserialize!(i32, u32); +// impl_tuple_serialize!(i32, u64); +// impl_tuple_deserialize!(i32, u64); +// impl_tuple_serialize!(i32, usize); +// impl_tuple_deserialize!(i32, usize); +// impl_tuple_serialize!(i32, i8); +// impl_tuple_deserialize!(i32, i8); +// impl_tuple_serialize!(i32, i16); +// impl_tuple_deserialize!(i32, i16); +// impl_tuple_serialize!(i32, i32); +// impl_tuple_deserialize!(i32, i32); +// impl_tuple_serialize!(i32, isize); +// impl_tuple_deserialize!(i32, isize); +// 
impl_tuple_serialize!(i32, f32); +// impl_tuple_deserialize!(i32, f32); +// impl_tuple_serialize!(i32, f64); +// impl_tuple_deserialize!(i32, f64); +// impl_tuple_serialize!(i32, bool); +// impl_tuple_deserialize!(i32, bool); +// impl_tuple_serialize!(i32, ZBuf); +// impl_tuple_deserialize!(i32, ZBuf); +// impl_tuple_serialize!(i32, Vec); +// impl_tuple_deserialize!(i32, Vec); +// impl_tuple_serialize!(i32, String); +// impl_tuple_deserialize!(i32, String); +// impl_tuple_serialize!(i32, &[u8]); +// impl_tuple_serialize!(isize, u8); +// impl_tuple_deserialize!(isize, u8); +// impl_tuple_serialize!(isize, u16); +// impl_tuple_deserialize!(isize, u16); +// impl_tuple_serialize!(isize, u32); +// impl_tuple_deserialize!(isize, u32); +// impl_tuple_serialize!(isize, u64); +// impl_tuple_deserialize!(isize, u64); +// impl_tuple_serialize!(isize, usize); +// impl_tuple_deserialize!(isize, usize); +// impl_tuple_serialize!(isize, i8); +// impl_tuple_deserialize!(isize, i8); +// impl_tuple_serialize!(isize, i16); +// impl_tuple_deserialize!(isize, i16); +// impl_tuple_serialize!(isize, i32); +// impl_tuple_deserialize!(isize, i32); +// impl_tuple_serialize!(isize, isize); +// impl_tuple_deserialize!(isize, isize); +// impl_tuple_serialize!(isize, f32); +// impl_tuple_deserialize!(isize, f32); +// impl_tuple_serialize!(isize, f64); +// impl_tuple_deserialize!(isize, f64); +// impl_tuple_serialize!(isize, bool); +// impl_tuple_deserialize!(isize, bool); +// impl_tuple_serialize!(isize, ZBuf); +// impl_tuple_deserialize!(isize, ZBuf); +// impl_tuple_serialize!(isize, Vec); +// impl_tuple_deserialize!(isize, Vec); +// impl_tuple_serialize!(isize, String); +// impl_tuple_deserialize!(isize, String); +// impl_tuple_serialize!(isize, &[u8]); +// impl_tuple_serialize!(f32, u8); +// impl_tuple_deserialize!(f32, u8); +// impl_tuple_serialize!(f32, u16); +// impl_tuple_deserialize!(f32, u16); +// impl_tuple_serialize!(f32, u32); +// impl_tuple_deserialize!(f32, u32); +// 
impl_tuple_serialize!(f32, u64); +// impl_tuple_deserialize!(f32, u64); +// impl_tuple_serialize!(f32, usize); +// impl_tuple_deserialize!(f32, usize); +// impl_tuple_serialize!(f32, i8); +// impl_tuple_deserialize!(f32, i8); +// impl_tuple_serialize!(f32, i16); +// impl_tuple_deserialize!(f32, i16); +// impl_tuple_serialize!(f32, i32); +// impl_tuple_deserialize!(f32, i32); +// impl_tuple_serialize!(f32, isize); +// impl_tuple_deserialize!(f32, isize); +// impl_tuple_serialize!(f32, f32); +// impl_tuple_deserialize!(f32, f32); +// impl_tuple_serialize!(f32, f64); +// impl_tuple_deserialize!(f32, f64); +// impl_tuple_serialize!(f32, bool); +// impl_tuple_deserialize!(f32, bool); +// impl_tuple_serialize!(f32, ZBuf); +// impl_tuple_deserialize!(f32, ZBuf); +// impl_tuple_serialize!(f32, Vec); +// impl_tuple_deserialize!(f32, Vec); +// impl_tuple_serialize!(f32, String); +// impl_tuple_deserialize!(f32, String); +// impl_tuple_serialize!(f32, &[u8]); +// impl_tuple_serialize!(f64, u8); +// impl_tuple_deserialize!(f64, u8); +// impl_tuple_serialize!(f64, u16); +// impl_tuple_deserialize!(f64, u16); +// impl_tuple_serialize!(f64, u32); +// impl_tuple_deserialize!(f64, u32); +// impl_tuple_serialize!(f64, u64); +// impl_tuple_deserialize!(f64, u64); +// impl_tuple_serialize!(f64, usize); +// impl_tuple_deserialize!(f64, usize); +// impl_tuple_serialize!(f64, i8); +// impl_tuple_deserialize!(f64, i8); +// impl_tuple_serialize!(f64, i16); +// impl_tuple_deserialize!(f64, i16); +// impl_tuple_serialize!(f64, i32); +// impl_tuple_deserialize!(f64, i32); +// impl_tuple_serialize!(f64, isize); +// impl_tuple_deserialize!(f64, isize); +// impl_tuple_serialize!(f64, f32); +// impl_tuple_deserialize!(f64, f32); +// impl_tuple_serialize!(f64, f64); +// impl_tuple_deserialize!(f64, f64); +// impl_tuple_serialize!(f64, bool); +// impl_tuple_deserialize!(f64, bool); +// impl_tuple_serialize!(f64, ZBuf); +// impl_tuple_deserialize!(f64, ZBuf); +// impl_tuple_serialize!(f64, Vec); +// 
impl_tuple_deserialize!(f64, Vec); +// impl_tuple_serialize!(f64, String); +// impl_tuple_deserialize!(f64, String); +// impl_tuple_serialize!(f64, &[u8]); +// impl_tuple_serialize!(bool, u8); +// impl_tuple_deserialize!(bool, u8); +// impl_tuple_serialize!(bool, u16); +// impl_tuple_deserialize!(bool, u16); +// impl_tuple_serialize!(bool, u32); +// impl_tuple_deserialize!(bool, u32); +// impl_tuple_serialize!(bool, u64); +// impl_tuple_deserialize!(bool, u64); +// impl_tuple_serialize!(bool, usize); +// impl_tuple_deserialize!(bool, usize); +// impl_tuple_serialize!(bool, i8); +// impl_tuple_deserialize!(bool, i8); +// impl_tuple_serialize!(bool, i16); +// impl_tuple_deserialize!(bool, i16); +// impl_tuple_serialize!(bool, i32); +// impl_tuple_deserialize!(bool, i32); +// impl_tuple_serialize!(bool, isize); +// impl_tuple_deserialize!(bool, isize); +// impl_tuple_serialize!(bool, f32); +// impl_tuple_deserialize!(bool, f32); +// impl_tuple_serialize!(bool, f64); +// impl_tuple_deserialize!(bool, f64); +// impl_tuple_serialize!(bool, bool); +// impl_tuple_deserialize!(bool, bool); +// impl_tuple_serialize!(bool, ZBuf); +// impl_tuple_deserialize!(bool, ZBuf); +// impl_tuple_serialize!(bool, Vec); +// impl_tuple_deserialize!(bool, Vec); +// impl_tuple_serialize!(bool, String); +// impl_tuple_deserialize!(bool, String); +// impl_tuple_serialize!(bool, &[u8]); +// impl_tuple_serialize!(ZBuf, u8); +// impl_tuple_deserialize!(ZBuf, u8); +// impl_tuple_serialize!(ZBuf, u16); +// impl_tuple_deserialize!(ZBuf, u16); +// impl_tuple_serialize!(ZBuf, u32); +// impl_tuple_deserialize!(ZBuf, u32); +// impl_tuple_serialize!(ZBuf, u64); +// impl_tuple_deserialize!(ZBuf, u64); +// impl_tuple_serialize!(ZBuf, usize); +// impl_tuple_deserialize!(ZBuf, usize); +// impl_tuple_serialize!(ZBuf, i8); +// impl_tuple_deserialize!(ZBuf, i8); +// impl_tuple_serialize!(ZBuf, i16); +// impl_tuple_deserialize!(ZBuf, i16); +// impl_tuple_serialize!(ZBuf, i32); +// impl_tuple_deserialize!(ZBuf, 
i32); +// impl_tuple_serialize!(ZBuf, isize); +// impl_tuple_deserialize!(ZBuf, isize); +// impl_tuple_serialize!(ZBuf, f32); +// impl_tuple_deserialize!(ZBuf, f32); +// impl_tuple_serialize!(ZBuf, f64); +// impl_tuple_deserialize!(ZBuf, f64); +// impl_tuple_serialize!(ZBuf, bool); +// impl_tuple_deserialize!(ZBuf, bool); +// impl_tuple_serialize!(ZBuf, ZBuf); +// impl_tuple_deserialize!(ZBuf, ZBuf); +// impl_tuple_serialize!(ZBuf, Vec); +// impl_tuple_deserialize!(ZBuf, Vec); +// impl_tuple_serialize!(ZBuf, String); +// impl_tuple_deserialize!(ZBuf, String); +// impl_tuple_serialize!(ZBuf, &[u8]); +// impl_tuple_serialize!(Vec, u8); +// impl_tuple_deserialize!(Vec, u8); +// impl_tuple_serialize!(Vec, u16); +// impl_tuple_deserialize!(Vec, u16); +// impl_tuple_serialize!(Vec, u32); +// impl_tuple_deserialize!(Vec, u32); +// impl_tuple_serialize!(Vec, u64); +// impl_tuple_deserialize!(Vec, u64); +// impl_tuple_serialize!(Vec, usize); +// impl_tuple_deserialize!(Vec, usize); +// impl_tuple_serialize!(Vec, i8); +// impl_tuple_deserialize!(Vec, i8); +// impl_tuple_serialize!(Vec, i16); +// impl_tuple_deserialize!(Vec, i16); +// impl_tuple_serialize!(Vec, i32); +// impl_tuple_deserialize!(Vec, i32); +// impl_tuple_serialize!(Vec, isize); +// impl_tuple_deserialize!(Vec, isize); +// impl_tuple_serialize!(Vec, f32); +// impl_tuple_deserialize!(Vec, f32); +// impl_tuple_serialize!(Vec, f64); +// impl_tuple_deserialize!(Vec, f64); +// impl_tuple_serialize!(Vec, bool); +// impl_tuple_deserialize!(Vec, bool); +// impl_tuple_serialize!(Vec, ZBuf); +// impl_tuple_deserialize!(Vec, ZBuf); +// impl_tuple_serialize!(Vec, Vec); +// impl_tuple_deserialize!(Vec, Vec); +// impl_tuple_serialize!(Vec, String); +// impl_tuple_deserialize!(Vec, String); +// impl_tuple_serialize!(Vec, &[u8]); +// impl_tuple_serialize!(String, u8); +// impl_tuple_deserialize!(String, u8); +// impl_tuple_serialize!(String, u16); +// impl_tuple_deserialize!(String, u16); +// impl_tuple_serialize!(String, 
u32); +// impl_tuple_deserialize!(String, u32); +// impl_tuple_serialize!(String, u64); +// impl_tuple_deserialize!(String, u64); +// impl_tuple_serialize!(String, usize); +// impl_tuple_deserialize!(String, usize); +// impl_tuple_serialize!(String, i8); +// impl_tuple_deserialize!(String, i8); +// impl_tuple_serialize!(String, i16); +// impl_tuple_deserialize!(String, i16); +// impl_tuple_serialize!(String, i32); +// impl_tuple_deserialize!(String, i32); +// impl_tuple_serialize!(String, isize); +// impl_tuple_deserialize!(String, isize); +// impl_tuple_serialize!(String, f32); +// impl_tuple_deserialize!(String, f32); +// impl_tuple_serialize!(String, f64); +// impl_tuple_deserialize!(String, f64); +// impl_tuple_serialize!(String, bool); +// impl_tuple_deserialize!(String, bool); +// impl_tuple_serialize!(String, ZBuf); +// impl_tuple_deserialize!(String, ZBuf); +// impl_tuple_serialize!(String, Vec); +// impl_tuple_deserialize!(String, Vec); +// impl_tuple_serialize!(String, String); +// impl_tuple_deserialize!(String, String); +// impl_tuple_serialize!(String, &[u8]); +// impl_tuple_serialize!(&[u8], u8); +// impl_tuple_serialize!(&[u8], u16); +// impl_tuple_serialize!(&[u8], u32); +// impl_tuple_serialize!(&[u8], u64); +// impl_tuple_serialize!(&[u8], usize); +// impl_tuple_serialize!(&[u8], i8); +// impl_tuple_serialize!(&[u8], i16); +// impl_tuple_serialize!(&[u8], i32); +// impl_tuple_serialize!(&[u8], isize); +// impl_tuple_serialize!(&[u8], f32); +// impl_tuple_serialize!(&[u8], f64); +// impl_tuple_serialize!(&[u8], bool); +// impl_tuple_serialize!(&[u8], ZBuf); +// impl_tuple_serialize!(&[u8], Vec); +// impl_tuple_serialize!(&[u8], String); +// impl_tuple_serialize!(&[u8], &[u8]); +// impl_iterator_serialize!(u8); +// impl_iterator_serialize!(u16); +// impl_iterator_serialize!(u32); +// impl_iterator_serialize!(u64); +// impl_iterator_serialize!(usize); +// impl_iterator_serialize!(i8); +// impl_iterator_serialize!(i16); +// 
impl_iterator_serialize!(i32); +// impl_iterator_serialize!(isize); +// impl_iterator_serialize!(f32); +// impl_iterator_serialize!(f64); +// impl_iterator_serialize!(bool); +// impl_iterator_serialize!(ZBuf); +// impl_iterator_serialize!(Vec); +// impl_iterator_serialize!(String); +// impl_iterator_serialize!(&[u8]); diff --git a/zenoh/src/queryable.rs b/zenoh/src/queryable.rs index 58589bfe8f..ae9119ac8a 100644 --- a/zenoh/src/queryable.rs +++ b/zenoh/src/queryable.rs @@ -19,6 +19,7 @@ use crate::handlers::{locked, DefaultHandler}; use crate::net::primitives::Primitives; use crate::prelude::*; use crate::sample::QoS; +#[cfg(feature = "unstable")] use crate::sample::SourceInfo; use crate::Id; use crate::SessionRef; diff --git a/zenoh/src/session.rs b/zenoh/src/session.rs index 9bc6c9c331..0c1c193568 100644 --- a/zenoh/src/session.rs +++ b/zenoh/src/session.rs @@ -57,12 +57,7 @@ use zenoh_collections::SingleOrVec; use zenoh_config::unwrap_or_default; use zenoh_core::{zconfigurable, zread, Resolve, ResolveClosure, ResolveFuture, SyncResolve}; #[cfg(feature = "unstable")] -use zenoh_protocol::network::declare::SubscriberId; -use zenoh_protocol::network::AtomicRequestId; -use zenoh_protocol::network::RequestId; -use zenoh_protocol::zenoh::reply::ReplyBody; -use zenoh_protocol::zenoh::Del; -use zenoh_protocol::zenoh::Put; +use zenoh_protocol::network::{declare::SubscriberId, ext}; use zenoh_protocol::{ core::{ key_expr::{keyexpr, OwnedKeyExpr}, @@ -74,13 +69,13 @@ use zenoh_protocol::{ subscriber::ext::SubscriberInfo, Declare, DeclareBody, DeclareKeyExpr, DeclareMode, DeclareQueryable, DeclareSubscriber, UndeclareQueryable, UndeclareSubscriber, }, - ext, request::{self, ext::TargetType, Request}, - Mapping, Push, Response, ResponseFinal, + AtomicRequestId, Mapping, Push, RequestId, Response, ResponseFinal, }, zenoh::{ query::{self, ext::QueryBodyType, Consolidation}, - PushBody, RequestBody, ResponseBody, + reply::ReplyBody, + Del, PushBody, Put, RequestBody, 
ResponseBody, }, }; use zenoh_result::ZResult; diff --git a/zenoh/src/subscriber.rs b/zenoh/src/subscriber.rs index 60a31a6577..47d41ebb1f 100644 --- a/zenoh/src/subscriber.rs +++ b/zenoh/src/subscriber.rs @@ -202,9 +202,6 @@ pub struct SubscriberBuilder<'a, 'b, Handler> { #[cfg(not(feature = "unstable"))] pub(crate) reliability: Reliability, - #[cfg(not(feature = "unstable"))] - pub(crate) mode: Mode, - #[cfg(feature = "unstable")] pub origin: Locality, #[cfg(not(feature = "unstable"))] From 3dea601356c7fdb08f14c7ce6c94e732db5b1836 Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Mon, 8 Apr 2024 18:33:22 +0200 Subject: [PATCH 105/124] Payload iter impl --- zenoh/src/payload.rs | 821 ++++--------------------------------------- 1 file changed, 67 insertions(+), 754 deletions(-) diff --git a/zenoh/src/payload.rs b/zenoh/src/payload.rs index db3126d93d..ed8c1b98c3 100644 --- a/zenoh/src/payload.rs +++ b/zenoh/src/payload.rs @@ -80,10 +80,11 @@ impl Payload { } /// Get a [`PayloadReader`] implementing [`std::io::Read`] trait. - pub fn iter(&self) -> PayloadIterator<'_, T> + pub fn iter<'a, T>(&'a self) -> PayloadIterator<'a, T> where T: TryFrom, - ZSerde: for<'b> Deserialize<'b, T, Error = ZDeserializeError>, + ZSerde: Deserialize<'a, T>, + >::Error: Debug, { PayloadIterator { reader: self.0.reader(), @@ -144,7 +145,8 @@ where impl<'a, T> Iterator for PayloadIterator<'a, T> where - ZSerde: for<'b> Deserialize<'b, T, Error = ZDeserializeError>, + ZSerde: for<'b> Deserialize<'b, T>, + >::Error: Debug, { type Item = T; @@ -164,6 +166,28 @@ where } } +impl FromIterator for Payload +where + ZSerde: Serialize, +{ + fn from_iter>(iter: T) -> Self { + let codec = Zenoh080::new(); + let mut buffer: ZBuf = ZBuf::empty(); + let mut writer = buffer.writer(); + for t in iter { + let tpld = ZSerde.serialize(t); + // SAFETY: we are serializing slices on a ZBuf, so serialization will never + // fail unless we run out of memory. 
In that case, Rust memory allocator + // will panic before the serializer has any chance to fail. + unsafe { + codec.write(&mut writer, &tpld.0).unwrap_unchecked(); + } + } + + Payload::new(buffer) + } +} + /// The default serializer for Zenoh payload. It supports primitives types, such as: vec, int, uint, float, string, bool. /// It also supports common Rust serde values. #[derive(Clone, Copy, Debug)] @@ -786,6 +810,16 @@ where } } +impl From<(A, B)> for Payload +where + A: Into, + B: Into, +{ + fn from(value: (A, B)) -> Self { + ZSerde.serialize(value) + } +} + impl<'a, A, B> Deserialize<'a, (A, B)> for ZSerde where A: TryFrom, @@ -811,31 +845,19 @@ where } } -// Iterator -// impl Serialize for ZSerde -// where -// I: Iterator, -// T: Into, -// { -// type Output = Payload; - -// fn serialize(self, iter: I) -> Self::Output { -// let codec = Zenoh080::new(); -// let mut buffer: ZBuf = ZBuf::empty(); -// let mut writer = buffer.writer(); -// for t in iter { -// let tpld: Payload = t.into(); -// // SAFETY: we are serializing slices on a ZBuf, so serialization will never -// // fail unless we run out of memory. In that case, Rust memory allocator -// // will panic before the serializer has any chance to fail. 
-// unsafe { -// codec.write(&mut writer, &tpld.0).unwrap_unchecked(); -// } -// } - -// Payload::new(buffer) -// } -// } +impl TryFrom for (A, B) +where + A: TryFrom, + >::Error: Debug, + B: TryFrom, + >::Error: Debug, +{ + type Error = ZError; + + fn try_from(value: Payload) -> Result { + ZSerde.deserialize(&value) + } +} // For convenience to always convert a Value the examples #[derive(Debug, Clone, PartialEq, Eq)] @@ -977,729 +999,20 @@ mod tests { serialize_deserialize!((String, String), (String::from("a"), String::from("b"))); // Iterator - // let mut hm = Vec::new(); - // hm.push(0); - // hm.push(1); - // Payload::serialize(hm.iter()); - - // let mut hm = HashMap::new(); - // hm.insert(0, 0); - // hm.insert(1, 1); - // Payload::serialize(hm.iter().map(|(k, v)| (k, v))); - // for (k, v) in sample.payload().iter::<(String, serde_json::Value)>() {} - } -} - -// macro_rules! impl_iterator_inner { -// ($iter:expr) => {{ -// let codec = Zenoh080::new(); -// let mut buffer: ZBuf = ZBuf::empty(); -// let mut writer = buffer.writer(); -// for t in $iter { -// let tpld = ZSerde.serialize(t); -// // SAFETY: we are serializing slices on a ZBuf, so serialization will never -// // fail unless we run out of memory. In that case, Rust memory allocator -// // will panic before the serializer has any chance to fail. 
-// unsafe { -// codec.write(&mut writer, &tpld.0).unwrap_unchecked(); -// } -// } - -// Payload::new(buffer) -// }}; -// } - -// impl<'a> Serialize> for ZSerde { -// type Output = Payload; - -// fn serialize(self, iter: std::slice::Iter<'_, i32>) -> Self::Output { -// impl_iterator_inner!(iter) -// } -// } - -// impl<'a> Serialize> for ZSerde { -// type Output = Payload; - -// fn serialize(self, iter: std::slice::IterMut<'_, i32>) -> Self::Output { -// impl_iterator_inner!(iter) -// } -// } - -// impl Serialize<&mut dyn Iterator> for ZSerde { -// type Output = Payload; - -// fn serialize(self, iter: &mut dyn Iterator) -> Self::Output { -// let codec = Zenoh080::new(); -// let mut buffer: ZBuf = ZBuf::empty(); -// let mut writer = buffer.writer(); -// for t in iter { -// let tpld = ZSerde.serialize(t); -// // SAFETY: we are serializing slices on a ZBuf, so serialization will never -// // fail unless we run out of memory. In that case, Rust memory allocator -// // will panic before the serializer has any chance to fail. -// unsafe { -// codec.write(&mut writer, &tpld.0).unwrap_unchecked(); -// } -// } - -// Payload::new(buffer) -// } -// } - -// impl Serialize<(A, B)> for ZSerde -// where -// ZSerde: Serialize, -// ZSerde: Serialize, -// { -// type Output = Payload; - -// fn serialize(self, t: (A, B)) -> Self::Output { -// let (a, b) = t; - -// let codec = Zenoh080::new(); -// let mut buffer: ZBuf = ZBuf::empty(); -// let mut writer = buffer.writer(); -// let apld = Payload::serialize::(a); -// let bpld = Payload::serialize::(b); - -// // SAFETY: we are serializing slices on a ZBuf, so serialization will never -// // fail unless we run out of memory. In that case, Rust memory allocator -// // will panic before the serializer has any chance to fail. 
-// unsafe { -// codec.write(&mut writer, &apld.0).unwrap_unchecked(); -// codec.write(&mut writer, &bpld.0).unwrap_unchecked(); -// } - -// Payload::new(buffer) -// } -// } - -// impl<'a, A, B> Deserialize<'a, (A, B)> for ZSerde -// where -// A: TryFrom, -// ZSerde: Deserialize<'a, A>, -// >::Error: Debug, -// B: TryFrom, -// ZSerde: Deserialize<'a, B>, -// >::Error: Debug, -// { -// type Error = ZError; - -// fn deserialize(self, payload: &'a Payload) -> Result<(A, B), Self::Error> { -// let codec = Zenoh080::new(); -// let mut reader = payload.0.reader(); - -// let abuf: ZBuf = codec.read(&mut reader).map_err(|e| zerror!("{:?}", e))?; -// let apld = Payload::new(abuf); - -// let bbuf: ZBuf = codec.read(&mut reader).map_err(|e| zerror!("{:?}", e))?; -// let bpld = Payload::new(bbuf); - -// let a = A::try_from(apld).map_err(|e| zerror!("{:?}", e))?; -// let b = B::try_from(bpld).map_err(|e| zerror!("{:?}", e))?; -// Ok((a, b)) -// } -// } - -// impl Serialize<&mut dyn Iterator> for ZSerde -// where -// ZSerde: Serialize, -// { -// type Output = Payload; - -// fn serialize(self, iter: &mut dyn Iterator) -> Self::Output { -// let codec = Zenoh080::new(); -// let mut buffer: ZBuf = ZBuf::empty(); -// let mut writer = buffer.writer(); -// for t in iter { -// let tpld = ZSerde.serialize(t); -// // SAFETY: we are serializing slices on a ZBuf, so serialization will never -// // fail unless we run out of memory. In that case, Rust memory allocator -// // will panic before the serializer has any chance to fail. -// unsafe { -// codec.write(&mut writer, &tpld.0).unwrap_unchecked(); -// } -// } - -// Payload::new(buffer) -// } -// } - -// Iterator -// macro_rules! 
impl_iterator_serialize { -// ($a:ty) => { -// impl Serialize<&mut dyn Iterator> for ZSerde -// { -// type Output = Payload; - -// fn serialize(self, iter: &mut dyn Iterator) -> Self::Output { -// let codec = Zenoh080::new(); -// let mut buffer: ZBuf = ZBuf::empty(); -// let mut writer = buffer.writer(); -// for t in iter { -// let tpld = ZSerde.serialize(t); -// // SAFETY: we are serializing slices on a ZBuf, so serialization will never -// // fail unless we run out of memory. In that case, Rust memory allocator -// // will panic before the serializer has any chance to fail. -// unsafe { -// codec.write(&mut writer, &tpld.0).unwrap_unchecked(); -// } -// } - -// Payload::new(buffer) -// } -// } -// }; -// } - -// Tuples -// macro_rules! impl_tuple_serialize { -// ($a:ty, $b:ty) => { -// impl Serialize<($a, $b)> for ZSerde -// { -// type Output = Payload; - -// fn serialize(self, t: ($a, $b)) -> Self::Output { -// let (a, b) = t; - -// let codec = Zenoh080::new(); -// let mut buffer: ZBuf = ZBuf::empty(); -// let mut writer = buffer.writer(); -// let apld = Payload::serialize::<$a>(a); -// let bpld = Payload::serialize::<$b>(b); - -// // SAFETY: we are serializing slices on a ZBuf, so serialization will never -// // fail unless we run out of memory. In that case, Rust memory allocator -// // will panic before the serializer has any chance to fail. -// unsafe { -// codec.write(&mut writer, &apld.0).unwrap_unchecked(); -// codec.write(&mut writer, &bpld.0).unwrap_unchecked(); -// } - -// Payload::new(buffer) -// } -// } -// } - -// } - -// macro_rules! 
impl_tuple_deserialize { -// ($a:ty, $b:ty) => { -// impl<'a> Deserialize<'a, ($a, $b)> for ZSerde { -// type Error = ZError; - -// fn deserialize(self, payload: &'a Payload) -> Result<($a, $b), Self::Error> { -// let codec = Zenoh080::new(); -// let mut reader = payload.0.reader(); - -// let abuf: ZBuf = codec.read(&mut reader).map_err(|e| zerror!("{:?}", e))?; -// let apld = Payload::new(abuf); - -// let bbuf: ZBuf = codec.read(&mut reader).map_err(|e| zerror!("{:?}", e))?; -// let bpld = Payload::new(bbuf); - -// let a = apld.deserialize::<$a>().map_err(|e| zerror!("{:?}", e))?; -// let b = bpld.deserialize::<$b>().map_err(|e| zerror!("{:?}", e))?; -// Ok((a, b)) -// } -// } -// }; -// } - -// impl_tuple_serialize!(u8, u8); -// impl_tuple_deserialize!(u8, u8); -// impl_tuple_serialize!(u8, u16); -// impl_tuple_deserialize!(u8, u16); -// impl_tuple_serialize!(u8, u32); -// impl_tuple_deserialize!(u8, u32); -// impl_tuple_serialize!(u8, u64); -// impl_tuple_deserialize!(u8, u64); -// impl_tuple_serialize!(u8, usize); -// impl_tuple_deserialize!(u8, usize); -// impl_tuple_serialize!(u8, i8); -// impl_tuple_deserialize!(u8, i8); -// impl_tuple_serialize!(u8, i16); -// impl_tuple_deserialize!(u8, i16); -// impl_tuple_serialize!(u8, i32); -// impl_tuple_deserialize!(u8, i32); -// impl_tuple_serialize!(u8, isize); -// impl_tuple_deserialize!(u8, isize); -// impl_tuple_serialize!(u8, f32); -// impl_tuple_deserialize!(u8, f32); -// impl_tuple_serialize!(u8, f64); -// impl_tuple_deserialize!(u8, f64); -// impl_tuple_serialize!(u8, bool); -// impl_tuple_deserialize!(u8, bool); -// impl_tuple_serialize!(u8, ZBuf); -// impl_tuple_deserialize!(u8, ZBuf); -// impl_tuple_serialize!(u8, Vec); -// impl_tuple_deserialize!(u8, Vec); -// impl_tuple_serialize!(u8, String); -// impl_tuple_deserialize!(u8, String); -// impl_tuple_serialize!(u8, &[u8]); -// impl_tuple_serialize!(u16, u8); -// impl_tuple_deserialize!(u16, u8); -// impl_tuple_serialize!(u16, u16); -// 
impl_tuple_deserialize!(u16, u16); -// impl_tuple_serialize!(u16, u32); -// impl_tuple_deserialize!(u16, u32); -// impl_tuple_serialize!(u16, u64); -// impl_tuple_deserialize!(u16, u64); -// impl_tuple_serialize!(u16, usize); -// impl_tuple_deserialize!(u16, usize); -// impl_tuple_serialize!(u16, i8); -// impl_tuple_deserialize!(u16, i8); -// impl_tuple_serialize!(u16, i16); -// impl_tuple_deserialize!(u16, i16); -// impl_tuple_serialize!(u16, i32); -// impl_tuple_deserialize!(u16, i32); -// impl_tuple_serialize!(u16, isize); -// impl_tuple_deserialize!(u16, isize); -// impl_tuple_serialize!(u16, f32); -// impl_tuple_deserialize!(u16, f32); -// impl_tuple_serialize!(u16, f64); -// impl_tuple_deserialize!(u16, f64); -// impl_tuple_serialize!(u16, bool); -// impl_tuple_deserialize!(u16, bool); -// impl_tuple_serialize!(u16, ZBuf); -// impl_tuple_deserialize!(u16, ZBuf); -// impl_tuple_serialize!(u16, Vec); -// impl_tuple_deserialize!(u16, Vec); -// impl_tuple_serialize!(u16, String); -// impl_tuple_deserialize!(u16, String); -// impl_tuple_serialize!(u16, &[u8]); -// impl_tuple_serialize!(u32, u8); -// impl_tuple_deserialize!(u32, u8); -// impl_tuple_serialize!(u32, u16); -// impl_tuple_deserialize!(u32, u16); -// impl_tuple_serialize!(u32, u32); -// impl_tuple_deserialize!(u32, u32); -// impl_tuple_serialize!(u32, u64); -// impl_tuple_deserialize!(u32, u64); -// impl_tuple_serialize!(u32, usize); -// impl_tuple_deserialize!(u32, usize); -// impl_tuple_serialize!(u32, i8); -// impl_tuple_deserialize!(u32, i8); -// impl_tuple_serialize!(u32, i16); -// impl_tuple_deserialize!(u32, i16); -// impl_tuple_serialize!(u32, i32); -// impl_tuple_deserialize!(u32, i32); -// impl_tuple_serialize!(u32, isize); -// impl_tuple_deserialize!(u32, isize); -// impl_tuple_serialize!(u32, f32); -// impl_tuple_deserialize!(u32, f32); -// impl_tuple_serialize!(u32, f64); -// impl_tuple_deserialize!(u32, f64); -// impl_tuple_serialize!(u32, bool); -// impl_tuple_deserialize!(u32, bool); -// 
impl_tuple_serialize!(u32, ZBuf); -// impl_tuple_deserialize!(u32, ZBuf); -// impl_tuple_serialize!(u32, Vec); -// impl_tuple_deserialize!(u32, Vec); -// impl_tuple_serialize!(u32, String); -// impl_tuple_deserialize!(u32, String); -// impl_tuple_serialize!(u32, &[u8]); -// impl_tuple_serialize!(u64, u8); -// impl_tuple_deserialize!(u64, u8); -// impl_tuple_serialize!(u64, u16); -// impl_tuple_deserialize!(u64, u16); -// impl_tuple_serialize!(u64, u32); -// impl_tuple_deserialize!(u64, u32); -// impl_tuple_serialize!(u64, u64); -// impl_tuple_deserialize!(u64, u64); -// impl_tuple_serialize!(u64, usize); -// impl_tuple_deserialize!(u64, usize); -// impl_tuple_serialize!(u64, i8); -// impl_tuple_deserialize!(u64, i8); -// impl_tuple_serialize!(u64, i16); -// impl_tuple_deserialize!(u64, i16); -// impl_tuple_serialize!(u64, i32); -// impl_tuple_deserialize!(u64, i32); -// impl_tuple_serialize!(u64, isize); -// impl_tuple_deserialize!(u64, isize); -// impl_tuple_serialize!(u64, f32); -// impl_tuple_deserialize!(u64, f32); -// impl_tuple_serialize!(u64, f64); -// impl_tuple_deserialize!(u64, f64); -// impl_tuple_serialize!(u64, bool); -// impl_tuple_deserialize!(u64, bool); -// impl_tuple_serialize!(u64, ZBuf); -// impl_tuple_deserialize!(u64, ZBuf); -// impl_tuple_serialize!(u64, Vec); -// impl_tuple_deserialize!(u64, Vec); -// impl_tuple_serialize!(u64, String); -// impl_tuple_deserialize!(u64, String); -// impl_tuple_serialize!(u64, &[u8]); -// impl_tuple_serialize!(usize, u8); -// impl_tuple_deserialize!(usize, u8); -// impl_tuple_serialize!(usize, u16); -// impl_tuple_deserialize!(usize, u16); -// impl_tuple_serialize!(usize, u32); -// impl_tuple_deserialize!(usize, u32); -// impl_tuple_serialize!(usize, u64); -// impl_tuple_deserialize!(usize, u64); -// impl_tuple_serialize!(usize, usize); -// impl_tuple_deserialize!(usize, usize); -// impl_tuple_serialize!(usize, i8); -// impl_tuple_deserialize!(usize, i8); -// impl_tuple_serialize!(usize, i16); -// 
impl_tuple_deserialize!(usize, i16); -// impl_tuple_serialize!(usize, i32); -// impl_tuple_deserialize!(usize, i32); -// impl_tuple_serialize!(usize, isize); -// impl_tuple_deserialize!(usize, isize); -// impl_tuple_serialize!(usize, f32); -// impl_tuple_deserialize!(usize, f32); -// impl_tuple_serialize!(usize, f64); -// impl_tuple_deserialize!(usize, f64); -// impl_tuple_serialize!(usize, bool); -// impl_tuple_deserialize!(usize, bool); -// impl_tuple_serialize!(usize, ZBuf); -// impl_tuple_deserialize!(usize, ZBuf); -// impl_tuple_serialize!(usize, Vec); -// impl_tuple_deserialize!(usize, Vec); -// impl_tuple_serialize!(usize, String); -// impl_tuple_deserialize!(usize, String); -// impl_tuple_serialize!(usize, &[u8]); -// impl_tuple_serialize!(i8, u8); -// impl_tuple_deserialize!(i8, u8); -// impl_tuple_serialize!(i8, u16); -// impl_tuple_deserialize!(i8, u16); -// impl_tuple_serialize!(i8, u32); -// impl_tuple_deserialize!(i8, u32); -// impl_tuple_serialize!(i8, u64); -// impl_tuple_deserialize!(i8, u64); -// impl_tuple_serialize!(i8, usize); -// impl_tuple_deserialize!(i8, usize); -// impl_tuple_serialize!(i8, i8); -// impl_tuple_deserialize!(i8, i8); -// impl_tuple_serialize!(i8, i16); -// impl_tuple_deserialize!(i8, i16); -// impl_tuple_serialize!(i8, i32); -// impl_tuple_deserialize!(i8, i32); -// impl_tuple_serialize!(i8, isize); -// impl_tuple_deserialize!(i8, isize); -// impl_tuple_serialize!(i8, f32); -// impl_tuple_deserialize!(i8, f32); -// impl_tuple_serialize!(i8, f64); -// impl_tuple_deserialize!(i8, f64); -// impl_tuple_serialize!(i8, bool); -// impl_tuple_deserialize!(i8, bool); -// impl_tuple_serialize!(i8, ZBuf); -// impl_tuple_deserialize!(i8, ZBuf); -// impl_tuple_serialize!(i8, Vec); -// impl_tuple_deserialize!(i8, Vec); -// impl_tuple_serialize!(i8, String); -// impl_tuple_deserialize!(i8, String); -// impl_tuple_serialize!(i8, &[u8]); -// impl_tuple_serialize!(i16, u8); -// impl_tuple_deserialize!(i16, u8); -// impl_tuple_serialize!(i16, 
u16); -// impl_tuple_deserialize!(i16, u16); -// impl_tuple_serialize!(i16, u32); -// impl_tuple_deserialize!(i16, u32); -// impl_tuple_serialize!(i16, u64); -// impl_tuple_deserialize!(i16, u64); -// impl_tuple_serialize!(i16, usize); -// impl_tuple_deserialize!(i16, usize); -// impl_tuple_serialize!(i16, i8); -// impl_tuple_deserialize!(i16, i8); -// impl_tuple_serialize!(i16, i16); -// impl_tuple_deserialize!(i16, i16); -// impl_tuple_serialize!(i16, i32); -// impl_tuple_deserialize!(i16, i32); -// impl_tuple_serialize!(i16, isize); -// impl_tuple_deserialize!(i16, isize); -// impl_tuple_serialize!(i16, f32); -// impl_tuple_deserialize!(i16, f32); -// impl_tuple_serialize!(i16, f64); -// impl_tuple_deserialize!(i16, f64); -// impl_tuple_serialize!(i16, bool); -// impl_tuple_deserialize!(i16, bool); -// impl_tuple_serialize!(i16, ZBuf); -// impl_tuple_deserialize!(i16, ZBuf); -// impl_tuple_serialize!(i16, Vec); -// impl_tuple_deserialize!(i16, Vec); -// impl_tuple_serialize!(i16, String); -// impl_tuple_deserialize!(i16, String); -// impl_tuple_serialize!(i16, &[u8]); -// impl_tuple_serialize!(i32, u8); -// impl_tuple_deserialize!(i32, u8); -// impl_tuple_serialize!(i32, u16); -// impl_tuple_deserialize!(i32, u16); -// impl_tuple_serialize!(i32, u32); -// impl_tuple_deserialize!(i32, u32); -// impl_tuple_serialize!(i32, u64); -// impl_tuple_deserialize!(i32, u64); -// impl_tuple_serialize!(i32, usize); -// impl_tuple_deserialize!(i32, usize); -// impl_tuple_serialize!(i32, i8); -// impl_tuple_deserialize!(i32, i8); -// impl_tuple_serialize!(i32, i16); -// impl_tuple_deserialize!(i32, i16); -// impl_tuple_serialize!(i32, i32); -// impl_tuple_deserialize!(i32, i32); -// impl_tuple_serialize!(i32, isize); -// impl_tuple_deserialize!(i32, isize); -// impl_tuple_serialize!(i32, f32); -// impl_tuple_deserialize!(i32, f32); -// impl_tuple_serialize!(i32, f64); -// impl_tuple_deserialize!(i32, f64); -// impl_tuple_serialize!(i32, bool); -// impl_tuple_deserialize!(i32, 
bool); -// impl_tuple_serialize!(i32, ZBuf); -// impl_tuple_deserialize!(i32, ZBuf); -// impl_tuple_serialize!(i32, Vec); -// impl_tuple_deserialize!(i32, Vec); -// impl_tuple_serialize!(i32, String); -// impl_tuple_deserialize!(i32, String); -// impl_tuple_serialize!(i32, &[u8]); -// impl_tuple_serialize!(isize, u8); -// impl_tuple_deserialize!(isize, u8); -// impl_tuple_serialize!(isize, u16); -// impl_tuple_deserialize!(isize, u16); -// impl_tuple_serialize!(isize, u32); -// impl_tuple_deserialize!(isize, u32); -// impl_tuple_serialize!(isize, u64); -// impl_tuple_deserialize!(isize, u64); -// impl_tuple_serialize!(isize, usize); -// impl_tuple_deserialize!(isize, usize); -// impl_tuple_serialize!(isize, i8); -// impl_tuple_deserialize!(isize, i8); -// impl_tuple_serialize!(isize, i16); -// impl_tuple_deserialize!(isize, i16); -// impl_tuple_serialize!(isize, i32); -// impl_tuple_deserialize!(isize, i32); -// impl_tuple_serialize!(isize, isize); -// impl_tuple_deserialize!(isize, isize); -// impl_tuple_serialize!(isize, f32); -// impl_tuple_deserialize!(isize, f32); -// impl_tuple_serialize!(isize, f64); -// impl_tuple_deserialize!(isize, f64); -// impl_tuple_serialize!(isize, bool); -// impl_tuple_deserialize!(isize, bool); -// impl_tuple_serialize!(isize, ZBuf); -// impl_tuple_deserialize!(isize, ZBuf); -// impl_tuple_serialize!(isize, Vec); -// impl_tuple_deserialize!(isize, Vec); -// impl_tuple_serialize!(isize, String); -// impl_tuple_deserialize!(isize, String); -// impl_tuple_serialize!(isize, &[u8]); -// impl_tuple_serialize!(f32, u8); -// impl_tuple_deserialize!(f32, u8); -// impl_tuple_serialize!(f32, u16); -// impl_tuple_deserialize!(f32, u16); -// impl_tuple_serialize!(f32, u32); -// impl_tuple_deserialize!(f32, u32); -// impl_tuple_serialize!(f32, u64); -// impl_tuple_deserialize!(f32, u64); -// impl_tuple_serialize!(f32, usize); -// impl_tuple_deserialize!(f32, usize); -// impl_tuple_serialize!(f32, i8); -// impl_tuple_deserialize!(f32, i8); -// 
impl_tuple_serialize!(f32, i16); -// impl_tuple_deserialize!(f32, i16); -// impl_tuple_serialize!(f32, i32); -// impl_tuple_deserialize!(f32, i32); -// impl_tuple_serialize!(f32, isize); -// impl_tuple_deserialize!(f32, isize); -// impl_tuple_serialize!(f32, f32); -// impl_tuple_deserialize!(f32, f32); -// impl_tuple_serialize!(f32, f64); -// impl_tuple_deserialize!(f32, f64); -// impl_tuple_serialize!(f32, bool); -// impl_tuple_deserialize!(f32, bool); -// impl_tuple_serialize!(f32, ZBuf); -// impl_tuple_deserialize!(f32, ZBuf); -// impl_tuple_serialize!(f32, Vec); -// impl_tuple_deserialize!(f32, Vec); -// impl_tuple_serialize!(f32, String); -// impl_tuple_deserialize!(f32, String); -// impl_tuple_serialize!(f32, &[u8]); -// impl_tuple_serialize!(f64, u8); -// impl_tuple_deserialize!(f64, u8); -// impl_tuple_serialize!(f64, u16); -// impl_tuple_deserialize!(f64, u16); -// impl_tuple_serialize!(f64, u32); -// impl_tuple_deserialize!(f64, u32); -// impl_tuple_serialize!(f64, u64); -// impl_tuple_deserialize!(f64, u64); -// impl_tuple_serialize!(f64, usize); -// impl_tuple_deserialize!(f64, usize); -// impl_tuple_serialize!(f64, i8); -// impl_tuple_deserialize!(f64, i8); -// impl_tuple_serialize!(f64, i16); -// impl_tuple_deserialize!(f64, i16); -// impl_tuple_serialize!(f64, i32); -// impl_tuple_deserialize!(f64, i32); -// impl_tuple_serialize!(f64, isize); -// impl_tuple_deserialize!(f64, isize); -// impl_tuple_serialize!(f64, f32); -// impl_tuple_deserialize!(f64, f32); -// impl_tuple_serialize!(f64, f64); -// impl_tuple_deserialize!(f64, f64); -// impl_tuple_serialize!(f64, bool); -// impl_tuple_deserialize!(f64, bool); -// impl_tuple_serialize!(f64, ZBuf); -// impl_tuple_deserialize!(f64, ZBuf); -// impl_tuple_serialize!(f64, Vec); -// impl_tuple_deserialize!(f64, Vec); -// impl_tuple_serialize!(f64, String); -// impl_tuple_deserialize!(f64, String); -// impl_tuple_serialize!(f64, &[u8]); -// impl_tuple_serialize!(bool, u8); -// impl_tuple_deserialize!(bool, 
u8); -// impl_tuple_serialize!(bool, u16); -// impl_tuple_deserialize!(bool, u16); -// impl_tuple_serialize!(bool, u32); -// impl_tuple_deserialize!(bool, u32); -// impl_tuple_serialize!(bool, u64); -// impl_tuple_deserialize!(bool, u64); -// impl_tuple_serialize!(bool, usize); -// impl_tuple_deserialize!(bool, usize); -// impl_tuple_serialize!(bool, i8); -// impl_tuple_deserialize!(bool, i8); -// impl_tuple_serialize!(bool, i16); -// impl_tuple_deserialize!(bool, i16); -// impl_tuple_serialize!(bool, i32); -// impl_tuple_deserialize!(bool, i32); -// impl_tuple_serialize!(bool, isize); -// impl_tuple_deserialize!(bool, isize); -// impl_tuple_serialize!(bool, f32); -// impl_tuple_deserialize!(bool, f32); -// impl_tuple_serialize!(bool, f64); -// impl_tuple_deserialize!(bool, f64); -// impl_tuple_serialize!(bool, bool); -// impl_tuple_deserialize!(bool, bool); -// impl_tuple_serialize!(bool, ZBuf); -// impl_tuple_deserialize!(bool, ZBuf); -// impl_tuple_serialize!(bool, Vec); -// impl_tuple_deserialize!(bool, Vec); -// impl_tuple_serialize!(bool, String); -// impl_tuple_deserialize!(bool, String); -// impl_tuple_serialize!(bool, &[u8]); -// impl_tuple_serialize!(ZBuf, u8); -// impl_tuple_deserialize!(ZBuf, u8); -// impl_tuple_serialize!(ZBuf, u16); -// impl_tuple_deserialize!(ZBuf, u16); -// impl_tuple_serialize!(ZBuf, u32); -// impl_tuple_deserialize!(ZBuf, u32); -// impl_tuple_serialize!(ZBuf, u64); -// impl_tuple_deserialize!(ZBuf, u64); -// impl_tuple_serialize!(ZBuf, usize); -// impl_tuple_deserialize!(ZBuf, usize); -// impl_tuple_serialize!(ZBuf, i8); -// impl_tuple_deserialize!(ZBuf, i8); -// impl_tuple_serialize!(ZBuf, i16); -// impl_tuple_deserialize!(ZBuf, i16); -// impl_tuple_serialize!(ZBuf, i32); -// impl_tuple_deserialize!(ZBuf, i32); -// impl_tuple_serialize!(ZBuf, isize); -// impl_tuple_deserialize!(ZBuf, isize); -// impl_tuple_serialize!(ZBuf, f32); -// impl_tuple_deserialize!(ZBuf, f32); -// impl_tuple_serialize!(ZBuf, f64); -// 
impl_tuple_deserialize!(ZBuf, f64); -// impl_tuple_serialize!(ZBuf, bool); -// impl_tuple_deserialize!(ZBuf, bool); -// impl_tuple_serialize!(ZBuf, ZBuf); -// impl_tuple_deserialize!(ZBuf, ZBuf); -// impl_tuple_serialize!(ZBuf, Vec); -// impl_tuple_deserialize!(ZBuf, Vec); -// impl_tuple_serialize!(ZBuf, String); -// impl_tuple_deserialize!(ZBuf, String); -// impl_tuple_serialize!(ZBuf, &[u8]); -// impl_tuple_serialize!(Vec, u8); -// impl_tuple_deserialize!(Vec, u8); -// impl_tuple_serialize!(Vec, u16); -// impl_tuple_deserialize!(Vec, u16); -// impl_tuple_serialize!(Vec, u32); -// impl_tuple_deserialize!(Vec, u32); -// impl_tuple_serialize!(Vec, u64); -// impl_tuple_deserialize!(Vec, u64); -// impl_tuple_serialize!(Vec, usize); -// impl_tuple_deserialize!(Vec, usize); -// impl_tuple_serialize!(Vec, i8); -// impl_tuple_deserialize!(Vec, i8); -// impl_tuple_serialize!(Vec, i16); -// impl_tuple_deserialize!(Vec, i16); -// impl_tuple_serialize!(Vec, i32); -// impl_tuple_deserialize!(Vec, i32); -// impl_tuple_serialize!(Vec, isize); -// impl_tuple_deserialize!(Vec, isize); -// impl_tuple_serialize!(Vec, f32); -// impl_tuple_deserialize!(Vec, f32); -// impl_tuple_serialize!(Vec, f64); -// impl_tuple_deserialize!(Vec, f64); -// impl_tuple_serialize!(Vec, bool); -// impl_tuple_deserialize!(Vec, bool); -// impl_tuple_serialize!(Vec, ZBuf); -// impl_tuple_deserialize!(Vec, ZBuf); -// impl_tuple_serialize!(Vec, Vec); -// impl_tuple_deserialize!(Vec, Vec); -// impl_tuple_serialize!(Vec, String); -// impl_tuple_deserialize!(Vec, String); -// impl_tuple_serialize!(Vec, &[u8]); -// impl_tuple_serialize!(String, u8); -// impl_tuple_deserialize!(String, u8); -// impl_tuple_serialize!(String, u16); -// impl_tuple_deserialize!(String, u16); -// impl_tuple_serialize!(String, u32); -// impl_tuple_deserialize!(String, u32); -// impl_tuple_serialize!(String, u64); -// impl_tuple_deserialize!(String, u64); -// impl_tuple_serialize!(String, usize); -// impl_tuple_deserialize!(String, 
usize); -// impl_tuple_serialize!(String, i8); -// impl_tuple_deserialize!(String, i8); -// impl_tuple_serialize!(String, i16); -// impl_tuple_deserialize!(String, i16); -// impl_tuple_serialize!(String, i32); -// impl_tuple_deserialize!(String, i32); -// impl_tuple_serialize!(String, isize); -// impl_tuple_deserialize!(String, isize); -// impl_tuple_serialize!(String, f32); -// impl_tuple_deserialize!(String, f32); -// impl_tuple_serialize!(String, f64); -// impl_tuple_deserialize!(String, f64); -// impl_tuple_serialize!(String, bool); -// impl_tuple_deserialize!(String, bool); -// impl_tuple_serialize!(String, ZBuf); -// impl_tuple_deserialize!(String, ZBuf); -// impl_tuple_serialize!(String, Vec); -// impl_tuple_deserialize!(String, Vec); -// impl_tuple_serialize!(String, String); -// impl_tuple_deserialize!(String, String); -// impl_tuple_serialize!(String, &[u8]); -// impl_tuple_serialize!(&[u8], u8); -// impl_tuple_serialize!(&[u8], u16); -// impl_tuple_serialize!(&[u8], u32); -// impl_tuple_serialize!(&[u8], u64); -// impl_tuple_serialize!(&[u8], usize); -// impl_tuple_serialize!(&[u8], i8); -// impl_tuple_serialize!(&[u8], i16); -// impl_tuple_serialize!(&[u8], i32); -// impl_tuple_serialize!(&[u8], isize); -// impl_tuple_serialize!(&[u8], f32); -// impl_tuple_serialize!(&[u8], f64); -// impl_tuple_serialize!(&[u8], bool); -// impl_tuple_serialize!(&[u8], ZBuf); -// impl_tuple_serialize!(&[u8], Vec); -// impl_tuple_serialize!(&[u8], String); -// impl_tuple_serialize!(&[u8], &[u8]); -// impl_iterator_serialize!(u8); -// impl_iterator_serialize!(u16); -// impl_iterator_serialize!(u32); -// impl_iterator_serialize!(u64); -// impl_iterator_serialize!(usize); -// impl_iterator_serialize!(i8); -// impl_iterator_serialize!(i16); -// impl_iterator_serialize!(i32); -// impl_iterator_serialize!(isize); -// impl_iterator_serialize!(f32); -// impl_iterator_serialize!(f64); -// impl_iterator_serialize!(bool); -// impl_iterator_serialize!(ZBuf); -// 
impl_iterator_serialize!(Vec); -// impl_iterator_serialize!(String); -// impl_iterator_serialize!(&[u8]); + let v: [usize; 5] = [0, 1, 2, 3, 4]; + let p = Payload::from_iter(v.iter()); + for (i, t) in p.iter::().enumerate() { + assert_eq!(i, t); + } + + use std::collections::HashMap; + let mut hm: HashMap = HashMap::new(); + hm.insert(0, 0); + hm.insert(1, 1); + let p = Payload::from_iter(hm.iter()); + // for (i, (k, v)) in p.iter::<(usize, usize)>().enumerate() { + // assert_eq!(i, k); + // assert_eq!(i, v); + // } + } +} From a25676b4c468c408c31f74d2a896be315a1d7f1a Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Mon, 8 Apr 2024 20:08:36 +0200 Subject: [PATCH 106/124] Improve payload serde --- zenoh/src/payload.rs | 272 ++++++++++++++++++++++++++++++++----------- 1 file changed, 202 insertions(+), 70 deletions(-) diff --git a/zenoh/src/payload.rs b/zenoh/src/payload.rs index ed8c1b98c3..3c4709a6ae 100644 --- a/zenoh/src/payload.rs +++ b/zenoh/src/payload.rs @@ -211,6 +211,20 @@ impl From for Payload { } } +impl Serialize<&ZBuf> for ZSerde { + type Output = Payload; + + fn serialize(self, t: &ZBuf) -> Self::Output { + Payload::new(t.clone()) + } +} + +impl From<&ZBuf> for Payload { + fn from(t: &ZBuf) -> Self { + ZSerde.serialize(t) + } +} + impl Deserialize<'_, ZBuf> for ZSerde { type Error = Infallible; @@ -246,6 +260,20 @@ impl From> for Payload { } } +impl Serialize<&Vec> for ZSerde { + type Output = Payload; + + fn serialize(self, t: &Vec) -> Self::Output { + Payload::new(t.clone()) + } +} + +impl From<&Vec> for Payload { + fn from(t: &Vec) -> Self { + ZSerde.serialize(t) + } +} + impl Deserialize<'_, Vec> for ZSerde { type Error = Infallible; @@ -296,6 +324,20 @@ impl From> for Payload { } } +impl<'a> Serialize<&Cow<'a, [u8]>> for ZSerde { + type Output = Payload; + + fn serialize(self, t: &Cow<'a, [u8]>) -> Self::Output { + Payload::new(t.to_vec()) + } +} + +impl From<&Cow<'_, [u8]>> for Payload { + fn from(t: &Cow<'_, [u8]>) -> Self { + 
ZSerde.serialize(t) + } +} + impl<'a> Deserialize<'a, Cow<'a, [u8]>> for ZSerde { type Error = Infallible; @@ -325,6 +367,20 @@ impl From for Payload { } } +impl Serialize<&String> for ZSerde { + type Output = Payload; + + fn serialize(self, s: &String) -> Self::Output { + Payload::new(s.clone().into_bytes()) + } +} + +impl From<&String> for Payload { + fn from(t: &String) -> Self { + ZSerde.serialize(t) + } +} + impl Deserialize<'_, String> for ZSerde { type Error = FromUtf8Error; @@ -380,6 +436,20 @@ impl From> for Payload { } } +impl<'a> Serialize<&Cow<'a, str>> for ZSerde { + type Output = Payload; + + fn serialize(self, s: &Cow<'a, str>) -> Self::Output { + Self.serialize(s.to_string()) + } +} + +impl From<&Cow<'_, str>> for Payload { + fn from(t: &Cow<'_, str>) -> Self { + ZSerde.serialize(t) + } +} + impl<'a> Deserialize<'a, Cow<'a, str>> for ZSerde { type Error = FromUtf8Error; @@ -437,20 +507,6 @@ macro_rules! impl_int { } } - impl Serialize<&mut $t> for ZSerde { - type Output = Payload; - - fn serialize(self, t: &mut $t) -> Self::Output { - ZSerde.serialize(*t) - } - } - - impl From<&mut $t> for Payload { - fn from(t: &mut $t) -> Self { - ZSerde.serialize(t) - } - } - impl<'a> Deserialize<'a, $t> for ZSerde { type Error = ZDeserializeError; @@ -522,6 +578,20 @@ impl From for Payload { } } +impl Serialize<&bool> for ZSerde { + type Output = Payload; + + fn serialize(self, t: &bool) -> Self::Output { + ZSerde.serialize(*t) + } +} + +impl From<&bool> for Payload { + fn from(t: &bool) -> Self { + ZSerde.serialize(t) + } +} + impl Deserialize<'_, bool> for ZSerde { type Error = ZDeserializeError; @@ -535,6 +605,14 @@ impl Deserialize<'_, bool> for ZSerde { } } +impl TryFrom for bool { + type Error = ZDeserializeError; + + fn try_from(value: Payload) -> Result { + ZSerde.deserialize(&value) + } +} + impl TryFrom<&Payload> for bool { type Error = ZDeserializeError; @@ -545,36 +623,36 @@ impl TryFrom<&Payload> for bool { // - Zenoh advanced types 
encoders/decoders // JSON -impl Serialize<&serde_json::Value> for ZSerde { +impl Serialize for ZSerde { type Output = Result; - fn serialize(self, t: &serde_json::Value) -> Self::Output { - let mut payload = Payload::empty(); - serde_json::to_writer(payload.0.writer(), t)?; - Ok(payload) + fn serialize(self, t: serde_json::Value) -> Self::Output { + ZSerde.serialize(&t) } } -impl TryFrom<&serde_json::Value> for Payload { +impl TryFrom for Payload { type Error = serde_json::Error; - fn try_from(value: &serde_json::Value) -> Result { - ZSerde.serialize(value) + fn try_from(value: serde_json::Value) -> Result { + ZSerde.serialize(&value) } } -impl Serialize for ZSerde { +impl Serialize<&serde_json::Value> for ZSerde { type Output = Result; - fn serialize(self, t: serde_json::Value) -> Self::Output { - Self.serialize(&t) + fn serialize(self, t: &serde_json::Value) -> Self::Output { + let mut payload = Payload::empty(); + serde_json::to_writer(payload.0.writer(), t)?; + Ok(payload) } } -impl TryFrom for Payload { +impl TryFrom<&serde_json::Value> for Payload { type Error = serde_json::Error; - fn try_from(value: serde_json::Value) -> Result { + fn try_from(value: &serde_json::Value) -> Result { ZSerde.serialize(value) } } @@ -587,6 +665,14 @@ impl Deserialize<'_, serde_json::Value> for ZSerde { } } +impl TryFrom for serde_json::Value { + type Error = serde_json::Error; + + fn try_from(value: Payload) -> Result { + ZSerde.deserialize(&value) + } +} + impl TryFrom<&Payload> for serde_json::Value { type Error = serde_json::Error; @@ -596,36 +682,36 @@ impl TryFrom<&Payload> for serde_json::Value { } // Yaml -impl Serialize<&serde_yaml::Value> for ZSerde { +impl Serialize for ZSerde { type Output = Result; - fn serialize(self, t: &serde_yaml::Value) -> Self::Output { - let mut payload = Payload::empty(); - serde_yaml::to_writer(payload.0.writer(), t)?; - Ok(payload) + fn serialize(self, t: serde_yaml::Value) -> Self::Output { + Self.serialize(&t) } } -impl 
TryFrom<&serde_yaml::Value> for Payload { +impl TryFrom for Payload { type Error = serde_yaml::Error; - fn try_from(value: &serde_yaml::Value) -> Result { + fn try_from(value: serde_yaml::Value) -> Result { ZSerde.serialize(value) } } -impl Serialize for ZSerde { +impl Serialize<&serde_yaml::Value> for ZSerde { type Output = Result; - fn serialize(self, t: serde_yaml::Value) -> Self::Output { - Self.serialize(&t) + fn serialize(self, t: &serde_yaml::Value) -> Self::Output { + let mut payload = Payload::empty(); + serde_yaml::to_writer(payload.0.writer(), t)?; + Ok(payload) } } -impl TryFrom for Payload { +impl TryFrom<&serde_yaml::Value> for Payload { type Error = serde_yaml::Error; - fn try_from(value: serde_yaml::Value) -> Result { + fn try_from(value: &serde_yaml::Value) -> Result { ZSerde.serialize(value) } } @@ -638,6 +724,14 @@ impl Deserialize<'_, serde_yaml::Value> for ZSerde { } } +impl TryFrom for serde_yaml::Value { + type Error = serde_yaml::Error; + + fn try_from(value: Payload) -> Result { + ZSerde.deserialize(&value) + } +} + impl TryFrom<&Payload> for serde_yaml::Value { type Error = serde_yaml::Error; @@ -647,36 +741,36 @@ impl TryFrom<&Payload> for serde_yaml::Value { } // CBOR -impl Serialize<&serde_cbor::Value> for ZSerde { +impl Serialize for ZSerde { type Output = Result; - fn serialize(self, t: &serde_cbor::Value) -> Self::Output { - let mut payload = Payload::empty(); - serde_cbor::to_writer(payload.0.writer(), t)?; - Ok(payload) + fn serialize(self, t: serde_cbor::Value) -> Self::Output { + Self.serialize(&t) } } -impl TryFrom<&serde_cbor::Value> for Payload { +impl TryFrom for Payload { type Error = serde_cbor::Error; - fn try_from(value: &serde_cbor::Value) -> Result { + fn try_from(value: serde_cbor::Value) -> Result { ZSerde.serialize(value) } } -impl Serialize for ZSerde { +impl Serialize<&serde_cbor::Value> for ZSerde { type Output = Result; - fn serialize(self, t: serde_cbor::Value) -> Self::Output { - Self.serialize(&t) + fn 
serialize(self, t: &serde_cbor::Value) -> Self::Output { + let mut payload = Payload::empty(); + serde_cbor::to_writer(payload.0.writer(), t)?; + Ok(payload) } } -impl TryFrom for Payload { +impl TryFrom<&serde_cbor::Value> for Payload { type Error = serde_cbor::Error; - fn try_from(value: serde_cbor::Value) -> Result { + fn try_from(value: &serde_cbor::Value) -> Result { ZSerde.serialize(value) } } @@ -689,6 +783,14 @@ impl Deserialize<'_, serde_cbor::Value> for ZSerde { } } +impl TryFrom for serde_cbor::Value { + type Error = serde_cbor::Error; + + fn try_from(value: Payload) -> Result { + ZSerde.deserialize(&value) + } +} + impl TryFrom<&Payload> for serde_cbor::Value { type Error = serde_cbor::Error; @@ -698,6 +800,22 @@ impl TryFrom<&Payload> for serde_cbor::Value { } // Pickle +impl Serialize for ZSerde { + type Output = Result; + + fn serialize(self, t: serde_pickle::Value) -> Self::Output { + Self.serialize(&t) + } +} + +impl TryFrom for Payload { + type Error = serde_pickle::Error; + + fn try_from(value: serde_pickle::Value) -> Result { + ZSerde.serialize(value) + } +} + impl Serialize<&serde_pickle::Value> for ZSerde { type Output = Result; @@ -720,27 +838,19 @@ impl TryFrom<&serde_pickle::Value> for Payload { } } -impl Serialize for ZSerde { - type Output = Result; - - fn serialize(self, t: serde_pickle::Value) -> Self::Output { - Self.serialize(&t) - } -} - -impl TryFrom for Payload { +impl Deserialize<'_, serde_pickle::Value> for ZSerde { type Error = serde_pickle::Error; - fn try_from(value: serde_pickle::Value) -> Result { - ZSerde.serialize(value) + fn deserialize(self, v: &Payload) -> Result { + serde_pickle::value_from_reader(v.reader(), serde_pickle::DeOptions::default()) } } -impl Deserialize<'_, serde_pickle::Value> for ZSerde { +impl TryFrom for serde_pickle::Value { type Error = serde_pickle::Error; - fn deserialize(self, v: &Payload) -> Result { - serde_pickle::value_from_reader(v.reader(), serde_pickle::DeOptions::default()) + fn 
try_from(value: Payload) -> Result { + ZSerde.deserialize(&value) } } @@ -761,6 +871,12 @@ impl Serialize> for ZSerde { Payload::new(t) } } +#[cfg(feature = "shared-memory")] +impl From> for Payload { + fn from(t: Arc) -> Self { + ZSerde.serialize(t) + } +} #[cfg(feature = "shared-memory")] impl Serialize> for ZSerde { @@ -772,6 +888,13 @@ impl Serialize> for ZSerde { } } +#[cfg(feature = "shared-memory")] +impl From> for Payload { + fn from(t: Box) -> Self { + ZSerde.serialize(t) + } +} + #[cfg(feature = "shared-memory")] impl Serialize for ZSerde { type Output = Payload; @@ -781,6 +904,13 @@ impl Serialize for ZSerde { } } +#[cfg(feature = "shared-memory")] +impl From for Payload { + fn from(t: SharedMemoryBuf) -> Self { + ZSerde.serialize(t) + } +} + // Tuple impl Serialize<(A, B)> for ZSerde where @@ -859,7 +989,7 @@ where } } -// For convenience to always convert a Value the examples +// For convenience to always convert a Value in the examples #[derive(Debug, Clone, PartialEq, Eq)] pub enum StringOrBase64 { String(String), @@ -1000,7 +1130,9 @@ mod tests { // Iterator let v: [usize; 5] = [0, 1, 2, 3, 4]; + println!("Serialize:\t{:?}", v); let p = Payload::from_iter(v.iter()); + println!("Deerialize:\t{:?}", p); for (i, t) in p.iter::().enumerate() { assert_eq!(i, t); } @@ -1009,10 +1141,10 @@ mod tests { let mut hm: HashMap = HashMap::new(); hm.insert(0, 0); hm.insert(1, 1); + println!("Serialize:\t{:?}", hm); let p = Payload::from_iter(hm.iter()); - // for (i, (k, v)) in p.iter::<(usize, usize)>().enumerate() { - // assert_eq!(i, k); - // assert_eq!(i, v); - // } + println!("Deerialize:\t{:?}", p); + let o: HashMap = HashMap::from_iter(p.iter()); + assert_eq!(hm, o); } } From d0246076a3260e40a0df4fc0d0c2357126a37793 Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Tue, 9 Apr 2024 09:38:42 +0200 Subject: [PATCH 107/124] [u8;N] payload support. from_reader functionality. 
--- zenoh/src/payload.rs | 88 ++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 85 insertions(+), 3 deletions(-) diff --git a/zenoh/src/payload.rs b/zenoh/src/payload.rs index 3c4709a6ae..a65843dcaf 100644 --- a/zenoh/src/payload.rs +++ b/zenoh/src/payload.rs @@ -14,6 +14,7 @@ //! Payload primitives. use crate::buffers::ZBuf; +use std::io::Read; use std::marker::PhantomData; use std::{ borrow::Cow, convert::Infallible, fmt::Debug, ops::Deref, string::FromUtf8Error, sync::Arc, @@ -79,6 +80,16 @@ impl Payload { PayloadReader(self.0.reader()) } + /// Build a [`Payload`] from a [`Reader`]. This operation copies data from the reader. + pub fn from_reader(mut reader: R) -> Result + where + R: std::io::Read, + { + let mut buf: Vec = vec![]; + reader.read_to_end(&mut buf)?; + Ok(Payload::new(buf)) + } + /// Get a [`PayloadReader`] implementing [`std::io::Read`] trait. pub fn iter<'a, T>(&'a self) -> PayloadIterator<'a, T> where @@ -91,10 +102,7 @@ impl Payload { _t: PhantomData::, } } -} -/// Provide some facilities specific to the Rust API to encode/decode a [`Value`] with an `Serialize`. -impl Payload { /// Encode an object of type `T` as a [`Value`] using the [`ZSerde`]. /// /// ```rust @@ -125,6 +133,8 @@ impl Payload { } /// A reader that implements [`std::io::Read`] trait to read from a [`Payload`]. +#[repr(transparent)] +#[derive(Debug)] pub struct PayloadReader<'a>(ZBufReader<'a>); impl std::io::Read for PayloadReader<'_> { @@ -135,6 +145,8 @@ impl std::io::Read for PayloadReader<'_> { /// An iterator that implements [`std::iter::Iterator`] trait to iterate on values `T` in a [`Payload`]. /// Note that [`Payload`] contains a serialized version of `T` and iterating over a [`Payload`] performs lazy deserialization. 
+#[repr(transparent)] +#[derive(Debug)] pub struct PayloadIterator<'a, T> where ZSerde: Deserialize<'a, T>, @@ -245,6 +257,65 @@ impl From<&Payload> for ZBuf { } } +// [u8; N] +impl Serialize<[u8; N]> for ZSerde { + type Output = Payload; + + fn serialize(self, t: [u8; N]) -> Self::Output { + Payload::new(t) + } +} + +impl From<[u8; N]> for Payload { + fn from(t: [u8; N]) -> Self { + ZSerde.serialize(t) + } +} + +impl Serialize<&[u8; N]> for ZSerde { + type Output = Payload; + + fn serialize(self, t: &[u8; N]) -> Self::Output { + Payload::new(*t) + } +} + +impl From<&[u8; N]> for Payload { + fn from(t: &[u8; N]) -> Self { + ZSerde.serialize(t) + } +} + +impl Deserialize<'_, [u8; N]> for ZSerde { + type Error = ZDeserializeError; + + fn deserialize(self, v: &Payload) -> Result<[u8; N], Self::Error> { + if v.0.len() != N { + return Err(ZDeserializeError); + } + let mut dst = [0u8; N]; + let mut reader = v.reader(); + reader.read_exact(&mut dst).map_err(|_| ZDeserializeError)?; + Ok(dst) + } +} + +impl TryFrom for [u8; N] { + type Error = ZDeserializeError; + + fn try_from(value: Payload) -> Result { + ZSerde.deserialize(&value) + } +} + +impl TryFrom<&Payload> for [u8; N] { + type Error = ZDeserializeError; + + fn try_from(value: &Payload) -> Result { + ZSerde.deserialize(value) + } +} + // Vec impl Serialize> for ZSerde { type Output = Payload; @@ -1137,6 +1208,17 @@ mod tests { assert_eq!(i, t); } + let mut v = vec![[0, 1, 2, 3], [4, 5, 6, 7], [8, 9, 10, 11], [12, 13, 14, 15]]; + println!("Serialize:\t{:?}", v); + let p = Payload::from_iter(v.drain(..)); + println!("Deerialize:\t{:?}", p); + let mut iter = p.iter::<[u8; 4]>(); + assert_eq!(iter.next().unwrap(), [0, 1, 2, 3]); + assert_eq!(iter.next().unwrap(), [4, 5, 6, 7]); + assert_eq!(iter.next().unwrap(), [8, 9, 10, 11]); + assert_eq!(iter.next().unwrap(), [12, 13, 14, 15]); + assert!(iter.next().is_none()); + use std::collections::HashMap; let mut hm: HashMap = HashMap::new(); hm.insert(0, 0); From 
2a6bade7cc2d932cee30c18f97848c74511097cd Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Tue, 9 Apr 2024 09:45:05 +0200 Subject: [PATCH 108/124] Improve payload test --- zenoh/src/payload.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/zenoh/src/payload.rs b/zenoh/src/payload.rs index a65843dcaf..4899dd97e6 100644 --- a/zenoh/src/payload.rs +++ b/zenoh/src/payload.rs @@ -1224,9 +1224,9 @@ mod tests { hm.insert(0, 0); hm.insert(1, 1); println!("Serialize:\t{:?}", hm); - let p = Payload::from_iter(hm.iter()); + let p = Payload::from_iter(hm.drain()); println!("Deerialize:\t{:?}", p); - let o: HashMap = HashMap::from_iter(p.iter()); + let o = HashMap::from_iter(p.iter::<(usize, usize)>()); assert_eq!(hm, o); } } From 6793a6b8741fc055633c28e568c2fc8237abbeea Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Tue, 9 Apr 2024 12:20:09 +0200 Subject: [PATCH 109/124] Payload zserde improvement --- zenoh/src/payload.rs | 32 ++++++++++++++++---------------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/zenoh/src/payload.rs b/zenoh/src/payload.rs index 4899dd97e6..59ad8b79b5 100644 --- a/zenoh/src/payload.rs +++ b/zenoh/src/payload.rs @@ -540,7 +540,7 @@ impl<'a> TryFrom<&'a Payload> for Cow<'a, str> { // - Integers impl macro_rules! impl_int { - ($t:ty, $encoding:expr) => { + ($t:ty) => { impl Serialize<$t> for ZSerde { type Output = Payload; @@ -615,22 +615,22 @@ macro_rules! 
impl_int { } // Zenoh unsigned integers -impl_int!(u8, ZSerde::ZENOH_UINT); -impl_int!(u16, ZSerde::ZENOH_UINT); -impl_int!(u32, ZSerde::ZENOH_UINT); -impl_int!(u64, ZSerde::ZENOH_UINT); -impl_int!(usize, ZSerde::ZENOH_UINT); +impl_int!(u8); +impl_int!(u16); +impl_int!(u32); +impl_int!(u64); +impl_int!(usize); // Zenoh signed integers -impl_int!(i8, ZSerde::ZENOH_INT); -impl_int!(i16, ZSerde::ZENOH_INT); -impl_int!(i32, ZSerde::ZENOH_INT); -impl_int!(i64, ZSerde::ZENOH_INT); -impl_int!(isize, ZSerde::ZENOH_INT); +impl_int!(i8); +impl_int!(i16); +impl_int!(i32); +impl_int!(i64); +impl_int!(isize); // Zenoh floats -impl_int!(f32, ZSerde::ZENOH_FLOAT); -impl_int!(f64, ZSerde::ZENOH_FLOAT); +impl_int!(f32); +impl_int!(f64); // Zenoh bool impl Serialize for ZSerde { @@ -1203,7 +1203,7 @@ mod tests { let v: [usize; 5] = [0, 1, 2, 3, 4]; println!("Serialize:\t{:?}", v); let p = Payload::from_iter(v.iter()); - println!("Deerialize:\t{:?}", p); + println!("Deserialize:\t{:?}", p); for (i, t) in p.iter::().enumerate() { assert_eq!(i, t); } @@ -1211,7 +1211,7 @@ mod tests { let mut v = vec![[0, 1, 2, 3], [4, 5, 6, 7], [8, 9, 10, 11], [12, 13, 14, 15]]; println!("Serialize:\t{:?}", v); let p = Payload::from_iter(v.drain(..)); - println!("Deerialize:\t{:?}", p); + println!("Deserialize:\t{:?}", p); let mut iter = p.iter::<[u8; 4]>(); assert_eq!(iter.next().unwrap(), [0, 1, 2, 3]); assert_eq!(iter.next().unwrap(), [4, 5, 6, 7]); @@ -1225,7 +1225,7 @@ mod tests { hm.insert(1, 1); println!("Serialize:\t{:?}", hm); let p = Payload::from_iter(hm.drain()); - println!("Deerialize:\t{:?}", p); + println!("Deserialize:\t{:?}", p); let o = HashMap::from_iter(p.iter::<(usize, usize)>()); assert_eq!(hm, o); } From 7240f0169556a66fb4abca47dcfbcce736a01e53 Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Tue, 9 Apr 2024 12:22:08 +0200 Subject: [PATCH 110/124] Fix encoding doc: suffix to schema --- commons/zenoh-codec/src/core/encoding.rs | 4 ++-- 
commons/zenoh-protocol/src/core/encoding.rs | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/commons/zenoh-codec/src/core/encoding.rs b/commons/zenoh-codec/src/core/encoding.rs index cfbe0084ba..c8033cdd5f 100644 --- a/commons/zenoh-codec/src/core/encoding.rs +++ b/commons/zenoh-codec/src/core/encoding.rs @@ -62,13 +62,13 @@ where fn read(self, reader: &mut R) -> Result { let zodec = Zenoh080Bounded::::new(); let id: u32 = zodec.read(&mut *reader)?; - let (id, has_suffix) = ( + let (id, has_schema) = ( (id >> 1) as EncodingId, imsg::has_flag(id as u8, flag::S as u8), ); let mut schema = None; - if has_suffix { + if has_schema { let zodec = Zenoh080Bounded::::new(); schema = Some(zodec.read(&mut *reader)?); } diff --git a/commons/zenoh-protocol/src/core/encoding.rs b/commons/zenoh-protocol/src/core/encoding.rs index 9b9aa5bf2f..70afdbf143 100644 --- a/commons/zenoh-protocol/src/core/encoding.rs +++ b/commons/zenoh-protocol/src/core/encoding.rs @@ -18,8 +18,8 @@ pub type EncodingId = u16; /// [`Encoding`] is a metadata that indicates how the data payload should be interpreted. /// For wire-efficiency and extensibility purposes, Zenoh defines an [`Encoding`] as -/// composed of an unsigned integer prefix and a string suffix. The actual meaning of the -/// prefix and suffix are out-of-scope of the protocol definition. Therefore, Zenoh does not +/// composed of an unsigned integer prefix and a bytes schema. The actual meaning of the +/// prefix and schema are out-of-scope of the protocol definition. Therefore, Zenoh does not /// impose any encoding mapping and users are free to use any mapping they like. /// Nevertheless, it is worth highlighting that Zenoh still provides a default mapping as part /// of the API as per user convenience. That mapping has no impact on the Zenoh protocol definition. 
@@ -40,7 +40,7 @@ pub struct Encoding { /// +---------------+ /// ``` pub mod flag { - pub const S: u32 = 1; // 0x01 Suffix if S==1 then suffix is present + pub const S: u32 = 1; // 0x01 Suffix if S==1 then schema is present } impl Encoding { From 989509c447df98771abcfaea786e203e885db5b5 Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Tue, 9 Apr 2024 17:11:28 +0200 Subject: [PATCH 111/124] impl Seek for ZBufReader --- commons/zenoh-buffers/src/zbuf.rs | 144 +++++++++++++++++++++++++++--- zenoh/src/payload.rs | 7 +- 2 files changed, 134 insertions(+), 17 deletions(-) diff --git a/commons/zenoh-buffers/src/zbuf.rs b/commons/zenoh-buffers/src/zbuf.rs index fd86f454af..6fded06ae7 100644 --- a/commons/zenoh-buffers/src/zbuf.rs +++ b/commons/zenoh-buffers/src/zbuf.rs @@ -21,6 +21,8 @@ use crate::{ }; use alloc::{sync::Arc, vec::Vec}; use core::{cmp, iter, mem, num::NonZeroUsize, ops::RangeBounds, ptr}; +#[cfg(feature = "std")] +use std::io; use zenoh_collections::SingleOrVec; fn get_mut_unchecked(arc: &mut Arc) -> &mut T { @@ -270,7 +272,7 @@ impl<'a> Reader for ZBufReader<'a> { } fn read_exact(&mut self, into: &mut [u8]) -> Result<(), DidntRead> { - let len = self.read(into)?; + let len = Reader::read(self, into)?; if len.get() == into.len() { Ok(()) } else { @@ -317,7 +319,7 @@ impl<'a> Reader for ZBufReader<'a> { match (slice.len() - self.cursor.byte).cmp(&len) { cmp::Ordering::Less => { let mut buffer = crate::vec::uninit(len); - self.read_exact(&mut buffer)?; + Reader::read_exact(self, &mut buffer)?; Ok(buffer.into()) } cmp::Ordering::Equal => { @@ -388,18 +390,58 @@ impl<'a> SiphonableReader for ZBufReader<'a> { } #[cfg(feature = "std")] -impl<'a> std::io::Read for ZBufReader<'a> { - fn read(&mut self, buf: &mut [u8]) -> std::io::Result { +impl<'a> io::Read for ZBufReader<'a> { + fn read(&mut self, buf: &mut [u8]) -> io::Result { match ::read(self, buf) { Ok(n) => Ok(n.get()), - Err(_) => Err(std::io::Error::new( - std::io::ErrorKind::UnexpectedEof, + Err(_) => 
Err(io::Error::new( + io::ErrorKind::UnexpectedEof, "UnexpectedEof", )), } } } +#[cfg(feature = "std")] +impl<'a> io::Seek for ZBufReader<'a> { + fn seek(&mut self, pos: io::SeekFrom) -> io::Result { + // Compute the index + let len = self.inner.len(); + let index = match pos { + io::SeekFrom::Start(pos) => pos.try_into().unwrap_or(i64::MAX), + io::SeekFrom::End(pos) => { + pos + i64::try_from(len) + .map_err(|e| io::Error::new(io::ErrorKind::UnexpectedEof, e))? + } + io::SeekFrom::Current(pos) => { + pos + i64::try_from(len - self.remaining()) + .map_err(|e| io::Error::new(io::ErrorKind::UnexpectedEof, e))? + } + }; + + let index = usize::try_from(index) + .map_err(|e| io::Error::new(io::ErrorKind::UnexpectedEof, e))? + .min(len); + + // Seek the position + let mut left = index; + let mut pos = ZBufPos { slice: 0, byte: 0 }; + while let Some(slice) = self.inner.slices.get(pos.slice) { + let len = slice.len(); + if len >= left { + pos.byte = left; + self.cursor = pos; + return Ok(index as u64); + } else { + left -= len; + } + pos.slice += 1; + } + + Err(io::ErrorKind::UnexpectedEof.into()) + } +} + // ZSlice iterator pub struct ZBufSliceIterator<'a, 'b> { reader: &'a mut ZBufReader<'b>, @@ -614,18 +656,18 @@ impl BacktrackableWriter for ZBufWriter<'_> { } #[cfg(feature = "std")] -impl<'a> std::io::Write for ZBufWriter<'a> { - fn write(&mut self, buf: &[u8]) -> std::io::Result { +impl<'a> io::Write for ZBufWriter<'a> { + fn write(&mut self, buf: &[u8]) -> io::Result { match ::write(self, buf) { Ok(n) => Ok(n.get()), - Err(_) => Err(std::io::Error::new( - std::io::ErrorKind::UnexpectedEof, + Err(_) => Err(io::Error::new( + io::ErrorKind::UnexpectedEof, "UnexpectedEof", )), } } - fn flush(&mut self) -> std::io::Result<()> { + fn flush(&mut self) -> io::Result<()> { Ok(()) } } @@ -668,4 +710,84 @@ mod tests { assert_eq!(zbuf1, zbuf2); } + + #[cfg(feature = "std")] + #[test] + fn zbuf_seek() { + use crate::reader::HasReader; + use std::io::{Seek, SeekFrom}; + + use 
super::{ZBuf, ZSlice}; + + let slice: ZSlice = [0u8, 1, 2, 3, 4, 5, 6, 7].to_vec().into(); + + let mut zbuf = ZBuf::empty(); + zbuf.push_zslice(slice.subslice(0, 1).unwrap()); + zbuf.push_zslice(slice.subslice(1, 4).unwrap()); + zbuf.push_zslice(slice.subslice(4, 8).unwrap()); + + let mut reader = zbuf.reader(); + + let index = reader.seek(SeekFrom::Start(0)).unwrap(); + assert_eq!(index, 0); + assert_eq!(index, reader.stream_position().unwrap()); + + let index = reader.seek(SeekFrom::Start(4)).unwrap(); + assert_eq!(index, 4); + assert_eq!(index, reader.stream_position().unwrap()); + + let index = reader.seek(SeekFrom::Start(8)).unwrap(); + assert_eq!(index, 8); + assert_eq!(index, reader.stream_position().unwrap()); + + let index = reader.seek(SeekFrom::Start(u64::MAX)).unwrap(); + assert_eq!(index, 8); + assert_eq!(index, reader.stream_position().unwrap()); + + let index = reader.seek(SeekFrom::End(0)).unwrap(); + assert_eq!(index, 8); + assert_eq!(index, reader.stream_position().unwrap()); + + let index = reader.seek(SeekFrom::End(-4)).unwrap(); + assert_eq!(index, 4); + assert_eq!(index, reader.stream_position().unwrap()); + + let index = reader.seek(SeekFrom::End(-8)).unwrap(); + assert_eq!(index, 0); + assert_eq!(index, reader.stream_position().unwrap()); + + reader.seek(SeekFrom::End(i64::MIN)).unwrap_err(); + assert_eq!(index, reader.stream_position().unwrap()); + + let index = reader.seek(SeekFrom::Start(0)).unwrap(); + assert_eq!(index, 0); + assert_eq!(index, reader.stream_position().unwrap()); + + reader.seek(SeekFrom::Current(-1)).unwrap_err(); + assert_eq!(index, reader.stream_position().unwrap()); + + let index = reader.seek(SeekFrom::Current(2)).unwrap(); + assert_eq!(index, 2); + assert_eq!(index, reader.stream_position().unwrap()); + + let index = reader.seek(SeekFrom::Current(2)).unwrap(); + assert_eq!(index, 4); + assert_eq!(index, reader.stream_position().unwrap()); + + let index = reader.seek(SeekFrom::Current(-2)).unwrap(); + 
assert_eq!(index, 2); + assert_eq!(index, reader.stream_position().unwrap()); + + let index = reader.seek(SeekFrom::Current(-2)).unwrap(); + assert_eq!(index, 0); + assert_eq!(index, reader.stream_position().unwrap()); + + let index = reader.seek(SeekFrom::Current(i64::MAX)).unwrap(); + assert_eq!(index, 8); + assert_eq!(index, reader.stream_position().unwrap()); + + let index = reader.seek(SeekFrom::Current(-1)).unwrap(); + assert_eq!(index, 7); + assert_eq!(index, reader.stream_position().unwrap()); + } } diff --git a/zenoh/src/payload.rs b/zenoh/src/payload.rs index 59ad8b79b5..4de36f2d94 100644 --- a/zenoh/src/payload.rs +++ b/zenoh/src/payload.rs @@ -22,7 +22,7 @@ use std::{ use unwrap_infallible::UnwrapInfallible; use zenoh_buffers::{ buffer::{Buffer, SplitBuffer}, - reader::{HasReader, Reader}, + reader::HasReader, writer::HasWriter, ZBufReader, ZSlice, }; @@ -171,11 +171,6 @@ where let t = ZSerde.deserialize(&kpld).ok()?; Some(t) } - - fn size_hint(&self) -> (usize, Option) { - let remaining = self.reader.remaining(); - (remaining, Some(remaining)) - } } impl FromIterator for Payload From 140526b6881ef3ddcc7536ccf879cd86692e36bf Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Tue, 9 Apr 2024 17:14:18 +0200 Subject: [PATCH 112/124] impl Seek for PayloadReader --- zenoh/src/payload.rs | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/zenoh/src/payload.rs b/zenoh/src/payload.rs index 4de36f2d94..7e42b4564a 100644 --- a/zenoh/src/payload.rs +++ b/zenoh/src/payload.rs @@ -14,7 +14,6 @@ //! Payload primitives. 
use crate::buffers::ZBuf; -use std::io::Read; use std::marker::PhantomData; use std::{ borrow::Cow, convert::Infallible, fmt::Debug, ops::Deref, string::FromUtf8Error, sync::Arc, @@ -143,6 +142,12 @@ impl std::io::Read for PayloadReader<'_> { } } +impl std::io::Seek for PayloadReader<'_> { + fn seek(&mut self, pos: std::io::SeekFrom) -> std::io::Result { + std::io::Seek::seek(&mut self.0, pos) + } +} + /// An iterator that implements [`std::iter::Iterator`] trait to iterate on values `T` in a [`Payload`]. /// Note that [`Payload`] contains a serialized version of `T` and iterating over a [`Payload`] performs lazy deserialization. #[repr(transparent)] @@ -285,6 +290,8 @@ impl Deserialize<'_, [u8; N]> for ZSerde { type Error = ZDeserializeError; fn deserialize(self, v: &Payload) -> Result<[u8; N], Self::Error> { + use std::io::Read; + if v.0.len() != N { return Err(ZDeserializeError); } From 2dceb52b4db864d4616fd0ca1d271d2e423752cc Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Tue, 9 Apr 2024 17:40:18 +0200 Subject: [PATCH 113/124] Fix tests --- zenoh/src/payload.rs | 24 ++++++++++++++++++++---- 1 file changed, 20 insertions(+), 4 deletions(-) diff --git a/zenoh/src/payload.rs b/zenoh/src/payload.rs index 7e42b4564a..a63d19d4a9 100644 --- a/zenoh/src/payload.rs +++ b/zenoh/src/payload.rs @@ -19,6 +19,7 @@ use std::{ borrow::Cow, convert::Infallible, fmt::Debug, ops::Deref, string::FromUtf8Error, sync::Arc, }; use unwrap_infallible::UnwrapInfallible; +use zenoh_buffers::ZBufWriter; use zenoh_buffers::{ buffer::{Buffer, SplitBuffer}, reader::HasReader, @@ -148,6 +149,21 @@ impl std::io::Seek for PayloadReader<'_> { } } +/// A writer that implements [`std::io::Write`] trait to write into a [`Payload`]. 
+#[repr(transparent)] +#[derive(Debug)] +pub struct PayloadWriter<'a>(ZBufWriter<'a>); + +impl std::io::Write for PayloadWriter<'_> { + fn write(&mut self, buf: &[u8]) -> std::io::Result { + std::io::Write::write(&mut self.0, buf) + } + + fn flush(&mut self) -> std::io::Result<()> { + Ok(()) + } +} + /// An iterator that implements [`std::iter::Iterator`] trait to iterate on values `T` in a [`Payload`]. /// Note that [`Payload`] contains a serialized version of `T` and iterating over a [`Payload`] performs lazy deserialization. #[repr(transparent)] @@ -1205,7 +1221,7 @@ mod tests { let v: [usize; 5] = [0, 1, 2, 3, 4]; println!("Serialize:\t{:?}", v); let p = Payload::from_iter(v.iter()); - println!("Deserialize:\t{:?}", p); + println!("Deserialize:\t{:?}\n", p); for (i, t) in p.iter::().enumerate() { assert_eq!(i, t); } @@ -1213,7 +1229,7 @@ mod tests { let mut v = vec![[0, 1, 2, 3], [4, 5, 6, 7], [8, 9, 10, 11], [12, 13, 14, 15]]; println!("Serialize:\t{:?}", v); let p = Payload::from_iter(v.drain(..)); - println!("Deserialize:\t{:?}", p); + println!("Deserialize:\t{:?}\n", p); let mut iter = p.iter::<[u8; 4]>(); assert_eq!(iter.next().unwrap(), [0, 1, 2, 3]); assert_eq!(iter.next().unwrap(), [4, 5, 6, 7]); @@ -1226,8 +1242,8 @@ mod tests { hm.insert(0, 0); hm.insert(1, 1); println!("Serialize:\t{:?}", hm); - let p = Payload::from_iter(hm.drain()); - println!("Deserialize:\t{:?}", p); + let p = Payload::from_iter(hm.clone().drain()); + println!("Deserialize:\t{:?}\n", p); let o = HashMap::from_iter(p.iter::<(usize, usize)>()); assert_eq!(hm, o); } From c2c6217bcb894fe7d5319249c3b46f2f5230d998 Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Tue, 9 Apr 2024 18:07:06 +0200 Subject: [PATCH 114/124] Add SHM support for deserializer --- Cargo.lock | 1 + examples/Cargo.toml | 7 +++- examples/examples/z_sub.rs | 1 - examples/examples/z_sub_shm.rs | 66 ++++++++++++++++++++++++++++++++++ zenoh/src/payload.rs | 23 ++++++++++++ 5 files changed, 96 insertions(+), 2 
deletions(-) create mode 100644 examples/examples/z_sub_shm.rs diff --git a/Cargo.lock b/Cargo.lock index 3f74af9ed1..a9d327a978 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4612,6 +4612,7 @@ dependencies = [ "zenoh", "zenoh-collections", "zenoh-ext", + "zenoh-shm", ] [[package]] diff --git a/examples/Cargo.toml b/examples/Cargo.toml index fc1db17fe8..fb9c4c481d 100644 --- a/examples/Cargo.toml +++ b/examples/Cargo.toml @@ -27,7 +27,7 @@ readme = "README.md" publish = false [features] -shared-memory = ["zenoh/shared-memory"] +shared-memory = ["zenoh-shm","zenoh/shared-memory"] unstable = ["zenoh/unstable"] transport_unixpipe = ["zenoh/transport_unixpipe"] @@ -52,6 +52,7 @@ log = { workspace = true } zenoh = { workspace = true } zenoh-collections = { workspace = true } zenoh-ext = { workspace = true } +zenoh-shm = { workspace = true, optional = true } [dev-dependencies] rand = { workspace = true, features = ["default"] } @@ -96,6 +97,10 @@ required-features = ["shared-memory"] name = "z_sub" path = "examples/z_sub.rs" +[[example]] +name = "z_sub_shm" +path = "examples/z_sub_shm.rs" + [[example]] name = "z_pull" path = "examples/z_pull.rs" diff --git a/examples/examples/z_sub.rs b/examples/examples/z_sub.rs index fbce562c2e..299f0c8f49 100644 --- a/examples/examples/z_sub.rs +++ b/examples/examples/z_sub.rs @@ -32,7 +32,6 @@ async fn main() { let session = zenoh::open(config).res().await.unwrap(); println!("Declaring Subscriber on '{}'...", &key_expr); - let subscriber = session.declare_subscriber(&key_expr).res().await.unwrap(); println!("Press CTRL-C to quit..."); diff --git a/examples/examples/z_sub_shm.rs b/examples/examples/z_sub_shm.rs new file mode 100644 index 0000000000..630876f287 --- /dev/null +++ b/examples/examples/z_sub_shm.rs @@ -0,0 +1,66 @@ +// +// Copyright (c) 2023 ZettaScale Technology +// +// This program and the accompanying materials are made available under the +// terms of the Eclipse Public License 2.0 which is available at +// 
http://www.eclipse.org/legal/epl-2.0, or the Apache License, Version 2.0 +// which is available at https://www.apache.org/licenses/LICENSE-2.0. +// +// SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 +// +// Contributors: +// ZettaScale Zenoh Team, +// +use clap::Parser; +use zenoh::config::Config; +use zenoh::prelude::r#async::*; +use zenoh_examples::CommonArgs; +use zenoh_shm::SharedMemoryBuf; + +#[tokio::main] +async fn main() { + // Initiate logging + env_logger::init(); + + let (mut config, key_expr) = parse_args(); + + // A probing procedure for shared memory is performed upon session opening. To enable `z_pub_shm` to operate + // over shared memory (and to not fallback on network mode), shared memory needs to be enabled also on the + // subscriber side. By doing so, the probing procedure will succeed and shared memory will operate as expected. + config.transport.shared_memory.set_enabled(true).unwrap(); + + println!("Opening session..."); + let session = zenoh::open(config).res().await.unwrap(); + + println!("Declaring Subscriber on '{}'...", &key_expr); + let subscriber = session.declare_subscriber(&key_expr).res().await.unwrap(); + + println!("Press CTRL-C to quit..."); + while let Ok(sample) = subscriber.recv_async().await { + match sample.payload().deserialize::() { + Ok(payload) => println!( + ">> [Subscriber] Received {} ('{}': '{:02x?}')", + sample.kind(), + sample.key_expr().as_str(), + payload.as_slice() + ), + Err(e) => { + println!(">> [Subscriber] Not a SharedMemoryBuf: {:?}", e); + } + } + } +} + +#[derive(clap::Parser, Clone, PartialEq, Eq, Hash, Debug)] +struct SubArgs { + #[arg(short, long, default_value = "demo/example/**")] + /// The Key Expression to subscribe to. 
+ key: KeyExpr<'static>, + #[command(flatten)] + common: CommonArgs, +} + +fn parse_args() -> (Config, KeyExpr<'static>) { + let args = SubArgs::parse(); + (args.common.into(), args.key) +} diff --git a/zenoh/src/payload.rs b/zenoh/src/payload.rs index a63d19d4a9..b05cf868a8 100644 --- a/zenoh/src/payload.rs +++ b/zenoh/src/payload.rs @@ -1000,6 +1000,29 @@ impl From for Payload { } } +#[cfg(feature = "shared-memory")] +impl Deserialize<'_, SharedMemoryBuf> for ZSerde { + type Error = ZDeserializeError; + + fn deserialize(self, v: &Payload) -> Result { + for zs in v.0.zslices() { + if let Some(shmb) = zs.downcast_ref::() { + return Ok(shmb.clone()); + } + } + Err(ZDeserializeError) + } +} + +#[cfg(feature = "shared-memory")] +impl TryFrom for SharedMemoryBuf { + type Error = ZDeserializeError; + + fn try_from(value: Payload) -> Result { + ZSerde.deserialize(&value) + } +} + // Tuple impl Serialize<(A, B)> for ZSerde where From e4ee3069e4cff58a79e983d2bdb9c357a5975177 Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Tue, 9 Apr 2024 18:30:30 +0200 Subject: [PATCH 115/124] Fix SharedMemoryBuf deserialize --- zenoh/src/payload.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/zenoh/src/payload.rs b/zenoh/src/payload.rs index b05cf868a8..1cb9fae783 100644 --- a/zenoh/src/payload.rs +++ b/zenoh/src/payload.rs @@ -1005,7 +1005,9 @@ impl Deserialize<'_, SharedMemoryBuf> for ZSerde { type Error = ZDeserializeError; fn deserialize(self, v: &Payload) -> Result { - for zs in v.0.zslices() { + // A SharedMemoryBuf is expected to have only one slice + let mut zslices = v.0.zslices(); + if let Some(zs) = zslices.next() { if let Some(shmb) = zs.downcast_ref::() { return Ok(shmb.clone()); } From cf861e1ecaa75930488e72b8288027828d1eadb4 Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Tue, 9 Apr 2024 18:36:10 +0200 Subject: [PATCH 116/124] Fix clippy examples --- examples/Cargo.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/examples/Cargo.toml 
b/examples/Cargo.toml index fb9c4c481d..4a4a4fef3e 100644 --- a/examples/Cargo.toml +++ b/examples/Cargo.toml @@ -100,6 +100,7 @@ path = "examples/z_sub.rs" [[example]] name = "z_sub_shm" path = "examples/z_sub_shm.rs" +required-features = ["shared-memory"] [[example]] name = "z_pull" From 28e23ab3c2713c2b65e331a7d432c0c2856c63b9 Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Tue, 9 Apr 2024 19:42:25 +0200 Subject: [PATCH 117/124] Add writer method to payload --- zenoh/src/payload.rs | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/zenoh/src/payload.rs b/zenoh/src/payload.rs index 1cb9fae783..f8af7e182a 100644 --- a/zenoh/src/payload.rs +++ b/zenoh/src/payload.rs @@ -14,9 +14,9 @@ //! Payload primitives. use crate::buffers::ZBuf; -use std::marker::PhantomData; use std::{ - borrow::Cow, convert::Infallible, fmt::Debug, ops::Deref, string::FromUtf8Error, sync::Arc, + borrow::Cow, convert::Infallible, fmt::Debug, marker::PhantomData, ops::Deref, + string::FromUtf8Error, sync::Arc, }; use unwrap_infallible::UnwrapInfallible; use zenoh_buffers::ZBufWriter; @@ -57,7 +57,7 @@ impl Payload { Self(ZBuf::empty()) } - /// Create a [`Payload`] from any type `T` that can implements [`Into`]. + /// Create a [`Payload`] from any type `T` that implements [`Into`]. pub fn new(t: T) -> Self where T: Into, @@ -80,7 +80,7 @@ impl Payload { PayloadReader(self.0.reader()) } - /// Build a [`Payload`] from a [`Reader`]. This operation copies data from the reader. + /// Build a [`Payload`] from a generic reader implementing [`std::io::Read`]. This operation copies data from the reader. pub fn from_reader(mut reader: R) -> Result where R: std::io::Read, @@ -103,6 +103,11 @@ impl Payload { } } + /// Get a [`PayloadWriter`] implementing [`std::io::Write`] trait. + pub fn writer(&mut self) -> PayloadWriter<'_> { + PayloadWriter(self.0.writer()) + } + /// Encode an object of type `T` as a [`Value`] using the [`ZSerde`]. 
/// /// ```rust @@ -733,7 +738,7 @@ impl Serialize<&serde_json::Value> for ZSerde { fn serialize(self, t: &serde_json::Value) -> Self::Output { let mut payload = Payload::empty(); - serde_json::to_writer(payload.0.writer(), t)?; + serde_json::to_writer(payload.writer(), t)?; Ok(payload) } } From 042964e11e8a6aa423611f669b33e5426bdfd7bc Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Wed, 10 Apr 2024 11:49:09 +0200 Subject: [PATCH 118/124] Add ZSlice payload support --- commons/zenoh-buffers/src/zbuf.rs | 41 ++++- commons/zenoh-buffers/src/zslice.rs | 4 + .../zenoh-collections/src/single_or_vec.rs | 3 + zenoh/src/payload.rs | 174 +++++++++++++++--- 4 files changed, 187 insertions(+), 35 deletions(-) diff --git a/commons/zenoh-buffers/src/zbuf.rs b/commons/zenoh-buffers/src/zbuf.rs index 6fded06ae7..cfface650a 100644 --- a/commons/zenoh-buffers/src/zbuf.rs +++ b/commons/zenoh-buffers/src/zbuf.rs @@ -17,7 +17,7 @@ use crate::{ buffer::{Buffer, SplitBuffer}, reader::{BacktrackableReader, DidntRead, DidntSiphon, HasReader, Reader, SiphonableReader}, writer::{BacktrackableWriter, DidntWrite, HasWriter, Writer}, - ZSlice, + ZSlice, ZSliceBuffer, }; use alloc::{sync::Arc, vec::Vec}; use core::{cmp, iter, mem, num::NonZeroUsize, ops::RangeBounds, ptr}; @@ -60,6 +60,21 @@ impl ZBuf { } } + pub fn to_zslice(&self) -> ZSlice { + let mut slices = self.zslices(); + match self.slices.len() { + 0 => ZSlice::empty(), + // SAFETY: it's safe to use unwrap_unchecked() beacuse we are explicitly checking the length is 1. 
+ 1 => unsafe { slices.next().unwrap_unchecked().clone() }, + _ => slices + .fold(Vec::new(), |mut acc, it| { + acc.extend(it.as_slice()); + acc + }) + .into(), + } + } + pub fn splice>(&mut self, erased: Range, replacement: &[u8]) { let start = match erased.start_bound() { core::ops::Bound::Included(n) => *n, @@ -201,15 +216,31 @@ impl PartialEq for ZBuf { } // From impls +impl From for ZBuf { + fn from(t: ZSlice) -> Self { + let mut zbuf = ZBuf::empty(); + zbuf.push_zslice(t); + zbuf + } +} + +impl From> for ZBuf +where + T: ZSliceBuffer + 'static, +{ + fn from(t: Arc) -> Self { + let zslice: ZSlice = t.into(); + Self::from(zslice) + } +} + impl From for ZBuf where - T: Into, + T: ZSliceBuffer + 'static, { fn from(t: T) -> Self { - let mut zbuf = ZBuf::empty(); let zslice: ZSlice = t.into(); - zbuf.push_zslice(zslice); - zbuf + Self::from(zslice) } } diff --git a/commons/zenoh-buffers/src/zslice.rs b/commons/zenoh-buffers/src/zslice.rs index c15cbc6828..05c77cac7d 100644 --- a/commons/zenoh-buffers/src/zslice.rs +++ b/commons/zenoh-buffers/src/zslice.rs @@ -114,6 +114,10 @@ impl ZSlice { } } + pub fn empty() -> Self { + unsafe { ZSlice::new_unchecked(Arc::new([]), 0, 0) } + } + /// # Safety /// This function does not verify wether the `start` and `end` indexes are within the buffer boundaries. /// If a [`ZSlice`] is built via this constructor, a later access may panic if `start` and `end` indexes are out-of-bound. 
diff --git a/commons/zenoh-collections/src/single_or_vec.rs b/commons/zenoh-collections/src/single_or_vec.rs index ceb43e4025..ed82bf49af 100644 --- a/commons/zenoh-collections/src/single_or_vec.rs +++ b/commons/zenoh-collections/src/single_or_vec.rs @@ -182,14 +182,17 @@ impl SingleOrVec { self.vectorize().insert(at, value); } } + enum DrainInner<'a, T> { Vec(alloc::vec::Drain<'a, T>), Single(&'a mut SingleOrVecInner), Done, } + pub struct Drain<'a, T> { inner: DrainInner<'a, T>, } + impl<'a, T> Iterator for Drain<'a, T> { type Item = T; diff --git a/zenoh/src/payload.rs b/zenoh/src/payload.rs index f8af7e182a..1b91757329 100644 --- a/zenoh/src/payload.rs +++ b/zenoh/src/payload.rs @@ -91,11 +91,11 @@ impl Payload { } /// Get a [`PayloadReader`] implementing [`std::io::Read`] trait. - pub fn iter<'a, T>(&'a self) -> PayloadIterator<'a, T> + pub fn iter(&self) -> PayloadIterator<'_, T> where - T: TryFrom, - ZSerde: Deserialize<'a, T>, - >::Error: Debug, + T: for<'b> TryFrom<&'b Payload>, + for<'b> ZSerde: Deserialize<'b, T>, + for<'b> >::Error: Debug, { PayloadIterator { reader: self.0.reader(), @@ -126,14 +126,23 @@ impl Payload { } /// Decode an object of type `T` from a [`Value`] using the [`ZSerde`]. - /// See [encode](Value::encode) for an example. pub fn deserialize<'a, T>(&'a self) -> ZResult where ZSerde: Deserialize<'a, T>, >::Error: Debug, { - let t: T = ZSerde.deserialize(self).map_err(|e| zerror!("{:?}", e))?; - Ok(t) + ZSerde + .deserialize(self) + .map_err(|e| zerror!("{:?}", e).into()) + } + + /// Decode an object of type `T` from a [`Value`] using the [`ZSerde`]. 
+ pub fn into<'a, T>(&'a self) -> T + where + ZSerde: Deserialize<'a, T, Error = Infallible>, + >::Error: Debug, + { + ZSerde.deserialize(self).unwrap_infallible() } } @@ -181,10 +190,10 @@ where _t: PhantomData, } -impl<'a, T> Iterator for PayloadIterator<'a, T> +impl Iterator for PayloadIterator<'_, T> where - ZSerde: for<'b> Deserialize<'b, T>, - >::Error: Debug, + for<'a> ZSerde: Deserialize<'a, T>, + for<'a> >::Error: Debug, { type Item = T; @@ -278,6 +287,55 @@ impl From<&Payload> for ZBuf { } } +// ZSlice +impl Serialize for ZSerde { + type Output = Payload; + + fn serialize(self, t: ZSlice) -> Self::Output { + Payload::new(t) + } +} + +impl From for Payload { + fn from(t: ZSlice) -> Self { + ZSerde.serialize(t) + } +} + +impl Serialize<&ZSlice> for ZSerde { + type Output = Payload; + + fn serialize(self, t: &ZSlice) -> Self::Output { + Payload::new(t.clone()) + } +} + +impl From<&ZSlice> for Payload { + fn from(t: &ZSlice) -> Self { + ZSerde.serialize(t) + } +} + +impl Deserialize<'_, ZSlice> for ZSerde { + type Error = Infallible; + + fn deserialize(self, v: &Payload) -> Result { + Ok(v.0.to_zslice()) + } +} + +impl From for ZSlice { + fn from(value: Payload) -> Self { + ZBuf::from(value).to_zslice() + } +} + +impl From<&Payload> for ZSlice { + fn from(value: &Payload) -> Self { + ZSerde.deserialize(value).unwrap_infallible() + } +} + // [u8; N] impl Serialize<[u8; N]> for ZSerde { type Output = Payload; @@ -515,7 +573,6 @@ impl From<&str> for Payload { } } -// Cow impl<'a> Serialize> for ZSerde { type Output = Payload; @@ -1069,16 +1126,16 @@ where } } -impl<'a, A, B> Deserialize<'a, (A, B)> for ZSerde +impl Deserialize<'_, (A, B)> for ZSerde where - A: TryFrom, - >::Error: Debug, - B: TryFrom, - >::Error: Debug, + for<'a> A: TryFrom<&'a Payload>, + for<'a> >::Error: Debug, + for<'b> B: TryFrom<&'b Payload>, + for<'b> >::Error: Debug, { type Error = ZError; - fn deserialize(self, payload: &'a Payload) -> Result<(A, B), Self::Error> { + fn 
deserialize(self, payload: &Payload) -> Result<(A, B), Self::Error> { let codec = Zenoh080::new(); let mut reader = payload.0.reader(); @@ -1088,18 +1145,18 @@ where let bbuf: ZBuf = codec.read(&mut reader).map_err(|e| zerror!("{:?}", e))?; let bpld = Payload::new(bbuf); - let a = A::try_from(apld).map_err(|e| zerror!("{:?}", e))?; - let b = B::try_from(bpld).map_err(|e| zerror!("{:?}", e))?; + let a = A::try_from(&apld).map_err(|e| zerror!("{:?}", e))?; + let b = B::try_from(&bpld).map_err(|e| zerror!("{:?}", e))?; Ok((a, b)) } } impl TryFrom for (A, B) where - A: TryFrom, - >::Error: Debug, - B: TryFrom, - >::Error: Debug, + A: for<'a> TryFrom<&'a Payload>, + for<'a> >::Error: Debug, + for<'b> B: TryFrom<&'b Payload>, + for<'b> >::Error: Debug, { type Error = ZError; @@ -1108,6 +1165,20 @@ where } } +impl TryFrom<&Payload> for (A, B) +where + for<'a> A: TryFrom<&'a Payload>, + for<'a> >::Error: Debug, + for<'b> B: TryFrom<&'b Payload>, + for<'b> >::Error: Debug, +{ + type Error = ZError; + + fn try_from(value: &Payload) -> Result { + ZSerde.deserialize(value) + } +} + // For convenience to always convert a Value in the examples #[derive(Debug, Clone, PartialEq, Eq)] pub enum StringOrBase64 { @@ -1142,12 +1213,9 @@ impl std::fmt::Display for StringOrBase64 { impl From<&Payload> for StringOrBase64 { fn from(v: &Payload) -> Self { use base64::{engine::general_purpose::STANDARD as b64_std_engine, Engine}; - match v.deserialize::>() { - Ok(s) => StringOrBase64::String(s.into_owned()), - Err(_) => { - let cow: Cow<'_, [u8]> = Cow::from(v); - StringOrBase64::Base64(b64_std_engine.encode(cow)) - } + match v.deserialize::() { + Ok(s) => StringOrBase64::String(s), + Err(_) => StringOrBase64::Base64(b64_std_engine.encode(v.into::>())), } } } @@ -1157,7 +1225,7 @@ mod tests { fn serializer() { use super::Payload; use rand::Rng; - use zenoh_buffers::ZBuf; + use zenoh_buffers::{ZBuf, ZSlice}; const NUM: usize = 1_000; @@ -1276,5 +1344,51 @@ mod tests { 
println!("Deserialize:\t{:?}\n", p); let o = HashMap::from_iter(p.iter::<(usize, usize)>()); assert_eq!(hm, o); + + let mut hm: HashMap> = HashMap::new(); + hm.insert(0, vec![0u8; 8]); + hm.insert(1, vec![1u8; 16]); + println!("Serialize:\t{:?}", hm); + let p = Payload::from_iter(hm.clone().drain()); + println!("Deserialize:\t{:?}\n", p); + let o = HashMap::from_iter(p.iter::<(usize, Vec)>()); + assert_eq!(hm, o); + + let mut hm: HashMap> = HashMap::new(); + hm.insert(0, vec![0u8; 8]); + hm.insert(1, vec![1u8; 16]); + println!("Serialize:\t{:?}", hm); + let p = Payload::from_iter(hm.clone().drain()); + println!("Deserialize:\t{:?}\n", p); + let o = HashMap::from_iter(p.iter::<(usize, Vec)>()); + assert_eq!(hm, o); + + let mut hm: HashMap = HashMap::new(); + hm.insert(0, ZSlice::from(vec![0u8; 8])); + hm.insert(1, ZSlice::from(vec![1u8; 16])); + println!("Serialize:\t{:?}", hm); + let p = Payload::from_iter(hm.clone().drain()); + println!("Deserialize:\t{:?}\n", p); + let o = HashMap::from_iter(p.iter::<(usize, ZSlice)>()); + assert_eq!(hm, o); + + let mut hm: HashMap = HashMap::new(); + hm.insert(0, ZBuf::from(vec![0u8; 8])); + hm.insert(1, ZBuf::from(vec![1u8; 16])); + println!("Serialize:\t{:?}", hm); + let p = Payload::from_iter(hm.clone().drain()); + println!("Deserialize:\t{:?}\n", p); + let o = HashMap::from_iter(p.iter::<(usize, ZBuf)>()); + assert_eq!(hm, o); + + use std::borrow::Cow; + let mut hm: HashMap> = HashMap::new(); + hm.insert(0, vec![0u8; 8]); + hm.insert(1, vec![1u8; 16]); + println!("Serialize:\t{:?}", hm); + let p = Payload::from_iter(hm.clone().iter().map(|(k, v)| (k, Cow::from(v)))); + println!("Deserialize:\t{:?}\n", p); + let o = HashMap::from_iter(p.iter::<(usize, Vec)>()); + assert_eq!(hm, o); } } From abded105583f165d939ac9b24174e6a65b11abbb Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Wed, 10 Apr 2024 11:58:42 +0200 Subject: [PATCH 119/124] Improve payload --- zenoh/src/payload.rs | 13 ++++++++++++- 1 file changed, 12 
insertions(+), 1 deletion(-) diff --git a/zenoh/src/payload.rs b/zenoh/src/payload.rs index 1b91757329..aed0d15834 100644 --- a/zenoh/src/payload.rs +++ b/zenoh/src/payload.rs @@ -1225,6 +1225,7 @@ mod tests { fn serializer() { use super::Payload; use rand::Rng; + use std::borrow::Cow; use zenoh_buffers::{ZBuf, ZSlice}; const NUM: usize = 1_000; @@ -1302,10 +1303,21 @@ mod tests { serialize_deserialize!(String, ""); serialize_deserialize!(String, String::from("abcdefghijklmnopqrstuvwxyz")); + // Cow + serialize_deserialize!(Cow, Cow::from("")); + serialize_deserialize!( + Cow, + Cow::from(String::from("abcdefghijklmnopqrstuvwxyz")) + ); + // Vec serialize_deserialize!(Vec, vec![0u8; 0]); serialize_deserialize!(Vec, vec![0u8; 64]); + // Cow<[u8]> + serialize_deserialize!(Cow<[u8]>, Cow::from(vec![0u8; 0])); + serialize_deserialize!(Cow<[u8]>, Cow::from(vec![0u8; 64])); + // ZBuf serialize_deserialize!(ZBuf, ZBuf::from(vec![0u8; 0])); serialize_deserialize!(ZBuf, ZBuf::from(vec![0u8; 64])); @@ -1381,7 +1393,6 @@ mod tests { let o = HashMap::from_iter(p.iter::<(usize, ZBuf)>()); assert_eq!(hm, o); - use std::borrow::Cow; let mut hm: HashMap> = HashMap::new(); hm.insert(0, vec![0u8; 8]); hm.insert(1, vec![1u8; 16]); From adf422d89945f1958ff2460f0816c684fa2cfe37 Mon Sep 17 00:00:00 2001 From: Michael Ilyin Date: Wed, 10 Apr 2024 12:30:41 +0200 Subject: [PATCH 120/124] allowed build zenoh without unstable feature set (#910) --- zenoh/src/publication.rs | 1 + zenoh/src/query.rs | 2 ++ zenoh/src/queryable.rs | 12 ++++++++++-- zenoh/src/sample/builder.rs | 1 + zenoh/src/sample/mod.rs | 8 ++++---- zenoh/src/session.rs | 6 +++++- zenoh/src/subscriber.rs | 3 --- 7 files changed, 23 insertions(+), 10 deletions(-) diff --git a/zenoh/src/publication.rs b/zenoh/src/publication.rs index c176ad32e0..4f31c73a24 100644 --- a/zenoh/src/publication.rs +++ b/zenoh/src/publication.rs @@ -157,6 +157,7 @@ impl

ValueBuilderTrait for PublicationBuilder { } } +#[zenoh_macros::unstable] impl SampleBuilderTrait for PublicationBuilder { #[cfg(feature = "unstable")] fn source_info(self, source_info: SourceInfo) -> Self { diff --git a/zenoh/src/query.rs b/zenoh/src/query.rs index cb1116130d..3a380bd1c9 100644 --- a/zenoh/src/query.rs +++ b/zenoh/src/query.rs @@ -133,6 +133,7 @@ pub struct GetBuilder<'a, 'b, Handler> { pub(crate) source_info: SourceInfo, } +#[zenoh_macros::unstable] impl SampleBuilderTrait for GetBuilder<'_, '_, Handler> { #[cfg(feature = "unstable")] fn source_info(self, source_info: SourceInfo) -> Self { @@ -430,6 +431,7 @@ where self.value, #[cfg(feature = "unstable")] self.attachment, + #[cfg(feature = "unstable")] self.source_info, callback, ) diff --git a/zenoh/src/queryable.rs b/zenoh/src/queryable.rs index 6fbb4e9090..0ad3a36c07 100644 --- a/zenoh/src/queryable.rs +++ b/zenoh/src/queryable.rs @@ -18,12 +18,15 @@ use crate::encoding::Encoding; use crate::handlers::{locked, DefaultHandler}; use crate::net::primitives::Primitives; use crate::prelude::*; -use crate::sample::{QoSBuilder, SourceInfo}; +use crate::sample::builder::SampleBuilder; +use crate::sample::QoSBuilder; +#[cfg(feature = "unstable")] +use crate::sample::SourceInfo; use crate::Id; use crate::SessionRef; use crate::Undeclarable; #[cfg(feature = "unstable")] -use crate::{query::ReplyKeyExpr, sample::builder::SampleBuilder, sample::Attachment}; +use crate::{query::ReplyKeyExpr, sample::Attachment}; use std::fmt; use std::future::Ready; use std::ops::Deref; @@ -155,7 +158,9 @@ impl Query { encoding: Encoding::default(), }, timestamp: None, + #[cfg(feature = "unstable")] source_info: SourceInfo::empty(), + #[cfg(feature = "unstable")] attachment: None, } } @@ -193,7 +198,9 @@ impl Query { qos: response::ext::QoSType::RESPONSE.into(), kind: ReplyBuilderDelete, timestamp: None, + #[cfg(feature = "unstable")] source_info: SourceInfo::empty(), + #[cfg(feature = "unstable")] attachment: None, } } @@ 
-298,6 +305,7 @@ impl TimestampBuilderTrait for ReplyBuilder<'_, '_, T> { } } +#[cfg(feature = "unstable")] impl SampleBuilderTrait for ReplyBuilder<'_, '_, T> { #[cfg(feature = "unstable")] fn attachment>>(self, attachment: U) -> Self { diff --git a/zenoh/src/sample/builder.rs b/zenoh/src/sample/builder.rs index fca55edd09..bad35024ef 100644 --- a/zenoh/src/sample/builder.rs +++ b/zenoh/src/sample/builder.rs @@ -163,6 +163,7 @@ impl TimestampBuilderTrait for SampleBuilder { } } +#[cfg(feature = "unstable")] impl SampleBuilderTrait for SampleBuilder { #[zenoh_macros::unstable] fn source_info(self, source_info: SourceInfo) -> Self { diff --git a/zenoh/src/sample/mod.rs b/zenoh/src/sample/mod.rs index 6e457578a3..0ef8462d2a 100644 --- a/zenoh/src/sample/mod.rs +++ b/zenoh/src/sample/mod.rs @@ -22,9 +22,9 @@ use crate::Priority; #[zenoh_macros::unstable] use serde::Serialize; use std::{convert::TryFrom, fmt}; +use zenoh_protocol::core::CongestionControl; use zenoh_protocol::core::EntityGlobalId; use zenoh_protocol::network::declare::ext::QoSType; -use zenoh_protocol::{core::CongestionControl, zenoh}; pub mod builder; @@ -178,12 +178,12 @@ impl SourceInfo { } #[zenoh_macros::unstable] -impl From for Option { - fn from(source_info: SourceInfo) -> Option { +impl From for Option { + fn from(source_info: SourceInfo) -> Option { if source_info.is_empty() { None } else { - Some(zenoh::put::ext::SourceInfoType { + Some(zenoh_protocol::zenoh::put::ext::SourceInfoType { id: source_info.source_id.unwrap_or_default(), sn: source_info.source_sn.unwrap_or_default() as u32, }) diff --git a/zenoh/src/session.rs b/zenoh/src/session.rs index f694eb6420..181976dcb0 100644 --- a/zenoh/src/session.rs +++ b/zenoh/src/session.rs @@ -61,6 +61,8 @@ use zenoh_config::unwrap_or_default; use zenoh_core::{zconfigurable, zread, Resolve, ResolveClosure, ResolveFuture, SyncResolve}; #[cfg(feature = "unstable")] use zenoh_protocol::network::declare::SubscriberId; +#[cfg(feature = "unstable")] +use 
zenoh_protocol::network::ext; use zenoh_protocol::network::AtomicRequestId; use zenoh_protocol::network::RequestId; use zenoh_protocol::zenoh::reply::ReplyBody; @@ -77,7 +79,6 @@ use zenoh_protocol::{ subscriber::ext::SubscriberInfo, Declare, DeclareBody, DeclareKeyExpr, DeclareMode, DeclareQueryable, DeclareSubscriber, UndeclareQueryable, UndeclareSubscriber, }, - ext, request::{self, ext::TargetType, Request}, Mapping, Push, Response, ResponseFinal, }, @@ -1687,7 +1688,10 @@ impl Session { payload: RequestBody::Query(zenoh_protocol::zenoh::Query { consolidation, parameters: selector.parameters().to_string(), + #[cfg(feature = "unstable")] ext_sinfo: source.into(), + #[cfg(not(feature = "unstable"))] + ext_sinfo: None, ext_body: value.as_ref().map(|v| query::ext::QueryBodyType { #[cfg(feature = "shared-memory")] ext_shm: None, diff --git a/zenoh/src/subscriber.rs b/zenoh/src/subscriber.rs index 60a31a6577..47d41ebb1f 100644 --- a/zenoh/src/subscriber.rs +++ b/zenoh/src/subscriber.rs @@ -202,9 +202,6 @@ pub struct SubscriberBuilder<'a, 'b, Handler> { #[cfg(not(feature = "unstable"))] pub(crate) reliability: Reliability, - #[cfg(not(feature = "unstable"))] - pub(crate) mode: Mode, - #[cfg(feature = "unstable")] pub origin: Locality, #[cfg(not(feature = "unstable"))] From 7d9d57c2b2ef023a0c5887efb092250f2ff2ef44 Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Wed, 10 Apr 2024 13:03:54 +0200 Subject: [PATCH 121/124] Fix tests --- zenoh/src/payload.rs | 23 ++++++++++++----------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/zenoh/src/payload.rs b/zenoh/src/payload.rs index aed0d15834..eac4f58e7c 100644 --- a/zenoh/src/payload.rs +++ b/zenoh/src/payload.rs @@ -14,6 +14,7 @@ //! Payload primitives. 
use crate::buffers::ZBuf; +use std::str::Utf8Error; use std::{ borrow::Cow, convert::Infallible, fmt::Debug, marker::PhantomData, ops::Deref, string::FromUtf8Error, sync::Arc, @@ -494,7 +495,7 @@ impl<'a> Deserialize<'a, Cow<'a, [u8]>> for ZSerde { type Error = Infallible; fn deserialize(self, v: &'a Payload) -> Result, Self::Error> { - Ok(Cow::from(v)) + Ok(v.0.contiguous()) } } @@ -602,16 +603,19 @@ impl From<&Cow<'_, str>> for Payload { } impl<'a> Deserialize<'a, Cow<'a, str>> for ZSerde { - type Error = FromUtf8Error; + type Error = Utf8Error; - fn deserialize(self, v: &Payload) -> Result, Self::Error> { - let v: String = Self.deserialize(v)?; - Ok(Cow::Owned(v)) + fn deserialize(self, v: &'a Payload) -> Result, Self::Error> { + let v: Cow<[u8]> = Self.deserialize(v).unwrap_infallible(); + let _ = core::str::from_utf8(v.as_ref())?; + // SAFETY: &str is &[u8] with the guarantee that every char is UTF-8 + // As implemented internally https://doc.rust-lang.org/std/str/fn.from_utf8_unchecked.html. 
+ Ok(unsafe { core::mem::transmute(v) }) } } impl<'a> TryFrom<&'a Payload> for Cow<'a, str> { - type Error = FromUtf8Error; + type Error = Utf8Error; fn try_from(value: &'a Payload) -> Result { ZSerde.deserialize(value) @@ -1301,14 +1305,11 @@ mod tests { // String serialize_deserialize!(String, ""); - serialize_deserialize!(String, String::from("abcdefghijklmnopqrstuvwxyz")); + serialize_deserialize!(String, String::from("abcdef")); // Cow serialize_deserialize!(Cow, Cow::from("")); - serialize_deserialize!( - Cow, - Cow::from(String::from("abcdefghijklmnopqrstuvwxyz")) - ); + serialize_deserialize!(Cow, Cow::from(String::from("abcdef"))); // Vec serialize_deserialize!(Vec, vec![0u8; 0]); From 27063b6fd2f15be36aa3988c37cf1cbb46933c40 Mon Sep 17 00:00:00 2001 From: Luca Cominardi Date: Wed, 10 Apr 2024 15:10:17 +0200 Subject: [PATCH 122/124] Integrating #918 --- commons/zenoh-buffers/src/lib.rs | 12 ++ commons/zenoh-buffers/src/zbuf.rs | 213 +++++++++++++++--------------- 2 files changed, 117 insertions(+), 108 deletions(-) diff --git a/commons/zenoh-buffers/src/lib.rs b/commons/zenoh-buffers/src/lib.rs index eae7f1715c..117fb412b7 100644 --- a/commons/zenoh-buffers/src/lib.rs +++ b/commons/zenoh-buffers/src/lib.rs @@ -199,6 +199,18 @@ pub mod reader { fn rewind(&mut self, mark: Self::Mark) -> bool; } + pub trait AdvanceableReader: Reader { + fn skip(&mut self, offset: usize) -> Result<(), DidntRead>; + fn backtrack(&mut self, offset: usize) -> Result<(), DidntRead>; + fn advance(&mut self, offset: isize) -> Result<(), DidntRead> { + if offset > 0 { + self.skip(offset as usize) + } else { + self.backtrack((-offset) as usize) + } + } + } + #[derive(Debug, Clone, Copy)] pub struct DidntSiphon; diff --git a/commons/zenoh-buffers/src/zbuf.rs b/commons/zenoh-buffers/src/zbuf.rs index cfface650a..f3621049b0 100644 --- a/commons/zenoh-buffers/src/zbuf.rs +++ b/commons/zenoh-buffers/src/zbuf.rs @@ -15,7 +15,10 @@ use crate::ZSliceKind; use crate::{ buffer::{Buffer, 
SplitBuffer}, - reader::{BacktrackableReader, DidntRead, DidntSiphon, HasReader, Reader, SiphonableReader}, + reader::{ + AdvanceableReader, BacktrackableReader, DidntRead, DidntSiphon, HasReader, Reader, + SiphonableReader, + }, writer::{BacktrackableWriter, DidntWrite, HasWriter, Writer}, ZSlice, ZSliceBuffer, }; @@ -433,43 +436,74 @@ impl<'a> io::Read for ZBufReader<'a> { } } -#[cfg(feature = "std")] -impl<'a> io::Seek for ZBufReader<'a> { - fn seek(&mut self, pos: io::SeekFrom) -> io::Result { - // Compute the index - let len = self.inner.len(); - let index = match pos { - io::SeekFrom::Start(pos) => pos.try_into().unwrap_or(i64::MAX), - io::SeekFrom::End(pos) => { - pos + i64::try_from(len) - .map_err(|e| io::Error::new(io::ErrorKind::UnexpectedEof, e))? - } - io::SeekFrom::Current(pos) => { - pos + i64::try_from(len - self.remaining()) - .map_err(|e| io::Error::new(io::ErrorKind::UnexpectedEof, e))? +impl<'a> AdvanceableReader for ZBufReader<'a> { + fn skip(&mut self, offset: usize) -> Result<(), DidntRead> { + let mut remaining_offset = offset; + while remaining_offset > 0 { + let s = self.inner.slices.get(self.cursor.slice).ok_or(DidntRead)?; + let remains_in_current_slice = s.len() - self.cursor.byte; + let advance = remaining_offset.min(remains_in_current_slice); + remaining_offset -= advance; + self.cursor.byte += advance; + if self.cursor.byte == s.len() { + self.cursor.slice += 1; + self.cursor.byte = 0; } - }; + } + Ok(()) + } - let index = usize::try_from(index) - .map_err(|e| io::Error::new(io::ErrorKind::UnexpectedEof, e))? 
- .min(len); - - // Seek the position - let mut left = index; - let mut pos = ZBufPos { slice: 0, byte: 0 }; - while let Some(slice) = self.inner.slices.get(pos.slice) { - let len = slice.len(); - if len >= left { - pos.byte = left; - self.cursor = pos; - return Ok(index as u64); - } else { - left -= len; + fn backtrack(&mut self, offset: usize) -> Result<(), DidntRead> { + let mut remaining_offset = offset; + while remaining_offset > 0 { + let backtrack = remaining_offset.min(self.cursor.byte); + remaining_offset -= backtrack; + self.cursor.byte -= backtrack; + if self.cursor.byte == 0 { + if self.cursor.slice == 0 { + break; + } + self.cursor.slice -= 1; + self.cursor.byte = self + .inner + .slices + .get(self.cursor.slice) + .ok_or(DidntRead)? + .len(); } - pos.slice += 1; } + if remaining_offset == 0 { + Ok(()) + } else { + Err(DidntRead) + } + } +} - Err(io::ErrorKind::UnexpectedEof.into()) +#[cfg(feature = "std")] +impl<'a> io::Seek for ZBufReader<'a> { + fn seek(&mut self, pos: io::SeekFrom) -> io::Result { + let current_pos = self + .inner + .slices() + .take(self.cursor.slice) + .fold(0, |acc, s| acc + s.len()) + + self.cursor.byte; + let current_pos = i64::try_from(current_pos) + .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, format!("{}", e)))?; + + let offset = match pos { + std::io::SeekFrom::Start(s) => i64::try_from(s).unwrap_or(i64::MAX) - current_pos, + std::io::SeekFrom::Current(s) => s, + std::io::SeekFrom::End(s) => self.inner.len() as i64 + s - current_pos, + }; + match self.advance(offset as isize) { + Ok(()) => Ok((offset + current_pos) as u64), + Err(_) => Err(std::io::Error::new( + std::io::ErrorKind::InvalidInput, + "InvalidInput", + )), + } } } @@ -745,80 +779,43 @@ mod tests { #[cfg(feature = "std")] #[test] fn zbuf_seek() { - use crate::reader::HasReader; - use std::io::{Seek, SeekFrom}; - - use super::{ZBuf, ZSlice}; - - let slice: ZSlice = [0u8, 1, 2, 3, 4, 5, 6, 7].to_vec().into(); - - let mut zbuf = ZBuf::empty(); - 
zbuf.push_zslice(slice.subslice(0, 1).unwrap()); - zbuf.push_zslice(slice.subslice(1, 4).unwrap()); - zbuf.push_zslice(slice.subslice(4, 8).unwrap()); - - let mut reader = zbuf.reader(); - - let index = reader.seek(SeekFrom::Start(0)).unwrap(); - assert_eq!(index, 0); - assert_eq!(index, reader.stream_position().unwrap()); - - let index = reader.seek(SeekFrom::Start(4)).unwrap(); - assert_eq!(index, 4); - assert_eq!(index, reader.stream_position().unwrap()); - - let index = reader.seek(SeekFrom::Start(8)).unwrap(); - assert_eq!(index, 8); - assert_eq!(index, reader.stream_position().unwrap()); - - let index = reader.seek(SeekFrom::Start(u64::MAX)).unwrap(); - assert_eq!(index, 8); - assert_eq!(index, reader.stream_position().unwrap()); - - let index = reader.seek(SeekFrom::End(0)).unwrap(); - assert_eq!(index, 8); - assert_eq!(index, reader.stream_position().unwrap()); - - let index = reader.seek(SeekFrom::End(-4)).unwrap(); - assert_eq!(index, 4); - assert_eq!(index, reader.stream_position().unwrap()); - - let index = reader.seek(SeekFrom::End(-8)).unwrap(); - assert_eq!(index, 0); - assert_eq!(index, reader.stream_position().unwrap()); - - reader.seek(SeekFrom::End(i64::MIN)).unwrap_err(); - assert_eq!(index, reader.stream_position().unwrap()); - - let index = reader.seek(SeekFrom::Start(0)).unwrap(); - assert_eq!(index, 0); - assert_eq!(index, reader.stream_position().unwrap()); - - reader.seek(SeekFrom::Current(-1)).unwrap_err(); - assert_eq!(index, reader.stream_position().unwrap()); - - let index = reader.seek(SeekFrom::Current(2)).unwrap(); - assert_eq!(index, 2); - assert_eq!(index, reader.stream_position().unwrap()); - - let index = reader.seek(SeekFrom::Current(2)).unwrap(); - assert_eq!(index, 4); - assert_eq!(index, reader.stream_position().unwrap()); - - let index = reader.seek(SeekFrom::Current(-2)).unwrap(); - assert_eq!(index, 2); - assert_eq!(index, reader.stream_position().unwrap()); - - let index = reader.seek(SeekFrom::Current(-2)).unwrap(); - 
assert_eq!(index, 0); - assert_eq!(index, reader.stream_position().unwrap()); - - let index = reader.seek(SeekFrom::Current(i64::MAX)).unwrap(); - assert_eq!(index, 8); - assert_eq!(index, reader.stream_position().unwrap()); - - let index = reader.seek(SeekFrom::Current(-1)).unwrap(); - assert_eq!(index, 7); - assert_eq!(index, reader.stream_position().unwrap()); + use super::{HasReader, ZBuf}; + use crate::reader::Reader; + use std::io::Seek; + + let mut buf = ZBuf::empty(); + buf.push_zslice([0u8, 1u8, 2u8, 3u8].into()); + buf.push_zslice([4u8, 5u8, 6u8, 7u8, 8u8].into()); + buf.push_zslice([9u8, 10u8, 11u8, 12u8, 13u8, 14u8].into()); + let mut reader = buf.reader(); + + assert_eq!(reader.stream_position().unwrap(), 0); + assert_eq!(reader.read_u8().unwrap(), 0); + assert_eq!(reader.seek(std::io::SeekFrom::Current(6)).unwrap(), 7); + assert_eq!(reader.read_u8().unwrap(), 7); + assert_eq!(reader.seek(std::io::SeekFrom::Current(-5)).unwrap(), 3); + assert_eq!(reader.read_u8().unwrap(), 3); + assert_eq!(reader.seek(std::io::SeekFrom::Current(10)).unwrap(), 14); + assert_eq!(reader.read_u8().unwrap(), 14); + reader.seek(std::io::SeekFrom::Current(100)).unwrap_err(); + + assert_eq!(reader.seek(std::io::SeekFrom::Start(0)).unwrap(), 0); + assert_eq!(reader.read_u8().unwrap(), 0); + assert_eq!(reader.seek(std::io::SeekFrom::Start(12)).unwrap(), 12); + assert_eq!(reader.read_u8().unwrap(), 12); + assert_eq!(reader.seek(std::io::SeekFrom::Start(15)).unwrap(), 15); + reader.read_u8().unwrap_err(); + reader.seek(std::io::SeekFrom::Start(100)).unwrap_err(); + + assert_eq!(reader.seek(std::io::SeekFrom::End(0)).unwrap(), 15); + reader.read_u8().unwrap_err(); + assert_eq!(reader.seek(std::io::SeekFrom::End(-5)).unwrap(), 10); + assert_eq!(reader.read_u8().unwrap(), 10); + assert_eq!(reader.seek(std::io::SeekFrom::End(-15)).unwrap(), 0); + assert_eq!(reader.read_u8().unwrap(), 0); + reader.seek(std::io::SeekFrom::End(-20)).unwrap_err(); + + 
assert_eq!(reader.seek(std::io::SeekFrom::Start(10)).unwrap(), 10);
+        reader.seek(std::io::SeekFrom::Current(-100)).unwrap_err();
     }
 }
 
From a1c2a024e6343222eb110595ee166804b24d0397 Mon Sep 17 00:00:00 2001
From: Luca Cominardi 
Date: Wed, 10 Apr 2024 15:12:25 +0200
Subject: [PATCH 123/124] Fix ZBuf io::Read impl

---
 commons/zenoh-buffers/src/zbuf.rs | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/commons/zenoh-buffers/src/zbuf.rs b/commons/zenoh-buffers/src/zbuf.rs
index f3621049b0..4a655ce36a 100644
--- a/commons/zenoh-buffers/src/zbuf.rs
+++ b/commons/zenoh-buffers/src/zbuf.rs
@@ -428,10 +428,7 @@ impl<'a> io::Read for ZBufReader<'a> {
     fn read(&mut self, buf: &mut [u8]) -> io::Result {
         match ::read(self, buf) {
             Ok(n) => Ok(n.get()),
-            Err(_) => Err(io::Error::new(
-                io::ErrorKind::UnexpectedEof,
-                "UnexpectedEof",
-            )),
+            Err(_) => Ok(0),
         }
     }
 }

From 834be851a79b4787ad4ad3639c28dd86e66c8c12 Mon Sep 17 00:00:00 2001
From: Michael Ilyin 
Date: Wed, 10 Apr 2024 18:00:39 +0200
Subject: [PATCH 124/124] compilation fixes

---
 zenoh-ext/src/publication_cache.rs |  4 +-
 zenoh/src/net/tests/tables.rs      | 69 ------------------------------
 2 files changed, 2 insertions(+), 71 deletions(-)

diff --git a/zenoh-ext/src/publication_cache.rs b/zenoh-ext/src/publication_cache.rs
index 608a051d05..9f2b645da9 100644
--- a/zenoh-ext/src/publication_cache.rs
+++ b/zenoh-ext/src/publication_cache.rs
@@ -181,9 +181,9 @@ impl<'a> PublicationCache<'a> {
                     sample = sub_recv.recv_async() => {
                         if let Ok(sample) = sample {
                             let queryable_key_expr: KeyExpr<'_> = if let Some(prefix) = &queryable_prefix {
-                                prefix.join(&sample.key_expr).unwrap().into()
+                                prefix.join(&sample.key_expr()).unwrap().into()
                             } else {
-                                sample.key_expr.clone()
+                                sample.key_expr().clone()
                             };
 
                             if let Some(queue) = cache.get_mut(queryable_key_expr.as_keyexpr()) {
diff --git a/zenoh/src/net/tests/tables.rs b/zenoh/src/net/tests/tables.rs
index f5a4b24e2a..35db2a7ac4 100644
--- a/zenoh/src/net/tests/tables.rs
+++ 
b/zenoh/src/net/tests/tables.rs @@ -234,75 +234,6 @@ fn multisub_test() { tables::close_face(&tables, &face0); } -#[test] -fn multisub_test() { - let config = Config::default(); - let router = Router::new( - ZenohId::try_from([1]).unwrap(), - WhatAmI::Client, - Some(Arc::new(HLC::default())), - &config, - ) - .unwrap(); - let tables = router.tables.clone(); - - let primitives = Arc::new(DummyPrimitives {}); - let face0 = Arc::downgrade(&router.new_primitives(primitives).state); - assert!(face0.upgrade().is_some()); - - // -------------- - let sub_info = SubscriberInfo { - reliability: Reliability::Reliable, - }; - declare_subscription( - zlock!(tables.ctrl_lock).as_ref(), - &tables, - &mut face0.upgrade().unwrap(), - 0, - &"sub".into(), - &sub_info, - NodeId::default(), - ); - let optres = Resource::get_resource(zread!(tables.tables)._get_root(), "sub") - .map(|res| Arc::downgrade(&res)); - assert!(optres.is_some()); - let res = optres.unwrap(); - assert!(res.upgrade().is_some()); - - declare_subscription( - zlock!(tables.ctrl_lock).as_ref(), - &tables, - &mut face0.upgrade().unwrap(), - 1, - &"sub".into(), - &sub_info, - NodeId::default(), - ); - assert!(res.upgrade().is_some()); - - undeclare_subscription( - zlock!(tables.ctrl_lock).as_ref(), - &tables, - &mut face0.upgrade().unwrap(), - 0, - &WireExpr::empty(), - NodeId::default(), - ); - assert!(res.upgrade().is_some()); - - undeclare_subscription( - zlock!(tables.ctrl_lock).as_ref(), - &tables, - &mut face0.upgrade().unwrap(), - 1, - &WireExpr::empty(), - NodeId::default(), - ); - assert!(res.upgrade().is_none()); - - tables::close_face(&tables, &face0); -} - #[tokio::test(flavor = "multi_thread", worker_threads = 1)] async fn clean_test() { let config = Config::default();