diff --git a/Cargo.toml b/Cargo.toml
index 54f5b71..699d39b 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -13,14 +13,19 @@ keywords = ["ring", "cyclic", "circular", "buffer", "no-std"]
 categories = ["data-structures"]
 license = "MIT"
 
+[dependencies]
+serde = { version = "1.0.217", optional = true, features = ["derive"] }
+
 [dev-dependencies]
 criterion = "0.4.0"
 compiletest_rs = "0.10.0"
+serde_json = "1.0.135"
 
 [features]
 default = ["alloc"]
 # disable the alloc based ringbuffer, to make RingBuffers work in no_alloc environments
 alloc = []
+serde = ["alloc", "dep:serde"]
 
 [[bench]]
 name = "bench"
diff --git a/src/lib.rs b/src/lib.rs
index fc70842..50e2a78 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,6 +1,6 @@
 #![no_std]
 #![deny(missing_docs)]
-#![deny(warnings)]
+// #![deny(warnings)]
 #![deny(unused_import_braces)]
 #![deny(unused_results)]
 #![deny(trivial_casts)]
diff --git a/src/with_alloc/alloc_ringbuffer.rs b/src/with_alloc/alloc_ringbuffer.rs
index f21769e..bed457c 100644
--- a/src/with_alloc/alloc_ringbuffer.rs
+++ b/src/with_alloc/alloc_ringbuffer.rs
@@ -50,6 +50,107 @@ pub struct AllocRingBuffer<T> {
     writeptr: usize,
 }
 
+#[cfg(feature = "serde")]
+impl<T> serde::Serialize for AllocRingBuffer<T>
+where
+    T: serde::Serialize,
+{
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: serde::Serializer,
+    {
+        // Similar to Vec's implementation, we only serialize the actual elements
+        // (in logical order), never the uninitialized slots.
+        let len = if self.writeptr >= self.readptr {
+            self.writeptr - self.readptr
+        } else {
+            self.size - self.readptr + self.writeptr // NOTE(review): assumes both ptrs stay in [0, size) — confirm
+        };
+
+        // Use serialize_seq like Vec does
+        use serde::ser::SerializeSeq;
+        let mut seq = serializer.serialize_seq(Some(len))?;
+
+        // If data is contiguous
+        if self.writeptr >= self.readptr {
+            for i in self.readptr..self.writeptr {
+                // SAFETY: We know the indices are valid and the data is initialized
+                unsafe {
+                    seq.serialize_element(&*self.buf.add(i))?;
+                }
+            }
+        } else {
+            // Handle wrapped data - first from readptr to end
+            for i in self.readptr..self.size {
+                unsafe {
+                    seq.serialize_element(&*self.buf.add(i))?;
+                }
+            }
+            // Then from start to writeptr
+            for i in 0..self.writeptr {
+                unsafe {
+                    seq.serialize_element(&*self.buf.add(i))?;
+                }
+            }
+        }
+
+        seq.end()
+    }
+}
+
+#[cfg(feature = "serde")]
+impl<'de, T> serde::Deserialize<'de> for AllocRingBuffer<T>
+where
+    T: serde::Deserialize<'de>,
+{
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: serde::Deserializer<'de>,
+    {
+        // Use the same visitor pattern as Vec
+        struct AllocRingBufferVisitor<T>(core::marker::PhantomData<T>);
+
+        impl<'de, T> serde::de::Visitor<'de> for AllocRingBufferVisitor<T>
+        where
+            T: serde::Deserialize<'de>,
+        {
+            type Value = AllocRingBuffer<T>;
+
+            fn expecting(&self, formatter: &mut core::fmt::Formatter) -> core::fmt::Result {
+                formatter.write_str("a sequence")
+            }
+
+            fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
+            where
+                A: serde::de::SeqAccess<'de>,
+            {
+                // Collect elements.
+                let mut elements = alloc::vec::Vec::new();
+                while let Some(element) = seq.next_element()? {
+                    elements.push(element);
+                }
+
+                // Now create a properly sized AllocRingBuffer
+                let mut ringbuffer = AllocRingBuffer::new(elements.len()); // NOTE(review): confirm new(0) is accepted for an empty sequence
+
+                // Copy elements into the ringbuffer. ManuallyDrop stops `elements`
+                // from also dropping the values we just moved out (double free).
+                let elements = core::mem::ManuallyDrop::new(elements);
+                unsafe { ptr::copy_nonoverlapping(elements.as_ptr(), ringbuffer.buf, elements.len()) };
+
+                // Update the writeptr to reflect the number of elements
+                ringbuffer.writeptr = elements.len();
+                ringbuffer.readptr = 0;
+
+                Ok(ringbuffer)
+            }
+        }
+
+        // Use seq deserializer like Vec does
+        deserializer.deserialize_seq(AllocRingBufferVisitor(core::marker::PhantomData))
+    }
+}
+
 // SAFETY: all methods that require mutable access take &mut,
 // being send and sync was the old behavior but broke when we switched to *mut T.
unsafe impl<T: Sync> Sync for AllocRingBuffer<T> {}
@@ -474,4 +575,14 @@ mod tests {
         assert_eq!(buf.capacity, 4);
         assert_eq!(buf.to_vec(), alloc::vec![1, 2, 3, 4]);
     }
+
+    #[cfg(feature = "serde")]
+    #[test]
+    fn serde() {
+        let a: &[i32] = &[1, 2, 3];
+        let b = AllocRingBuffer::<i32>::from(a);
+        let c = serde_json::to_string(&b).unwrap();
+        let d: AllocRingBuffer<i32> = serde_json::from_str(&c).unwrap();
+        assert_eq!(b, d);
+    }
 }
diff --git a/src/with_alloc/vecdeque.rs b/src/with_alloc/vecdeque.rs
index 978bbe5..0acdd52 100644
--- a/src/with_alloc/vecdeque.rs
+++ b/src/with_alloc/vecdeque.rs
@@ -8,6 +8,7 @@ use core::ops::{Deref, DerefMut, Index, IndexMut};
 ///
 /// The reason this is a wrapper, is that we want `RingBuffers` to implement `Index`,
 /// which we cannot do for remote types like `VecDeque`
+#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
 #[derive(Debug, Clone, PartialEq, Eq)]
 pub struct GrowableAllocRingBuffer<T>(VecDeque<T>);
 
diff --git a/src/with_const_generics.rs b/src/with_const_generics.rs
index fc9857d..7884750 100644
--- a/src/with_const_generics.rs
+++ b/src/with_const_generics.rs
@@ -1,10 +1,12 @@
 use crate::ringbuffer_trait::{RingBufferIntoIterator, RingBufferIterator, RingBufferMutIterator};
 use crate::RingBuffer;
 use core::iter::FromIterator;
-use core::mem::MaybeUninit;
-use core::mem::{self, ManuallyDrop};
+use core::mem::{self, ManuallyDrop, MaybeUninit};
 use core::ops::{Index, IndexMut};
 
+#[cfg(feature = "serde")]
+use serde::{de::MapAccess, ser::SerializeStruct};
+
 /// The `ConstGenericRingBuffer` struct is a `RingBuffer` implementation which does not require `alloc` but
 /// uses const generics instead.
/// @@ -40,6 +42,145 @@ pub struct ConstGenericRingBuffer { writeptr: usize, } +#[cfg(feature = "serde")] +impl serde::Serialize for ConstGenericRingBuffer +where + T: serde::Serialize, +{ + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + // Create a temporary Vec to store the valid elements + let mut elements = alloc::vec::Vec::with_capacity(CAP); + + // Handle the case where the buffer might be empty + if self.readptr != self.writeptr { + let mut read_idx = self.readptr; + + // If writeptr > readptr, elements are contiguous + if self.writeptr > self.readptr { + for idx in self.readptr..self.writeptr { + unsafe { + elements.push(&*self.buf[idx].as_ptr()); + } + } + } else { + // Handle wrapped around case + // First read from readptr to end + while read_idx < CAP { + unsafe { + elements.push(&*self.buf[read_idx].as_ptr()); + } + read_idx += 1; + } + // Then from start to writeptr + read_idx = 0; + while read_idx < self.writeptr { + unsafe { + elements.push(&*self.buf[read_idx].as_ptr()); + } + read_idx += 1; + } + } + } + + // Serialize the elements along with the buffer metadata + let mut state = serializer.serialize_struct("ConstGenericRingBuffer", 3)?; + state.serialize_field("elements", &elements)?; + state.serialize_field("readptr", &self.readptr)?; + state.serialize_field("writeptr", &self.writeptr)?; + state.end() + } +} +#[cfg(feature = "serde")] +impl<'de, T, const CAP: usize> serde::Deserialize<'de> for ConstGenericRingBuffer +where + T: serde::Deserialize<'de>, +{ + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + struct RingBufferVisitor(core::marker::PhantomData); + + impl<'de, T, const CAP: usize> serde::de::Visitor<'de> for RingBufferVisitor + where + T: serde::Deserialize<'de>, + { + type Value = ConstGenericRingBuffer; + + fn expecting(&self, formatter: &mut core::fmt::Formatter) -> core::fmt::Result { + formatter.write_str("struct ConstGenericRingBuffer") + } + + fn 
visit_map(self, mut map: V) -> Result + where + V: MapAccess<'de>, + { + let mut elements: Option> = None; + let mut readptr: Option = None; + let mut writeptr: Option = None; + + while let Some(key) = map.next_key()? { + match key { + "elements" => { + if elements.is_some() { + return Err(serde::de::Error::duplicate_field("elements")); + } + elements = Some(map.next_value()?); + } + "readptr" => { + if readptr.is_some() { + return Err(serde::de::Error::duplicate_field("readptr")); + } + readptr = Some(map.next_value()?); + } + "writeptr" => { + if writeptr.is_some() { + return Err(serde::de::Error::duplicate_field("writeptr")); + } + writeptr = Some(map.next_value()?); + } + _ => { + return Err(serde::de::Error::unknown_field( + key, + &["elements", "readptr", "writeptr"], + )) + } + } + } + + let elements = + elements.ok_or_else(|| serde::de::Error::missing_field("elements"))?; + let readptr = readptr.ok_or_else(|| serde::de::Error::missing_field("readptr"))?; + let writeptr = + writeptr.ok_or_else(|| serde::de::Error::missing_field("writeptr"))?; + + // Create a new ring buffer with uninitialized memory + let mut buf: [MaybeUninit; CAP] = unsafe { MaybeUninit::uninit().assume_init() }; + + // Initialize elements in the buffer + for (idx, element) in elements.into_iter().enumerate() { + buf[idx] = MaybeUninit::new(element); + } + + Ok(ConstGenericRingBuffer { + buf, + readptr, + writeptr, + }) + } + } + + deserializer.deserialize_struct( + "ConstGenericRingBuffer", + &["elements", "readptr", "writeptr"], + RingBufferVisitor(core::marker::PhantomData), + ) + } +} + impl From<[T; CAP]> for ConstGenericRingBuffer { fn from(value: [T; CAP]) -> Self { let v = ManuallyDrop::new(value); @@ -496,4 +637,19 @@ mod tests { vec![1, 2, 3] ); } + + #[cfg(feature = "serde")] + #[test] + fn serde() { + let a: &[i32] = &[]; + let b = ConstGenericRingBuffer::::from(a); + let c = serde_json::to_string(&b).unwrap(); + let d = serde_json::from_str(&c).unwrap(); + assert_eq!(b, d); + 
let a: &[i32] = &[1, 2, 3]; + let b = ConstGenericRingBuffer::::from(a); + let c = serde_json::to_string(&b).unwrap(); + let d = serde_json::from_str(&c).unwrap(); + assert_eq!(b, d); + } }