token_serializer: introduce TokenWriter
This simplifies token serialization

Signed-off-by: William Casarin <[email protected]>
jb55 committed Jan 5, 2025
1 parent 6a76e67 commit 47d1e8b
Showing 3 changed files with 41 additions and 9 deletions.
2 changes: 1 addition & 1 deletion crates/notedeck_columns/src/storage/mod.rs
@@ -5,4 +5,4 @@ mod token_parser;
 pub use decks::{load_decks_cache, save_decks_cache, DECKS_CACHE_FILE};
 pub use migration::{deserialize_columns, COLUMNS_FILE};
 
-pub use token_parser::{ParseError, TokenParser, TokenSerializable};
+pub use token_parser::{ParseError, TokenParser, TokenSerializable, TokenWriter};
38 changes: 37 additions & 1 deletion crates/notedeck_columns/src/storage/token_parser.rs
@@ -25,6 +25,42 @@ pub enum ParseError<'a> {
     EOF,
 }
 
+pub struct TokenWriter {
+    delim: &'static str,
+    tokens_written: usize,
+    buf: Vec<u8>,
+}
+
+impl Default for TokenWriter {
+    fn default() -> Self {
+        Self::new(":")
+    }
+}
+
+impl TokenWriter {
+    pub fn new(delim: &'static str) -> Self {
+        let buf = vec![];
+        let tokens_written = 0;
+        Self {
+            buf,
+            tokens_written,
+            delim,
+        }
+    }
+
+    pub fn write_token(&mut self, token: &str) {
+        if self.tokens_written > 0 {
+            self.buf.extend_from_slice(self.delim.as_bytes())
+        }
+        self.buf.extend_from_slice(token.as_bytes());
+        self.tokens_written += 1;
+    }
+
+    pub fn buffer(&self) -> &[u8] {
+        &self.buf
+    }
+}
+
 #[derive(Clone)]
 pub struct TokenParser<'a> {
     tokens: &'a [&'a str],
@@ -146,7 +182,7 @@ pub trait TokenSerializable: Sized {
     /// Return a list of serialization plans for a type. We do this for
     /// type safety and assume constructing these types are lightweight
     fn parse<'a>(parser: &mut TokenParser<'a>) -> Result<Self, ParseError<'a>>;
-    fn serialize(&self, write_token: fn(&str) -> Result<(), std::io::Error>) -> Result<(), std::io::Error>;
+    fn serialize(&self, writer: &mut TokenWriter);
 }
 
 #[cfg(test)]
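Taken together, TokenWriter buffers tokens in memory and inserts the delimiter only between entries. A minimal usage sketch (crate-internal, mirroring the import path used in add_column.rs below; the token strings are invented for illustration):

use crate::storage::TokenWriter;

#[test]
fn token_writer_joins_with_delimiter() {
    // Default delimiter is ":" (see the Default impl in the diff above).
    let mut writer = TokenWriter::default();
    writer.write_token("add_column");
    writer.write_token("notification");

    // The delimiter appears only between tokens, never leading or trailing.
    assert_eq!(writer.buffer(), b"add_column:notification");

    // A custom delimiter can be supplied via new().
    let mut tabs = TokenWriter::new("\t");
    tabs.write_token("a");
    tabs.write_token("b");
    assert_eq!(tabs.buffer(), b"a\tb");
}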
10 changes: 3 additions & 7 deletions crates/notedeck_columns/src/ui/add_column.rs
@@ -11,7 +11,7 @@ use nostrdb::{Ndb, Transaction};
 use crate::{
     login_manager::AcquireKeyState,
     route::Route,
-    storage::{ParseError, TokenParser, TokenSerializable},
+    storage::{ParseError, TokenParser, TokenSerializable, TokenWriter},
     timeline::{kind::ListKind, PubkeySource, Timeline, TimelineKind},
     ui::anim::ICON_EXPANSION_MULTIPLE,
     Damus,
@@ -111,14 +111,10 @@ impl AddColumnRoute {
 }
 
 impl TokenSerializable for AddColumnRoute {
-    fn serialize(
-        &self,
-        write_token: fn(&str) -> Result<(), std::io::Error>,
-    ) -> Result<(), std::io::Error> {
+    fn serialize(&self, writer: &mut TokenWriter) {
         for token in self.tokens() {
-            write_token(token)?;
+            writer.write_token(token);
         }
-        Ok(())
     }
 
     fn parse<'a>(parser: &mut TokenParser<'a>) -> Result<Self, ParseError<'a>> {
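Compared with the old fn(&str) -> Result<(), std::io::Error> callback, the writer-based signature drops the ?/Ok(()) plumbing because appending to an in-memory buffer cannot fail. A hedged caller-side sketch (serialize_to_string is a hypothetical helper, not part of this commit):

use crate::storage::{TokenSerializable, TokenWriter};

fn serialize_to_string<T: TokenSerializable>(value: &T) -> String {
    let mut writer = TokenWriter::default();
    value.serialize(&mut writer);
    // Decoding cannot fail: write_token only ever appends UTF-8 bytes,
    // from &str tokens and the &'static str delimiter.
    String::from_utf8(writer.buffer().to_vec()).expect("tokens are UTF-8")
}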
