From 646e6725b4ec42af7a6b6bfe7d37fded7b8ac568 Mon Sep 17 00:00:00 2001 From: William Casarin Date: Thu, 26 Dec 2024 09:38:56 -0800 Subject: [PATCH 1/6] algos: introduce last_n_per_pubkey_from_tags This function creates filters for the base our first algo in Damus: Called "last N note per pubkey". I don't have a better name for it. This function generates a query in the form: [ {"authors": ["author_a"], "limit": 1, "kinds": [1] , {"authors": ["author_b"], "limit": 1, "kinds": [1] , {"authors": ["author_c"], "limit": 1, "kinds": [1] , {"authors": ["author_c"], "limit": 1, "kinds": [1] ... ] Due to an unfortunate restriction currently in nostrdb and strfry, we can only do about 16 to 20 of these at any given time. I have made this limit configurable in strfry[1]. I just need to do the same in nostrdb now. [1] https://github.com/hoytech/strfry/pull/133 Changelog-Added: Add last_n_per_pubkey_from_tags algo function --- crates/notedeck/src/filter.rs | 54 +++++++++++++++++++++++++++++++++++ 1 file changed, 54 insertions(+) diff --git a/crates/notedeck/src/filter.rs b/crates/notedeck/src/filter.rs index 214a8700..bca3e234 100644 --- a/crates/notedeck/src/filter.rs +++ b/crates/notedeck/src/filter.rs @@ -190,6 +190,60 @@ impl FilteredTags { } } +/// Create a "last N notes per pubkey" query. 
+pub fn last_n_per_pubkey_from_tags( + note: &Note, + kind: u64, + notes_per_pubkey: u64, +) -> Result> { + let mut filters: Vec = vec![]; + + for tag in note.tags() { + // TODO: fix arbitrary MAX_FILTER limit in nostrdb + if filters.len() == 15 { + break; + } + + if tag.count() < 2 { + continue; + } + + let t = if let Some(t) = tag.get_unchecked(0).variant().str() { + t + } else { + continue; + }; + + if t == "p" { + let author = if let Some(author) = tag.get_unchecked(1).variant().id() { + author + } else { + continue; + }; + + let mut filter = Filter::new(); + filter.start_authors_field()?; + filter.add_id_element(author)?; + filter.end_field(); + filters.push(filter.kinds([kind]).limit(notes_per_pubkey).build()); + } else if t == "t" { + let hashtag = if let Some(hashtag) = tag.get_unchecked(1).variant().str() { + hashtag + } else { + continue; + }; + + let mut filter = Filter::new(); + filter.start_tags_field('t')?; + filter.add_str_element(hashtag)?; + filter.end_field(); + filters.push(filter.kinds([kind]).limit(notes_per_pubkey).build()); + } + } + + Ok(filters) +} + /// Create a filter from tags. 
This can be used to create a filter /// from a contact list pub fn filter_from_tags(note: &Note, add_pubkey: Option<&[u8; 32]>) -> Result { From d57052176bba8f5676e479a9e0389085c2304e68 Mon Sep 17 00:00:00 2001 From: William Casarin Date: Wed, 25 Dec 2024 19:06:04 -0800 Subject: [PATCH 2/6] wip algo timelines Signed-off-by: William Casarin --- crates/notedeck_columns/src/route.rs | 6 +- crates/notedeck_columns/src/storage/decks.rs | 452 +++++++++++++------ crates/notedeck_columns/src/timeline/kind.rs | 83 +++- crates/notedeck_columns/src/timeline/mod.rs | 31 +- crates/notedeck_columns/src/ui/add_column.rs | 139 +++++- 5 files changed, 554 insertions(+), 157 deletions(-) diff --git a/crates/notedeck_columns/src/route.rs b/crates/notedeck_columns/src/route.rs index 647aa66b..3f06dc44 100644 --- a/crates/notedeck_columns/src/route.rs +++ b/crates/notedeck_columns/src/route.rs @@ -5,7 +5,7 @@ use crate::{ accounts::AccountsRoute, column::Columns, timeline::{kind::ColumnTitle, TimelineId, TimelineRoute}, - ui::add_column::AddColumnRoute, + ui::add_column::{AddAlgoRoute, AddColumnRoute}, }; /// App routing. These describe different places you can go inside Notedeck. 
@@ -88,6 +88,10 @@ impl Route { Route::ComposeNote => ColumnTitle::simple("Compose Note"), Route::AddColumn(c) => match c { AddColumnRoute::Base => ColumnTitle::simple("Add Column"), + AddColumnRoute::Algo(r) => match r { + AddAlgoRoute::Base => ColumnTitle::simple("Add Algo Column"), + AddAlgoRoute::LastPerPubkey => ColumnTitle::simple("Add Last Notes Column"), + }, AddColumnRoute::UndecidedNotification => { ColumnTitle::simple("Add Notifications Column") } diff --git a/crates/notedeck_columns/src/storage/decks.rs b/crates/notedeck_columns/src/storage/decks.rs index 3c4b9c2b..8bd1389b 100644 --- a/crates/notedeck_columns/src/storage/decks.rs +++ b/crates/notedeck_columns/src/storage/decks.rs @@ -3,6 +3,8 @@ use std::{collections::HashMap, fmt, str::FromStr}; use enostr::{NoteId, Pubkey}; use nostrdb::Ndb; use serde::{Deserialize, Serialize}; +use strum::IntoEnumIterator; +use strum_macros::EnumIter; use tracing::{error, info}; use crate::{ @@ -10,8 +12,8 @@ use crate::{ column::{Columns, IntermediaryRoute}, decks::{Deck, Decks, DecksCache}, route::Route, - timeline::{kind::ListKind, PubkeySource, TimelineKind, TimelineRoute}, - ui::add_column::AddColumnRoute, + timeline::{kind::ListKind, AlgoTimeline, PubkeySource, TimelineKind, TimelineRoute}, + ui::add_column::{AddAlgoRoute, AddColumnRoute}, Error, }; @@ -299,7 +301,7 @@ fn deserialize_columns(ndb: &Ndb, deck_user: &[u8; 32], serialized: Vec Result { + Ok(parse_selection(serialized)) + } +} + +#[derive(Clone, PartialEq, Eq, Debug, EnumIter)] +enum AlgoKeyword { + LastPerPubkey, +} + +impl AlgoKeyword { + #[inline] + pub fn name(&self) -> &'static str { + match self { + AlgoKeyword::LastPerPubkey => "last_per_pubkey", + } + } +} + +#[derive(Clone, PartialEq, Eq, Debug, EnumIter)] +enum ListKeyword { Contact, +} + +impl ListKeyword { + #[inline] + pub fn name(&self) -> &'static str { + match self { + ListKeyword::Contact => "contact", + } + } +} + +#[derive(Clone, PartialEq, Eq, Debug, EnumIter)] +enum 
PubkeySourceKeyword { Explicit, DeckAuthor, +} + +impl PubkeySourceKeyword { + #[inline] + pub fn name(&self) -> &'static str { + match self { + PubkeySourceKeyword::Explicit => "explicit", + PubkeySourceKeyword::DeckAuthor => "deck_author", + } + } +} + +#[derive(Clone, PartialEq, Eq, Debug, EnumIter)] +enum Keyword { + Notifs, + Universe, Profile, Hashtag, Generic, @@ -350,6 +408,7 @@ enum Keyword { Relay, Compose, Column, + AlgoSelection, NotificationSelection, ExternalNotifSelection, HashtagSelection, @@ -361,60 +420,104 @@ enum Keyword { } impl Keyword { - const MAPPING: &'static [(&'static str, Keyword, bool)] = &[ - ("notifs", Keyword::Notifs, false), - ("universe", Keyword::Universe, false), - ("contact", Keyword::Contact, false), - ("explicit", Keyword::Explicit, true), - ("deck_author", Keyword::DeckAuthor, false), - ("profile", Keyword::Profile, false), - ("hashtag", Keyword::Hashtag, true), - ("generic", Keyword::Generic, false), - ("thread", Keyword::Thread, true), - ("reply", Keyword::Reply, true), - ("quote", Keyword::Quote, true), - ("account", Keyword::Account, false), - ("show", Keyword::Show, false), - ("new", Keyword::New, false), - ("relay", Keyword::Relay, false), - ("compose", Keyword::Compose, false), - ("column", Keyword::Column, false), - ( - "notification_selection", - Keyword::NotificationSelection, - false, - ), - ( - "external_notif_selection", - Keyword::ExternalNotifSelection, - false, - ), - ("hashtag_selection", Keyword::HashtagSelection, false), - ("support", Keyword::Support, false), - ("deck", Keyword::Deck, false), - ("edit", Keyword::Edit, true), - ]; - - fn has_payload(&self) -> bool { - Keyword::MAPPING - .iter() - .find(|(_, keyword, _)| keyword == self) - .map(|(_, _, has_payload)| *has_payload) - .unwrap_or(false) + fn name(&self) -> &'static str { + match self { + Keyword::Notifs => "notifs", + Keyword::Universe => "universe", + Keyword::Profile => "profile", + Keyword::Hashtag => "hashtag", + Keyword::Generic => 
"generic", + Keyword::Thread => "thread", + Keyword::Reply => "reply", + Keyword::Quote => "quote", + Keyword::Account => "account", + Keyword::Show => "show", + Keyword::New => "new", + Keyword::Relay => "relay", + Keyword::Compose => "compose", + Keyword::Column => "column", + Keyword::AlgoSelection => "algo_selection", + Keyword::NotificationSelection => "notification_selection", + Keyword::ExternalNotifSelection => "external_notif_selection", + Keyword::IndividualSelection => "individual_selection", + Keyword::ExternalIndividualSelection => "external_individual_selection", + Keyword::HashtagSelection => "hashtag_selection", + Keyword::Support => "support", + Keyword::Deck => "deck", + Keyword::Edit => "edit", + } } } impl fmt::Display for Keyword { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - if let Some(name) = Keyword::MAPPING - .iter() - .find(|(_, keyword, _)| keyword == self) - .map(|(name, _, _)| *name) - { - write!(f, "{}", name) - } else { - write!(f, "UnknownKeyword") + write!(f, "{}", self.name()) + } +} + +impl fmt::Display for AlgoKeyword { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.name()) + } +} + +impl fmt::Display for ListKeyword { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.name()) + } +} + +impl FromStr for PubkeySourceKeyword { + type Err = Error; + + fn from_str(serialized: &str) -> Result { + for keyword in Self::iter() { + if serialized == keyword.name() { + return Ok(keyword); + } + } + + Err(Error::Generic( + "Could not convert string to Keyword enum".to_owned(), + )) + } +} + +impl FromStr for ListKeyword { + type Err = Error; + + fn from_str(serialized: &str) -> Result { + for keyword in Self::iter() { + if serialized == keyword.name() { + return Ok(keyword); + } + } + + Err(Error::Generic( + "Could not convert string to Keyword enum".to_owned(), + )) + } +} + +impl fmt::Display for PubkeySourceKeyword { + fn fmt(&self, f: &mut 
fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.name()) + } +} + +impl FromStr for AlgoKeyword { + type Err = Error; + + fn from_str(serialized: &str) -> Result { + for keyword in Self::iter() { + if serialized == keyword.name() { + return Ok(keyword); + } } + + Err(Error::Generic( + "Could not convert string to Keyword enum".to_owned(), + )) } } @@ -422,13 +525,15 @@ impl FromStr for Keyword { type Err = Error; fn from_str(serialized: &str) -> Result { - Keyword::MAPPING - .iter() - .find(|(name, _, _)| *name == serialized) - .map(|(_, keyword, _)| keyword.clone()) - .ok_or(Error::Generic( - "Could not convert string to Keyword enum".to_owned(), - )) + for keyword in Self::iter() { + if serialized == keyword.name() { + return Ok(keyword); + } + } + + Err(Error::Generic( + "Could not convert string to Keyword enum".to_owned(), + )) } } @@ -458,10 +563,19 @@ fn serialize_route(route: &Route, columns: &Columns) -> Option { match &timeline.kind { TimelineKind::List(list_kind) => match list_kind { ListKind::Contact(pubkey_source) => { - selections.push(Selection::Keyword(Keyword::Contact)); + selections.push(Selection::List(ListKeyword::Contact)); selections.extend(generate_pubkey_selections(pubkey_source)); } }, + TimelineKind::Algo(AlgoTimeline::LastPerPubkey(list_kind)) => { + match list_kind { + ListKind::Contact(pk_src) => { + selections.push(Selection::Algo(AlgoKeyword::LastPerPubkey)); + selections.push(Selection::List(ListKeyword::Contact)); + selections.extend(generate_pubkey_selections(pk_src)); + } + } + } TimelineKind::Notifications(pubkey_source) => { selections.push(Selection::Keyword(Keyword::Notifs)); selections.extend(generate_pubkey_selections(pubkey_source)); @@ -493,7 +607,7 @@ fn serialize_route(route: &Route, columns: &Columns) -> Option { } TimelineRoute::Profile(pubkey) => { selections.push(Selection::Keyword(Keyword::Profile)); - selections.push(Selection::Keyword(Keyword::Explicit)); + 
selections.push(Selection::PubkeySource(PubkeySourceKeyword::Explicit)); selections.push(Selection::Payload(pubkey.hex())); } TimelineRoute::Reply(note_id) => { @@ -518,6 +632,16 @@ fn serialize_route(route: &Route, columns: &Columns) -> Option { selections.push(Selection::Keyword(Keyword::Column)); match add_column_route { AddColumnRoute::Base => (), + AddColumnRoute::Algo(algo_route) => match algo_route { + AddAlgoRoute::Base => { + selections.push(Selection::Keyword(Keyword::AlgoSelection)) + } + + AddAlgoRoute::LastPerPubkey => { + selections.push(Selection::Keyword(Keyword::AlgoSelection)); + selections.push(Selection::Algo(AlgoKeyword::LastPerPubkey)); + } + }, AddColumnRoute::UndecidedNotification => { selections.push(Selection::Keyword(Keyword::NotificationSelection)) } @@ -569,109 +693,149 @@ fn generate_pubkey_selections(source: &PubkeySource) -> Vec { let mut selections = Vec::new(); match source { PubkeySource::Explicit(pubkey) => { - selections.push(Selection::Keyword(Keyword::Explicit)); + selections.push(Selection::PubkeySource(PubkeySourceKeyword::Explicit)); selections.push(Selection::Payload(pubkey.hex())); } PubkeySource::DeckAuthor => { - selections.push(Selection::Keyword(Keyword::DeckAuthor)); + selections.push(Selection::PubkeySource(PubkeySourceKeyword::DeckAuthor)); } } selections } +/// Parses a selection +fn parse_selection(token: &str) -> Selection { + AlgoKeyword::from_str(token) + .map(Selection::Algo) + .or_else(|_| ListKeyword::from_str(token).map(Selection::List)) + .or_else(|_| PubkeySourceKeyword::from_str(token).map(Selection::PubkeySource)) + .or_else(|_| Keyword::from_str(token).map(Selection::Keyword)) + .unwrap_or_else(|_| Selection::Payload(token.to_owned())) +} + impl Selection { - fn from_serialized(serialized: &str) -> Vec { + fn from_serialized(buffer: &str) -> Vec { let mut selections = Vec::new(); let seperator = ":"; + let sep_len = seperator.len(); + let mut pos = 0; - let mut serialized_copy = 
serialized.to_string(); - let mut buffer = serialized_copy.as_mut(); + while let Some(offset) = buffer[pos..].find(seperator) { + selections.push(parse_selection(&buffer[pos..pos + offset])); + pos = pos + offset + sep_len; + } - let mut next_is_payload = false; - while let Some(index) = buffer.find(seperator) { - if let Ok(keyword) = Keyword::from_str(&buffer[..index]) { - selections.push(Selection::Keyword(keyword.clone())); - if keyword.has_payload() { - next_is_payload = true; - } - } + selections.push(parse_selection(&buffer[pos..])); - buffer = &mut buffer[index + seperator.len()..]; + selections + } +} + +/// Parse an explicit:abdef... or deck_author from a Selection token stream. +/// +/// Also handle the case where there is nothing. We assume this means deck_author. +fn parse_pubkey_src_selection(tokens: &[Selection]) -> Option { + match tokens.first() { + // we handle bare payloads and assume they are explicit pubkey sources + Some(Selection::Payload(hex)) => { + let pk = Pubkey::from_hex(hex.as_str()).ok()?; + Some(PubkeySource::Explicit(pk)) + } + + Some(Selection::PubkeySource(PubkeySourceKeyword::Explicit)) => { + if let Selection::Payload(hex) = tokens.get(1)? 
{ + let pk = Pubkey::from_hex(hex.as_str()).ok()?; + Some(PubkeySource::Explicit(pk)) + } else { + None + } } - if next_is_payload { - selections.push(Selection::Payload(buffer.to_string())); - } else if let Ok(keyword) = Keyword::from_str(buffer) { - selections.push(Selection::Keyword(keyword.clone())); + None | Some(Selection::PubkeySource(PubkeySourceKeyword::DeckAuthor)) => { + Some(PubkeySource::DeckAuthor) } - selections + Some(Selection::Keyword(_kw)) => None, + Some(Selection::Algo(_kw)) => None, + Some(Selection::List(_kw)) => None, } } -fn selections_to_route(selections: Vec) -> Option { +/// Parse ListKinds from Selections +fn parse_list_kind_selections(tokens: &[Selection]) -> Option { + // only list selections are valid in this position + let list_kw = if let Selection::List(list_kw) = tokens.first()? { + list_kw + } else { + return None; + }; + + let pubkey_src = parse_pubkey_src_selection(&tokens[1..])?; + + Some(match list_kw { + ListKeyword::Contact => ListKind::contact_list(pubkey_src), + }) +} + +fn selections_to_route(selections: &[Selection]) -> Option { match selections.first()? { - Selection::Keyword(Keyword::Contact) => match selections.get(1)? { - Selection::Keyword(Keyword::Explicit) => { - if let Selection::Payload(hex) = selections.get(2)? 
{ - Some(CleanIntermediaryRoute::ToTimeline( - TimelineKind::contact_list(PubkeySource::Explicit( - Pubkey::from_hex(hex.as_str()).ok()?, - )), - )) - } else { - None + Selection::Keyword(Keyword::AlgoSelection) => { + let r = match selections.get(1) { + None => AddColumnRoute::Algo(AddAlgoRoute::Base), + Some(Selection::Algo(algo_kw)) => match algo_kw { + AlgoKeyword::LastPerPubkey => AddColumnRoute::Algo(AddAlgoRoute::LastPerPubkey), + }, + // other keywords are invalid here + Some(_) => { + return None; } - } - Selection::Keyword(Keyword::DeckAuthor) => Some(CleanIntermediaryRoute::ToTimeline( - TimelineKind::contact_list(PubkeySource::DeckAuthor), - )), - _ => None, - }, - Selection::Keyword(Keyword::Notifs) => match selections.get(1)? { - Selection::Keyword(Keyword::Explicit) => { - if let Selection::Payload(hex) = selections.get(2)? { - Some(CleanIntermediaryRoute::ToTimeline( - TimelineKind::notifications(PubkeySource::Explicit( - Pubkey::from_hex(hex.as_str()).ok()?, - )), - )) - } else { - None - } - } - Selection::Keyword(Keyword::DeckAuthor) => Some(CleanIntermediaryRoute::ToTimeline( - TimelineKind::notifications(PubkeySource::DeckAuthor), - )), - _ => None, - }, - Selection::Keyword(Keyword::Profile) => match selections.get(1)? { - Selection::Keyword(Keyword::Explicit) => { - if let Selection::Payload(hex) = selections.get(2)? { - Some(CleanIntermediaryRoute::ToTimeline(TimelineKind::profile( - PubkeySource::Explicit(Pubkey::from_hex(hex.as_str()).ok()?), - ))) - } else { - None - } - } - Selection::Keyword(Keyword::DeckAuthor) => Some(CleanIntermediaryRoute::ToTimeline( - TimelineKind::profile(PubkeySource::DeckAuthor), - )), - Selection::Keyword(Keyword::Edit) => { - if let Selection::Payload(hex) = selections.get(2)? 
{ - Some(CleanIntermediaryRoute::ToRoute(Route::EditProfile( - Pubkey::from_hex(hex.as_str()).ok()?, - ))) - } else { - None + }; + + Some(CleanIntermediaryRoute::ToRoute(Route::AddColumn(r))) + } + + // Algorithm timelines + Selection::Algo(algo_kw) => { + let timeline_kind = match algo_kw { + AlgoKeyword::LastPerPubkey => { + let list_kind = parse_list_kind_selections(&selections[1..])?; + TimelineKind::last_per_pubkey(list_kind) } - } - _ => None, - }, + }; + + Some(CleanIntermediaryRoute::ToTimeline(timeline_kind)) + } + + // We never have PubkeySource keywords at the top level + Selection::PubkeySource(_pk_src) => None, + + Selection::List(ListKeyword::Contact) => { + // only pubkey/src is allowed in this position + let pubkey_src = parse_pubkey_src_selection(&selections[1..])?; + Some(CleanIntermediaryRoute::ToTimeline( + TimelineKind::contact_list(pubkey_src), + )) + } + + Selection::Keyword(Keyword::Notifs) => { + let pubkey_src = parse_pubkey_src_selection(&selections[1..])?; + Some(CleanIntermediaryRoute::ToTimeline( + TimelineKind::notifications(pubkey_src), + )) + } + + Selection::Keyword(Keyword::Profile) => { + // we only expect PubkeySource in this position + let pubkey_src = parse_pubkey_src_selection(&selections[1..])?; + Some(CleanIntermediaryRoute::ToTimeline(TimelineKind::profile( + pubkey_src, + ))) + } + Selection::Keyword(Keyword::Universe) => { Some(CleanIntermediaryRoute::ToTimeline(TimelineKind::Universe)) } + Selection::Keyword(Keyword::Hashtag) => { if let Selection::Payload(hashtag) = selections.get(1)? { Some(CleanIntermediaryRoute::ToTimeline(TimelineKind::Hashtag( @@ -681,9 +845,11 @@ fn selections_to_route(selections: Vec) -> Option { Some(CleanIntermediaryRoute::ToTimeline(TimelineKind::Generic)) } + Selection::Keyword(Keyword::Thread) => { if let Selection::Payload(hex) = selections.get(1)? 
{ Some(CleanIntermediaryRoute::ToRoute(Route::thread( @@ -693,6 +859,7 @@ fn selections_to_route(selections: Vec) -> Option { if let Selection::Payload(hex) = selections.get(1)? { Some(CleanIntermediaryRoute::ToRoute(Route::reply( @@ -770,9 +937,7 @@ fn selections_to_route(selections: Vec) -> Option None, }, Selection::Payload(_) - | Selection::Keyword(Keyword::Explicit) | Selection::Keyword(Keyword::New) - | Selection::Keyword(Keyword::DeckAuthor) | Selection::Keyword(Keyword::Show) | Selection::Keyword(Keyword::NotificationSelection) | Selection::Keyword(Keyword::ExternalNotifSelection) @@ -788,6 +953,9 @@ impl fmt::Display for Selection { match self { Selection::Keyword(keyword) => write!(f, "{}", keyword), Selection::Payload(payload) => write!(f, "{}", payload), + Selection::Algo(algo_kw) => write!(f, "{}", algo_kw), + Selection::List(list_kw) => write!(f, "{}", list_kw), + Selection::PubkeySource(pk_src_kw) => write!(f, "{}", pk_src_kw), } } } diff --git a/crates/notedeck_columns/src/timeline/kind.rs b/crates/notedeck_columns/src/timeline/kind.rs index c6d072cc..3d219140 100644 --- a/crates/notedeck_columns/src/timeline/kind.rs +++ b/crates/notedeck_columns/src/timeline/kind.rs @@ -35,6 +35,10 @@ impl PubkeySource { } impl ListKind { + pub fn contact_list(pk_src: PubkeySource) -> Self { + ListKind::Contact(pk_src) + } + pub fn pubkey_source(&self) -> Option<&PubkeySource> { match self { ListKind::Contact(pk_src) => Some(pk_src), @@ -54,6 +58,9 @@ impl ListKind { pub enum TimelineKind { List(ListKind), + /// The last not per pubkey + Algo(AlgoTimeline), + Notifications(PubkeySource), Profile(PubkeySource), @@ -69,10 +76,19 @@ pub enum TimelineKind { Hashtag(String), } +/// Hardcoded algo timelines +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum AlgoTimeline { + /// LastPerPubkey: a special nostr query that fetches the last N + /// notes for each pubkey on the list + LastPerPubkey(ListKind), +} + impl Display for TimelineKind { fn fmt(&self, f: &mut 
std::fmt::Formatter<'_>) -> std::fmt::Result { match self { TimelineKind::List(ListKind::Contact(_src)) => f.write_str("Contacts"), + TimelineKind::Algo(AlgoTimeline::LastPerPubkey(_lk)) => f.write_str("Last Notes"), TimelineKind::Generic => f.write_str("Timeline"), TimelineKind::Notifications(_) => f.write_str("Notifications"), TimelineKind::Profile(_) => f.write_str("Profile"), @@ -87,6 +103,7 @@ impl TimelineKind { pub fn pubkey_source(&self) -> Option<&PubkeySource> { match self { TimelineKind::List(list_kind) => list_kind.pubkey_source(), + TimelineKind::Algo(AlgoTimeline::LastPerPubkey(list_kind)) => list_kind.pubkey_source(), TimelineKind::Notifications(pk_src) => Some(pk_src), TimelineKind::Profile(pk_src) => Some(pk_src), TimelineKind::Universe => None, @@ -96,8 +113,27 @@ impl TimelineKind { } } + /// Some feeds are not realtime, like certain algo feeds + pub fn should_subscribe_locally(&self) -> bool { + match self { + TimelineKind::Algo(AlgoTimeline::LastPerPubkey(_list_kind)) => false, + + TimelineKind::List(_list_kind) => true, + TimelineKind::Notifications(_pk_src) => true, + TimelineKind::Profile(_pk_src) => true, + TimelineKind::Universe => true, + TimelineKind::Generic => true, + TimelineKind::Hashtag(_ht) => true, + TimelineKind::Thread(_ht) => true, + } + } + + pub fn last_per_pubkey(list_kind: ListKind) -> Self { + TimelineKind::Algo(AlgoTimeline::LastPerPubkey(list_kind)) + } + pub fn contact_list(pk: PubkeySource) -> Self { - TimelineKind::List(ListKind::Contact(pk)) + TimelineKind::List(ListKind::contact_list(pk)) } pub fn is_contacts(&self) -> bool { @@ -138,6 +174,48 @@ impl TimelineKind { None } + TimelineKind::Algo(AlgoTimeline::LastPerPubkey(ListKind::Contact(pk_src))) => { + let pk = match &pk_src { + PubkeySource::DeckAuthor => default_user?, + PubkeySource::Explicit(pk) => pk.bytes(), + }; + + let contact_filter = Filter::new().authors([pk]).kinds([3]).limit(1).build(); + + let txn = Transaction::new(ndb).expect("txn"); + let results 
= ndb + .query(&txn, &[contact_filter.clone()], 1) + .expect("contact query failed?"); + + let kind_fn = TimelineKind::last_per_pubkey; + let tabs = TimelineTab::only_notes_and_replies(); + + if results.is_empty() { + return Some(Timeline::new( + kind_fn(ListKind::contact_list(pk_src)), + FilterState::needs_remote(vec![contact_filter.clone()]), + tabs, + )); + } + + let list_kind = ListKind::contact_list(pk_src); + + match Timeline::last_per_pubkey(&results[0].note, &list_kind) { + Err(Error::App(notedeck::Error::Filter(FilterError::EmptyContactList))) => { + Some(Timeline::new( + kind_fn(list_kind), + FilterState::needs_remote(vec![contact_filter]), + tabs, + )) + } + Err(e) => { + error!("Unexpected error: {e}"); + None + } + Ok(tl) => Some(tl), + } + } + TimelineKind::Profile(pk_src) => { let pk = match &pk_src { PubkeySource::DeckAuthor => default_user?, @@ -222,6 +300,9 @@ impl TimelineKind { TimelineKind::List(list_kind) => match list_kind { ListKind::Contact(_pubkey_source) => ColumnTitle::simple("Contacts"), }, + TimelineKind::Algo(AlgoTimeline::LastPerPubkey(list_kind)) => match list_kind { + ListKind::Contact(_pubkey_source) => ColumnTitle::simple("Contacts (last notes)"), + }, TimelineKind::Notifications(_pubkey_source) => ColumnTitle::simple("Notifications"), TimelineKind::Profile(_pubkey_source) => ColumnTitle::needs_db(self), TimelineKind::Thread(_root_id) => ColumnTitle::simple("Thread"), diff --git a/crates/notedeck_columns/src/timeline/mod.rs b/crates/notedeck_columns/src/timeline/mod.rs index 235684d2..53f7217a 100644 --- a/crates/notedeck_columns/src/timeline/mod.rs +++ b/crates/notedeck_columns/src/timeline/mod.rs @@ -4,6 +4,7 @@ use crate::{ error::Error, subscriptions::{self, SubKind, Subscriptions}, thread::Thread, + timeline::kind::ListKind, Result, }; @@ -29,7 +30,7 @@ pub mod kind; pub mod route; pub use cache::{TimelineCache, TimelineCacheKey}; -pub use kind::{ColumnTitle, PubkeySource, TimelineKind}; +pub use kind::{AlgoTimeline, 
ColumnTitle, PubkeySource, TimelineKind}; pub use route::TimelineRoute; #[derive(Debug, Hash, Copy, Clone, Eq, PartialEq)] @@ -225,6 +226,18 @@ impl Timeline { ) } + pub fn last_per_pubkey(list: &Note, list_kind: &ListKind) -> Result { + let kind = 1; + let notes_per_pk = 1; + let filter = filter::last_n_per_pubkey_from_tags(list, kind, notes_per_pk)?; + + Ok(Timeline::new( + TimelineKind::last_per_pubkey(list_kind.clone()), + FilterState::ready(filter), + TimelineTab::only_notes_and_replies(), + )) + } + pub fn hashtag(hashtag: String) -> Self { let filter = Filter::new() .kinds([1]) @@ -395,6 +408,11 @@ impl Timeline { note_cache: &mut NoteCache, reversed: bool, ) -> Result<()> { + if !self.kind.should_subscribe_locally() { + // don't need to poll for timelines that don't have local subscriptions + return Ok(()); + } + let sub = self .subscription .ok_or(Error::App(notedeck::Error::no_active_sub()))?; @@ -599,13 +617,20 @@ fn setup_initial_timeline( note_cache: &mut NoteCache, filters: &[Filter], ) -> Result<()> { - timeline.subscription = Some(ndb.subscribe(filters)?); + // some timelines are one-shot and a refreshed, like last_per_pubkey algo feed + if timeline.kind.should_subscribe_locally() { + timeline.subscription = Some(ndb.subscribe(filters)?); + } let txn = Transaction::new(ndb)?; debug!( "querying nostrdb sub {:?} {:?}", timeline.subscription, timeline.filter ); - let lim = filters[0].limit().unwrap_or(filter::default_limit()) as i32; + + let mut lim = 0i32; + for filter in filters { + lim += filter.limit().unwrap_or(1) as i32; + } let notes: Vec = ndb .query(&txn, filters, lim)? 
diff --git a/crates/notedeck_columns/src/ui/add_column.rs b/crates/notedeck_columns/src/ui/add_column.rs index 6a380000..39a5590f 100644 --- a/crates/notedeck_columns/src/ui/add_column.rs +++ b/crates/notedeck_columns/src/ui/add_column.rs @@ -10,7 +10,8 @@ use nostrdb::{Ndb, Transaction}; use crate::{ login_manager::AcquireKeyState, - timeline::{PubkeySource, Timeline, TimelineKind}, + route::Route, + timeline::{kind::ListKind, PubkeySource, Timeline, TimelineKind}, ui::anim::ICON_EXPANSION_MULTIPLE, Damus, }; @@ -24,22 +25,35 @@ pub enum AddColumnResponse { UndecidedNotification, ExternalNotification, Hashtag, + Algo(AlgoOption), UndecidedIndividual, ExternalIndividual, } pub enum NotificationColumnType { - Home, + Contacts, External, } +#[derive(Clone, Debug)] +pub enum Decision { + Undecided, + Decided(T), +} + +#[derive(Clone, Debug)] +pub enum AlgoOption { + LastPerPubkey(Decision), +} + #[derive(Clone, Debug)] enum AddColumnOption { Universe, UndecidedNotification, ExternalNotification, + Algo(AlgoOption), Notification(PubkeySource), - Home(PubkeySource), + Contacts(PubkeySource), UndecidedHashtag, Hashtag(String), UndecidedIndividual, @@ -47,12 +61,19 @@ enum AddColumnOption { Individual(PubkeySource), } +#[derive(Clone, Copy, Eq, PartialEq, Debug)] +pub enum AddAlgoRoute { + Base, + LastPerPubkey, +} + #[derive(Clone, Copy, Eq, PartialEq, Debug)] pub enum AddColumnRoute { Base, UndecidedNotification, ExternalNotification, Hashtag, + Algo(AddAlgoRoute), UndecidedIndividual, ExternalIndividual, } @@ -64,6 +85,7 @@ impl AddColumnOption { cur_account: Option<&UserAccount>, ) -> Option { match self { + AddColumnOption::Algo(algo_option) => Some(AddColumnResponse::Algo(algo_option)), AddColumnOption::Universe => TimelineKind::Universe .into_timeline(ndb, None) .map(AddColumnResponse::Timeline), @@ -73,7 +95,7 @@ impl AddColumnOption { AddColumnOption::UndecidedNotification => { Some(AddColumnResponse::UndecidedNotification) } - AddColumnOption::Home(pubkey) => { 
+ AddColumnOption::Contacts(pubkey) => { let tlk = TimelineKind::contact_list(pubkey); tlk.into_timeline(ndb, cur_account.map(|a| a.pubkey.bytes())) .map(AddColumnResponse::Timeline) @@ -151,6 +173,40 @@ impl<'a> AddColumnView<'a> { }) } + fn algo_last_per_pk_ui(&mut self, ui: &mut Ui) -> Option { + let algo_option = ColumnOptionData { + title: "Contact List", + description: "Source the last note for each user in your contact list", + icon: egui::include_image!("../../../../assets/icons/home_icon_dark_4x.png"), + option: AddColumnOption::Algo(AlgoOption::LastPerPubkey(Decision::Decided( + ListKind::contact_list(PubkeySource::DeckAuthor), + ))), + }; + + let option = algo_option.option.clone(); + if self.column_option_ui(ui, algo_option).clicked() { + option.take_as_response(self.ndb, self.cur_account) + } else { + None + } + } + + fn algo_ui(&mut self, ui: &mut Ui) -> Option { + let algo_option = ColumnOptionData { + title: "Last Note per User", + description: "Show the last note for each user from a list", + icon: egui::include_image!("../../../../assets/icons/universe_icon_dark_4x.png"), + option: AddColumnOption::Algo(AlgoOption::LastPerPubkey(Decision::Undecided)), + }; + + let option = algo_option.option.clone(); + if self.column_option_ui(ui, algo_option).clicked() { + option.take_as_response(self.ndb, self.cur_account) + } else { + None + } + } + fn individual_ui(&mut self, ui: &mut Ui) -> Option { let mut selected_option: Option = None; for column_option_data in self.get_individual_options() { @@ -352,10 +408,10 @@ impl<'a> AddColumnView<'a> { }; vec.push(ColumnOptionData { - title: "Home timeline", - description: "See recommended notes first", + title: "Contacts", + description: "See notes from your contacts", icon: egui::include_image!("../../../../assets/icons/home_icon_dark_4x.png"), - option: AddColumnOption::Home(source.clone()), + option: AddColumnOption::Contacts(source.clone()), }); } vec.push(ColumnOptionData { @@ -376,6 +432,12 @@ impl<'a> 
AddColumnView<'a> { icon: egui::include_image!("../../../../assets/icons/profile_icon_4x.png"), option: AddColumnOption::UndecidedIndividual, }); + vec.push(ColumnOptionData { + title: "Algo", + description: "Algorithmic feeds to aid in note discovery", + icon: egui::include_image!("../../../../assets/icons/plus_icon_4x.png"), + option: AddColumnOption::Algo(AlgoOption::LastPerPubkey(Decision::Undecided)), + }); vec } @@ -486,6 +548,10 @@ pub fn render_add_column_routes( ); let resp = match route { AddColumnRoute::Base => add_column_view.ui(ui), + AddColumnRoute::Algo(r) => match r { + AddAlgoRoute::Base => add_column_view.algo_ui(ui), + AddAlgoRoute::LastPerPubkey => add_column_view.algo_last_per_pk_ui(ui), + }, AddColumnRoute::UndecidedNotification => add_column_view.notifications_ui(ui), AddColumnRoute::ExternalNotification => add_column_view.external_notification_ui(ui), AddColumnRoute::Hashtag => hashtag_ui(ui, ctx.ndb, &mut app.view_state.id_string_map), @@ -511,13 +577,66 @@ pub fn render_add_column_routes( app.columns_mut(ctx.accounts) .add_timeline_to_column(col, timeline); } + + AddColumnResponse::Algo(algo_option) => match algo_option { + // If we are undecided, we simply route to the LastPerPubkey + // algo route selection + AlgoOption::LastPerPubkey(Decision::Undecided) => { + app.columns_mut(ctx.accounts) + .column_mut(col) + .router_mut() + .route_to(Route::AddColumn(AddColumnRoute::Algo( + AddAlgoRoute::LastPerPubkey, + ))); + } + + // We have a decision on where we want the last per pubkey + // source to be, so let;s create a timeline from that and + // add it to our list of timelines + AlgoOption::LastPerPubkey(Decision::Decided(list_kind)) => { + let maybe_timeline = { + let default_user = ctx + .accounts + .get_selected_account() + .as_ref() + .map(|sa| sa.pubkey.bytes()); + + TimelineKind::last_per_pubkey(list_kind.clone()) + .into_timeline(ctx.ndb, default_user) + }; + + if let Some(mut timeline) = maybe_timeline { + 
crate::timeline::setup_new_timeline( + &mut timeline, + ctx.ndb, + &mut app.subscriptions, + ctx.pool, + ctx.note_cache, + app.since_optimize, + ctx.accounts + .get_selected_account() + .as_ref() + .map(|sa| &sa.pubkey), + ); + + app.columns_mut(ctx.accounts) + .add_timeline_to_column(col, timeline); + } else { + // we couldn't fetch the timeline yet... let's let + // the user know ? + + // TODO: spin off the list search here instead + + ui.label(format!("error: could not find {:?}", &list_kind)); + } + } + }, + AddColumnResponse::UndecidedNotification => { app.columns_mut(ctx.accounts) .column_mut(col) .router_mut() - .route_to(crate::route::Route::AddColumn( - AddColumnRoute::UndecidedNotification, - )); + .route_to(Route::AddColumn(AddColumnRoute::UndecidedNotification)); } AddColumnResponse::ExternalNotification => { app.columns_mut(ctx.accounts) From ffc6cb537cfbaf40c11c5a640e9bbe8f70f2b6c7 Mon Sep 17 00:00:00 2001 From: William Casarin Date: Thu, 26 Dec 2024 11:23:09 -0800 Subject: [PATCH 3/6] Initial token parser combinator In an attempt to make our deck serializer more localized, comprehensible, and less error-prone, we introduce a new parser combinator based around string tokens. This replaces the Selection-based intermediary types so that we have a more direct serialization style. 
--- crates/notedeck_columns/src/storage/mod.rs | 3 + .../src/storage/token_parser.rs | 177 ++++++++++++++++++ crates/notedeck_columns/src/ui/add_column.rs | 65 ++++++- 3 files changed, 244 insertions(+), 1 deletion(-) create mode 100644 crates/notedeck_columns/src/storage/token_parser.rs diff --git a/crates/notedeck_columns/src/storage/mod.rs b/crates/notedeck_columns/src/storage/mod.rs index cda44eeb..edd5df3d 100644 --- a/crates/notedeck_columns/src/storage/mod.rs +++ b/crates/notedeck_columns/src/storage/mod.rs @@ -1,5 +1,8 @@ mod decks; mod migration; +mod token_parser; pub use decks::{load_decks_cache, save_decks_cache, DECKS_CACHE_FILE}; pub use migration::{deserialize_columns, COLUMNS_FILE}; + +pub use token_parser::{ParseError, TokenParser, TokenSerializable}; diff --git a/crates/notedeck_columns/src/storage/token_parser.rs b/crates/notedeck_columns/src/storage/token_parser.rs new file mode 100644 index 00000000..a1d49f26 --- /dev/null +++ b/crates/notedeck_columns/src/storage/token_parser.rs @@ -0,0 +1,177 @@ +use crate::timeline::kind::PubkeySource; +use enostr::Pubkey; + +#[derive(Debug, Clone)] +pub struct UnexpectedToken<'fnd, 'exp> { + pub expected: &'exp str, + pub found: &'fnd str, +} + +#[derive(Debug, Clone)] +pub enum ParseError<'a> { + /// Not done parsing yet + Incomplete, + + /// All parsing options failed + AltAllFailed, + + /// There was some issue decoding the data + DecodeFailed, + + /// We encountered an unexpected token + UnexpectedToken(UnexpectedToken<'a, 'static>), + + /// No more tokens + EOF, +} + +#[derive(Clone)] +pub struct TokenParser<'a> { + tokens: &'a [&'a str], + index: usize, +} + +fn _parse_pubkey_src_tokens<'a>( + parser: &mut TokenParser<'a>, +) -> Result> { + match parser.pull_token() { + // we handle bare payloads and assume they are explicit pubkey sources + Ok("explicit") => { + let hex_str = parser.pull_token()?; + Pubkey::from_hex(hex_str) + .map_err(|_| ParseError::DecodeFailed) + .map(PubkeySource::Explicit) + } 
+ + Err(ParseError::EOF) | Ok("deck_author") => Ok(PubkeySource::DeckAuthor), + + Ok(hex_payload) => Pubkey::from_hex(hex_payload) + .map_err(|_| ParseError::DecodeFailed) + .map(PubkeySource::Explicit), + + Err(e) => Err(e), + } +} + +impl<'a> TokenParser<'a> { + /// alt tries each parser in `routes` until one succeeds. + /// If all fail, returns `ParseError::AltAllFailed`. + #[allow(clippy::type_complexity)] + pub fn alt( + parser: &mut TokenParser<'a>, + routes: &[fn(&mut TokenParser<'a>) -> Result>], + ) -> Result> { + let start = parser.index; + for route in routes { + match route(parser) { + Ok(r) => return Ok(r), // if success, stop trying more routes + Err(_) => { + // revert index & try next route + parser.index = start; + } + } + } + // if we tried them all and none succeeded + Err(ParseError::AltAllFailed) + } + + pub fn new(tokens: &'a [&'a str]) -> Self { + let index = 0; + Self { tokens, index } + } + + pub fn parse_token(&mut self, expected: &'static str) -> Result<&'a str, ParseError<'a>> { + let found = self.pull_token()?; + if found == expected { + Ok(found) + } else { + Err(ParseError::UnexpectedToken(UnexpectedToken { + expected, + found, + })) + } + } + + /// “Parse all” meaning: run the provided closure. If it fails, revert + /// the index. 
+ pub fn parse_all( + &mut self, + parse_fn: impl FnOnce(&mut Self) -> Result>, + ) -> Result> { + let start = self.index; + let result = parse_fn(self); + + // If the parser closure fails, revert the index + if result.is_err() { + self.index = start; + result + } else if !self.is_eof() { + Err(ParseError::Incomplete) + } else { + result + } + } + + pub fn pull_token(&mut self) -> Result<&'a str, ParseError<'a>> { + let token = self + .tokens + .get(self.index) + .copied() + .ok_or(ParseError::EOF)?; + self.index += 1; + Ok(token) + } + + pub fn unpop_token(&mut self) { + if (self.index as isize) - 1 < 0 { + return; + } + + self.index -= 1; + } + + #[inline] + pub fn tokens(&self) -> &'a [&'a str] { + let min_index = self.index.min(self.tokens.len()); + &self.tokens[min_index..] + } + + #[inline] + pub fn is_eof(&self) -> bool { + self.tokens().is_empty() + } +} + +pub trait TokenSerializable: Sized { + /// Return a list of serialization plans for a type. We do this for + /// type safety and assume constructing these types are lightweight + fn parse<'a>(parser: &mut TokenParser<'a>) -> Result>; +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_column_serialize() { + use crate::ui::add_column::{AddAlgoRoute, AddColumnRoute}; + + { + let data = &"column:algo_selection:last_per_pubkey" + .split(":") + .collect::>(); + let mut parser = TokenParser::new(&data); + let parsed = AddColumnRoute::parse(&mut parser).unwrap(); + let expected = AddColumnRoute::Algo(AddAlgoRoute::LastPerPubkey); + assert_eq!(expected, parsed) + } + + { + let data: &[&str] = &["column"]; + let mut parser = TokenParser::new(data); + let parsed = AddColumnRoute::parse(&mut parser).unwrap(); + let expected = AddColumnRoute::Base; + assert_eq!(expected, parsed) + } + } +} diff --git a/crates/notedeck_columns/src/ui/add_column.rs b/crates/notedeck_columns/src/ui/add_column.rs index 39a5590f..d48e7e2a 100644 --- a/crates/notedeck_columns/src/ui/add_column.rs +++ 
b/crates/notedeck_columns/src/ui/add_column.rs @@ -11,6 +11,7 @@ use nostrdb::{Ndb, Transaction}; use crate::{ login_manager::AcquireKeyState, route::Route, + storage::{ParseError, TokenParser, TokenSerializable}, timeline::{kind::ListKind, PubkeySource, Timeline, TimelineKind}, ui::anim::ICON_EXPANSION_MULTIPLE, Damus, @@ -61,8 +62,9 @@ enum AddColumnOption { Individual(PubkeySource), } -#[derive(Clone, Copy, Eq, PartialEq, Debug)] +#[derive(Clone, Copy, Eq, PartialEq, Debug, Default)] pub enum AddAlgoRoute { + #[default] Base, LastPerPubkey, } @@ -78,6 +80,67 @@ pub enum AddColumnRoute { ExternalIndividual, } +impl TokenSerializable for AddColumnRoute { + fn parse<'a>(parser: &mut TokenParser<'a>) -> Result> { + // all start with column + parser.parse_token("column")?; + + // if we're done then we have the base + if parser.is_eof() { + return Ok(AddColumnRoute::Base); + } + + TokenParser::alt( + parser, + &[ + |p| { + p.parse_all(|p| { + p.parse_token("external_notif_selection")?; + Ok(AddColumnRoute::UndecidedNotification) + }) + }, + |p| { + p.parse_all(|p| { + p.parse_token("external_notif_selection")?; + Ok(AddColumnRoute::ExternalNotification) + }) + }, + |p| { + p.parse_all(|p| { + p.parse_token("hashtag_selection")?; + Ok(AddColumnRoute::Hashtag) + }) + }, + |p| { + p.parse_all(|p| { + p.parse_token("algo_selection")?; + Ok(AddColumnRoute::Algo(AddAlgoRoute::Base)) + }) + }, + |p| { + p.parse_all(|p| { + p.parse_token("algo_selection")?; + p.parse_token("last_per_pubkey")?; + Ok(AddColumnRoute::Algo(AddAlgoRoute::LastPerPubkey)) + }) + }, + |p| { + p.parse_all(|p| { + p.parse_token("individual_selection")?; + Ok(AddColumnRoute::UndecidedIndividual) + }) + }, + |p| { + p.parse_all(|p| { + p.parse_token("external_individual_selection")?; + Ok(AddColumnRoute::ExternalIndividual) + }) + }, + ], + ) + } +} + impl AddColumnOption { pub fn take_as_response( self, From d12a99cb85f07bc739348a33a93633fb0489a715 Mon Sep 17 00:00:00 2001 From: William Casarin Date: 
Sun, 5 Jan 2025 10:57:37 -0600 Subject: [PATCH 4/6] token_parser: unify parsing and serialization This reduces the number of things we have to update in our token parser and serializer. For payloads, we have to handle the payload cases differently, but we now have a structure that can deal with that efficiently. Signed-off-by: William Casarin --- .../src/storage/token_parser.rs | 1 + crates/notedeck_columns/src/ui/add_column.rs | 90 ++++++++++--------- 2 files changed, 48 insertions(+), 43 deletions(-) diff --git a/crates/notedeck_columns/src/storage/token_parser.rs b/crates/notedeck_columns/src/storage/token_parser.rs index a1d49f26..45b43595 100644 --- a/crates/notedeck_columns/src/storage/token_parser.rs +++ b/crates/notedeck_columns/src/storage/token_parser.rs @@ -146,6 +146,7 @@ pub trait TokenSerializable { /// Return a list of serialization plans for a type. We do this for /// type safety and assume constructing these types are lightweight fn parse<'a>(parser: &mut TokenParser<'a>) -> Result>; + fn serialize(&self, write_token: fn(&str) -> Result<(), std::io::Error>) -> Result<(), std::io::Error>; } #[cfg(test)] diff --git a/crates/notedeck_columns/src/ui/add_column.rs b/crates/notedeck_columns/src/ui/add_column.rs index d48e7e2a..b89699d0 100644 --- a/crates/notedeck_columns/src/ui/add_column.rs +++ b/crates/notedeck_columns/src/ui/add_column.rs @@ -80,7 +80,47 @@ pub enum AddColumnRoute { ExternalIndividual, } +// Parser for the common case without any payloads +fn parse_column_route<'a>( + parser: &mut TokenParser<'a>, + route: AddColumnRoute, +) -> Result> { + parser.parse_all(|p| { + for token in route.tokens() { + p.parse_token(token)?; + } + Ok(route) + }) +} + +impl AddColumnRoute { + /// Route tokens use in both serialization and deserialization + fn tokens(&self) -> &'static [&'static str] { + match self { + Self::Base => &[], + Self::UndecidedNotification => &["notification_selection"], + Self::ExternalNotification =>
&["external_notif_selection"], + Self::UndecidedIndividual => &["individual_selection"], + Self::ExternalIndividual => &["external_individual_selection"], + Self::Hashtag => &["hashtag"], + Self::Algo(AddAlgoRoute::Base) => &["algo_selection"], + Self::Algo(AddAlgoRoute::LastPerPubkey) => &["algo_selection", "last_per_pubkey"], + // NOTE!!! When adding to this, update the parser for TokenSerializable below + } + } +} + impl TokenSerializable for AddColumnRoute { + fn serialize( + &self, + write_token: fn(&str) -> Result<(), std::io::Error>, + ) -> Result<(), std::io::Error> { + for token in self.tokens() { + write_token(token)?; + } + Ok(()) + } + fn parse<'a>(parser: &mut TokenParser<'a>) -> Result> { // all start with column parser.parse_token("column")?; @@ -93,49 +133,13 @@ impl TokenSerializable for AddColumnRoute { TokenParser::alt( parser, &[ - |p| { - p.parse_all(|p| { - p.parse_token("external_notif_selection")?; - Ok(AddColumnRoute::UndecidedNotification) - }) - }, - |p| { - p.parse_all(|p| { - p.parse_token("external_notif_selection")?; - Ok(AddColumnRoute::ExternalNotification) - }) - }, - |p| { - p.parse_all(|p| { - p.parse_token("hashtag_selection")?; - Ok(AddColumnRoute::Hashtag) - }) - }, - |p| { - p.parse_all(|p| { - p.parse_token("algo_selection")?; - Ok(AddColumnRoute::Algo(AddAlgoRoute::Base)) - }) - }, - |p| { - p.parse_all(|p| { - p.parse_token("algo_selection")?; - p.parse_token("last_per_pubkey")?; - Ok(AddColumnRoute::Algo(AddAlgoRoute::LastPerPubkey)) - }) - }, - |p| { - p.parse_all(|p| { - p.parse_token("individual_selection")?; - Ok(AddColumnRoute::UndecidedIndividual) - }) - }, - |p| { - p.parse_all(|p| { - p.parse_token("external_individual_selection")?; - Ok(AddColumnRoute::ExternalIndividual) - }) - }, + |p| parse_column_route(p, AddColumnRoute::UndecidedNotification), + |p| parse_column_route(p, AddColumnRoute::ExternalNotification), + |p| parse_column_route(p, AddColumnRoute::UndecidedIndividual), + |p| parse_column_route(p, 
AddColumnRoute::ExternalIndividual), + |p| parse_column_route(p, AddColumnRoute::Hashtag), + |p| parse_column_route(p, AddColumnRoute::Algo(AddAlgoRoute::Base)), + |p| parse_column_route(p, AddColumnRoute::Algo(AddAlgoRoute::LastPerPubkey)), ], ) } From c51eaa0723a723483ebae9baf08cf6acd4a249d3 Mon Sep 17 00:00:00 2001 From: William Casarin Date: Sun, 5 Jan 2025 11:50:08 -0600 Subject: [PATCH 5/6] token_serializer: introduce TokenWriter This simplifies token serialization Signed-off-by: William Casarin --- crates/notedeck_columns/src/storage/mod.rs | 2 +- .../src/storage/token_parser.rs | 38 ++++++++++++++++++- crates/notedeck_columns/src/ui/add_column.rs | 10 ++--- 3 files changed, 41 insertions(+), 9 deletions(-) diff --git a/crates/notedeck_columns/src/storage/mod.rs b/crates/notedeck_columns/src/storage/mod.rs index edd5df3d..d870d91d 100644 --- a/crates/notedeck_columns/src/storage/mod.rs +++ b/crates/notedeck_columns/src/storage/mod.rs @@ -5,4 +5,4 @@ mod token_parser; pub use decks::{load_decks_cache, save_decks_cache, DECKS_CACHE_FILE}; pub use migration::{deserialize_columns, COLUMNS_FILE}; -pub use token_parser::{ParseError, TokenParser, TokenSerializable}; +pub use token_parser::{ParseError, TokenParser, TokenSerializable, TokenWriter}; diff --git a/crates/notedeck_columns/src/storage/token_parser.rs b/crates/notedeck_columns/src/storage/token_parser.rs index 45b43595..f7bd9641 100644 --- a/crates/notedeck_columns/src/storage/token_parser.rs +++ b/crates/notedeck_columns/src/storage/token_parser.rs @@ -25,6 +25,42 @@ pub enum ParseError<'a> { EOF, } +pub struct TokenWriter { + delim: &'static str, + tokens_written: usize, + buf: Vec, +} + +impl Default for TokenWriter { + fn default() -> Self { + Self::new(":") + } +} + +impl TokenWriter { + pub fn new(delim: &'static str) -> Self { + let buf = vec![]; + let tokens_written = 0; + Self { + buf, + tokens_written, + delim, + } + } + + pub fn write_token(&mut self, token: &str) { + if self.tokens_written > 0 
{ + self.buf.extend_from_slice(self.delim.as_bytes()) + } + self.buf.extend_from_slice(token.as_bytes()); + self.tokens_written += 1; + } + + pub fn buffer(&self) -> &[u8] { + &self.buf + } +} + #[derive(Clone)] pub struct TokenParser<'a> { tokens: &'a [&'a str], @@ -146,7 +182,7 @@ pub trait TokenSerializable: Sized { /// Return a list of serialization plans for a type. We do this for /// type safety and assume constructing these types are lightweight fn parse<'a>(parser: &mut TokenParser<'a>) -> Result>; - fn serialize(&self, write_token: fn(&str) -> Result<(), std::io::Error>) -> Result<(), std::io::Error>; + fn serialize(&self, writer: &mut TokenWriter); } #[cfg(test)] diff --git a/crates/notedeck_columns/src/ui/add_column.rs b/crates/notedeck_columns/src/ui/add_column.rs index b89699d0..a5fb3a5b 100644 --- a/crates/notedeck_columns/src/ui/add_column.rs +++ b/crates/notedeck_columns/src/ui/add_column.rs @@ -11,7 +11,7 @@ use nostrdb::{Ndb, Transaction}; use crate::{ login_manager::AcquireKeyState, route::Route, - storage::{ParseError, TokenParser, TokenSerializable}, + storage::{ParseError, TokenParser, TokenSerializable, TokenWriter}, timeline::{kind::ListKind, PubkeySource, Timeline, TimelineKind}, ui::anim::ICON_EXPANSION_MULTIPLE, Damus, @@ -111,14 +111,10 @@ impl AddColumnRoute { } impl TokenSerializable for AddColumnRoute { - fn serialize( - &self, - write_token: fn(&str) -> Result<(), std::io::Error>, - ) -> Result<(), std::io::Error> { + fn serialize(&self, writer: &mut TokenWriter) { for token in self.tokens() { - write_token(token)?; + writer.write_token(token); } - Ok(()) } fn parse<'a>(parser: &mut TokenParser<'a>) -> Result> { From c3d31e54313daf2277fa7f83bbf0eb37c7426f76 Mon Sep 17 00:00:00 2001 From: William Casarin Date: Sun, 5 Jan 2025 12:05:14 -0600 Subject: [PATCH 6/6] token_parser: simplify AddColumnRoute serialization Signed-off-by: William Casarin --- .../src/storage/token_parser.rs | 24 +++++++++++++----- 
crates/notedeck_columns/src/ui/add_column.rs | 25 +++++++------------ 2 files changed, 27 insertions(+), 22 deletions(-) diff --git a/crates/notedeck_columns/src/storage/token_parser.rs b/crates/notedeck_columns/src/storage/token_parser.rs index f7bd9641..91dcedf7 100644 --- a/crates/notedeck_columns/src/storage/token_parser.rs +++ b/crates/notedeck_columns/src/storage/token_parser.rs @@ -56,6 +56,12 @@ impl TokenWriter { self.tokens_written += 1; } + pub fn str(&self) -> &str { + // SAFETY: only &strs are ever serialized, so it's guaranteed to be + // correct here + unsafe { std::str::from_utf8_unchecked(self.buffer()) } + } + pub fn buffer(&self) -> &[u8] { &self.buf } @@ -194,21 +200,27 @@ mod tests { use crate::ui::add_column::{AddAlgoRoute, AddColumnRoute}; { - let data = &"column:algo_selection:last_per_pubkey" - .split(":") - .collect::>(); + let data_str = "column:algo_selection:last_per_pubkey"; + let data = &data_str.split(":").collect::>(); + let mut token_writer = TokenWriter::default(); let mut parser = TokenParser::new(&data); let parsed = AddColumnRoute::parse(&mut parser).unwrap(); let expected = AddColumnRoute::Algo(AddAlgoRoute::LastPerPubkey); - assert_eq!(expected, parsed) + parsed.serialize(&mut token_writer); + assert_eq!(expected, parsed); + assert_eq!(token_writer.str(), data_str); } { - let data: &[&str] = &["column"]; + let data_str = "column"; + let mut token_writer = TokenWriter::default(); + let data: &[&str] = &[data_str]; let mut parser = TokenParser::new(data); let parsed = AddColumnRoute::parse(&mut parser).unwrap(); let expected = AddColumnRoute::Base; - assert_eq!(expected, parsed) + parsed.serialize(&mut token_writer); + assert_eq!(expected, parsed); + assert_eq!(token_writer.str(), data_str); } } } diff --git a/crates/notedeck_columns/src/ui/add_column.rs b/crates/notedeck_columns/src/ui/add_column.rs index a5fb3a5b..9b06a9ee 100644 --- a/crates/notedeck_columns/src/ui/add_column.rs +++
b/crates/notedeck_columns/src/ui/add_column.rs @@ -97,14 +97,14 @@ impl AddColumnRoute { /// Route tokens use in both serialization and deserialization fn tokens(&self) -> &'static [&'static str] { match self { - Self::Base => &[], - Self::UndecidedNotification => &["notification_selection"], - Self::ExternalNotification => &["external_notif_selection"], - Self::UndecidedIndividual => &["individual_selection"], - Self::ExternalIndividual => &["external_individual_selection"], - Self::Hashtag => &["hashtag"], - Self::Algo(AddAlgoRoute::Base) => &["algo_selection"], - Self::Algo(AddAlgoRoute::LastPerPubkey) => &["algo_selection", "last_per_pubkey"], + Self::Base => &["column"], + Self::UndecidedNotification => &["column", "notification_selection"], + Self::ExternalNotification => &["column", "external_notif_selection"], + Self::UndecidedIndividual => &["column", "individual_selection"], + Self::ExternalIndividual => &["column", "external_individual_selection"], + Self::Hashtag => &["column", "hashtag"], + Self::Algo(AddAlgoRoute::Base) => &["column", "algo_selection"], + Self::Algo(AddAlgoRoute::LastPerPubkey) => &["column", "algo_selection", "last_per_pubkey"], // NOTE!!! When adding to this, update the parser for TokenSerializable below } } @@ -118,17 +118,10 @@ impl TokenSerializable for AddColumnRoute { } fn parse<'a>(parser: &mut TokenParser<'a>) -> Result> { - // all start with column - parser.parse_token("column")?; - - // if we're done then we have the base - if parser.is_eof() { - return Ok(AddColumnRoute::Base); - } - TokenParser::alt( parser, &[ + |p| parse_column_route(p, AddColumnRoute::Base), |p| parse_column_route(p, AddColumnRoute::UndecidedNotification), |p| parse_column_route(p, AddColumnRoute::ExternalNotification), |p| parse_column_route(p, AddColumnRoute::UndecidedIndividual),