From 7afe3b7d7c6fd8ba904eb34810780a04f72420e5 Mon Sep 17 00:00:00 2001
From: William Casarin <jb55@jb55.com>
Date: Thu, 26 Dec 2024 09:38:56 -0800
Subject: [PATCH 01/18] algos: introduce last_n_per_pubkey_from_tags

This function creates filters for the base of our first algo in Damus,
called "last N notes per pubkey". I don't have a better name for it.

This function generates a query in the form:

  [
    {"authors": ["author_a"], "limit": 1, "kinds": [1]},
    {"authors": ["author_b"], "limit": 1, "kinds": [1]},
    {"authors": ["author_c"], "limit": 1, "kinds": [1]},
    ...
  ]

Due to an unfortunate restriction currently in nostrdb and strfry, we
can only do about 16 to 20 of these at any given time. I have made this
limit configurable in strfry[1]. I just need to do the same in nostrdb
now.

[1] https://github.com/hoytech/strfry/pull/133

Changelog-Added: Add last_n_per_pubkey_from_tags algo function
---
 crates/notedeck/src/filter.rs | 54 +++++++++++++++++++++++++++++++++++
 1 file changed, 54 insertions(+)

diff --git a/crates/notedeck/src/filter.rs b/crates/notedeck/src/filter.rs
index 81f1299e..387beb4e 100644
--- a/crates/notedeck/src/filter.rs
+++ b/crates/notedeck/src/filter.rs
@@ -190,6 +190,60 @@ impl FilteredTags {
     }
 }
 
+/// Create a "last N notes per pubkey" query.
+pub fn last_n_per_pubkey_from_tags(
+    note: &Note,
+    kind: u64,
+    notes_per_pubkey: u64,
+) -> Result<Vec<Filter>> {
+    let mut filters: Vec<Filter> = vec![];
+
+    for tag in note.tags() {
+        // TODO: fix arbitrary MAX_FILTER limit in nostrdb
+        if filters.len() == 15 {
+            break;
+        }
+
+        if tag.count() < 2 {
+            continue;
+        }
+
+        let t = if let Some(t) = tag.get_unchecked(0).variant().str() {
+            t
+        } else {
+            continue;
+        };
+
+        if t == "p" {
+            let author = if let Some(author) = tag.get_unchecked(1).variant().id() {
+                author
+            } else {
+                continue;
+            };
+
+            let mut filter = Filter::new();
+            filter.start_authors_field()?;
+            filter.add_id_element(author)?;
+            filter.end_field();
+            filters.push(filter.kinds([kind]).limit(notes_per_pubkey).build());
+        } else if t == "t" {
+            let hashtag = if let Some(hashtag) = tag.get_unchecked(1).variant().str() {
+                hashtag
+            } else {
+                continue;
+            };
+
+            let mut filter = Filter::new();
+            filter.start_tags_field('t')?;
+            filter.add_str_element(hashtag)?;
+            filter.end_field();
+            filters.push(filter.kinds([kind]).limit(notes_per_pubkey).build());
+        }
+    }
+
+    Ok(filters)
+}
+
 /// Create a filter from tags. This can be used to create a filter
 /// from a contact list
 pub fn filter_from_tags(

From 662755550f2181b6923c04919296359245f8aa75 Mon Sep 17 00:00:00 2001
From: William Casarin <jb55@jb55.com>
Date: Wed, 25 Dec 2024 19:06:04 -0800
Subject: [PATCH 02/18] wip algo timelines

Signed-off-by: William Casarin <jb55@jb55.com>
---
 crates/notedeck_columns/src/route.rs         |   6 +-
 crates/notedeck_columns/src/storage/decks.rs | 456 +++++++++++++------
 crates/notedeck_columns/src/timeline/kind.rs |  83 +++-
 crates/notedeck_columns/src/timeline/mod.rs  |  31 +-
 crates/notedeck_columns/src/ui/add_column.rs | 139 +++++-
 5 files changed, 556 insertions(+), 159 deletions(-)

diff --git a/crates/notedeck_columns/src/route.rs b/crates/notedeck_columns/src/route.rs
index 647aa66b..3f06dc44 100644
--- a/crates/notedeck_columns/src/route.rs
+++ b/crates/notedeck_columns/src/route.rs
@@ -5,7 +5,7 @@ use crate::{
     accounts::AccountsRoute,
     column::Columns,
     timeline::{kind::ColumnTitle, TimelineId, TimelineRoute},
-    ui::add_column::AddColumnRoute,
+    ui::add_column::{AddAlgoRoute, AddColumnRoute},
 };
 
 /// App routing. These describe different places you can go inside Notedeck.
@@ -88,6 +88,10 @@ impl Route { Route::ComposeNote => ColumnTitle::simple("Compose Note"), Route::AddColumn(c) => match c { AddColumnRoute::Base => ColumnTitle::simple("Add Column"), + AddColumnRoute::Algo(r) => match r { + AddAlgoRoute::Base => ColumnTitle::simple("Add Algo Column"), + AddAlgoRoute::LastPerPubkey => ColumnTitle::simple("Add Last Notes Column"), + }, AddColumnRoute::UndecidedNotification => { ColumnTitle::simple("Add Notifications Column") } diff --git a/crates/notedeck_columns/src/storage/decks.rs b/crates/notedeck_columns/src/storage/decks.rs index 3c4b9c2b..8bd1389b 100644 --- a/crates/notedeck_columns/src/storage/decks.rs +++ b/crates/notedeck_columns/src/storage/decks.rs @@ -3,6 +3,8 @@ use std::{collections::HashMap, fmt, str::FromStr}; use enostr::{NoteId, Pubkey}; use nostrdb::Ndb; use serde::{Deserialize, Serialize}; +use strum::IntoEnumIterator; +use strum_macros::EnumIter; use tracing::{error, info}; use crate::{ @@ -10,8 +12,8 @@ use crate::{ column::{Columns, IntermediaryRoute}, decks::{Deck, Decks, DecksCache}, route::Route, - timeline::{kind::ListKind, PubkeySource, TimelineKind, TimelineRoute}, - ui::add_column::AddColumnRoute, + timeline::{kind::ListKind, AlgoTimeline, PubkeySource, TimelineKind, TimelineRoute}, + ui::add_column::{AddAlgoRoute, AddColumnRoute}, Error, }; @@ -299,7 +301,7 @@ fn deserialize_columns(ndb: &Ndb, deck_user: &[u8; 32], serialized: Vec Result { + Ok(parse_selection(serialized)) + } +} + +#[derive(Clone, PartialEq, Eq, Debug, EnumIter)] +enum AlgoKeyword { + LastPerPubkey, +} + +impl AlgoKeyword { + #[inline] + pub fn name(&self) -> &'static str { + match self { + AlgoKeyword::LastPerPubkey => "last_per_pubkey", + } + } +} + +#[derive(Clone, PartialEq, Eq, Debug, EnumIter)] +enum ListKeyword { + Contact, +} + +impl ListKeyword { + #[inline] + pub fn name(&self) -> &'static str { + match self { + ListKeyword::Contact => "contact", + } + } +} + +#[derive(Clone, PartialEq, Eq, Debug, EnumIter)] +enum PubkeySourceKeyword { + Explicit, + DeckAuthor, +} + +impl PubkeySourceKeyword { + #[inline] + pub fn name(&self) -> &'static str { + match self { + PubkeySourceKeyword::Explicit => "explicit", + PubkeySourceKeyword::DeckAuthor => "deck_author", + } + } +} + +#[derive(Clone, PartialEq, Eq, Debug, EnumIter)] enum Keyword { Notifs, Universe, - Contact, - Explicit, - DeckAuthor, Profile, Hashtag, Generic, @@ -350,6 +408,7 @@ enum Keyword { Relay, Compose, Column, + AlgoSelection, NotificationSelection, ExternalNotifSelection, HashtagSelection, @@ -361,60 +420,104 @@ enum Keyword { } impl Keyword { - const MAPPING: &'static [(&'static str, Keyword, bool)] = &[ - ("notifs", Keyword::Notifs, false), - ("universe", Keyword::Universe, false), - ("contact", Keyword::Contact, false), - ("explicit", Keyword::Explicit, true), - ("deck_author", Keyword::DeckAuthor, false), - ("profile", Keyword::Profile, false), - ("hashtag", Keyword::Hashtag, true), - ("generic", Keyword::Generic, false), - ("thread", Keyword::Thread, true), - ("reply", Keyword::Reply, true), - ("quote", Keyword::Quote, true), - ("account", Keyword::Account, false), - ("show", Keyword::Show, false), - ("new", Keyword::New, false), - ("relay", Keyword::Relay, false), - ("compose", Keyword::Compose, false), - ("column", Keyword::Column, false), - ( - "notification_selection", - Keyword::NotificationSelection, - false, - ), - ( - "external_notif_selection", - Keyword::ExternalNotifSelection, - false, - ), - ("hashtag_selection", Keyword::HashtagSelection, false), - ("support", 
Keyword::Support, false), - ("deck", Keyword::Deck, false), - ("edit", Keyword::Edit, true), - ]; - - fn has_payload(&self) -> bool { - Keyword::MAPPING - .iter() - .find(|(_, keyword, _)| keyword == self) - .map(|(_, _, has_payload)| *has_payload) - .unwrap_or(false) + fn name(&self) -> &'static str { + match self { + Keyword::Notifs => "notifs", + Keyword::Universe => "universe", + Keyword::Profile => "profile", + Keyword::Hashtag => "hashtag", + Keyword::Generic => "generic", + Keyword::Thread => "thread", + Keyword::Reply => "reply", + Keyword::Quote => "quote", + Keyword::Account => "account", + Keyword::Show => "show", + Keyword::New => "new", + Keyword::Relay => "relay", + Keyword::Compose => "compose", + Keyword::Column => "column", + Keyword::AlgoSelection => "algo_selection", + Keyword::NotificationSelection => "notification_selection", + Keyword::ExternalNotifSelection => "external_notif_selection", + Keyword::IndividualSelection => "individual_selection", + Keyword::ExternalIndividualSelection => "external_individual_selection", + Keyword::HashtagSelection => "hashtag_selection", + Keyword::Support => "support", + Keyword::Deck => "deck", + Keyword::Edit => "edit", + } } } impl fmt::Display for Keyword { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - if let Some(name) = Keyword::MAPPING - .iter() - .find(|(_, keyword, _)| keyword == self) - .map(|(name, _, _)| *name) - { - write!(f, "{}", name) - } else { - write!(f, "UnknownKeyword") + write!(f, "{}", self.name()) + } +} + +impl fmt::Display for AlgoKeyword { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.name()) + } +} + +impl fmt::Display for ListKeyword { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.name()) + } +} + +impl FromStr for PubkeySourceKeyword { + type Err = Error; + + fn from_str(serialized: &str) -> Result { + for keyword in Self::iter() { + if serialized == keyword.name() { + return Ok(keyword); + } } + + Err(Error::Generic( + "Could not convert string to Keyword enum".to_owned(), + )) + } +} + +impl FromStr for ListKeyword { + type Err = Error; + + fn from_str(serialized: &str) -> Result { + for keyword in Self::iter() { + if serialized == keyword.name() { + return Ok(keyword); + } + } + + Err(Error::Generic( + "Could not convert string to Keyword enum".to_owned(), + )) + } +} + +impl fmt::Display for PubkeySourceKeyword { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.name()) + } +} + +impl FromStr for AlgoKeyword { + type Err = Error; + + fn from_str(serialized: &str) -> Result { + for keyword in Self::iter() { + if serialized == keyword.name() { + return Ok(keyword); + } + } + + Err(Error::Generic( + "Could not convert string to Keyword enum".to_owned(), + )) } } @@ -422,13 +525,15 @@ impl FromStr for Keyword { type Err = Error; fn from_str(serialized: &str) -> Result { - Keyword::MAPPING - .iter() - .find(|(name, _, _)| *name == serialized) - .map(|(_, keyword, _)| keyword.clone()) - .ok_or(Error::Generic( - "Could not convert string to Keyword enum".to_owned(), - )) + for keyword in Self::iter() { + if serialized == keyword.name() { + return Ok(keyword); + } + } + + Err(Error::Generic( + "Could not convert string to Keyword enum".to_owned(), + )) } } @@ -458,10 +563,19 @@ fn serialize_route(route: &Route, columns: &Columns) -> Option { match &timeline.kind { TimelineKind::List(list_kind) => match list_kind { ListKind::Contact(pubkey_source) => { - 
selections.push(Selection::Keyword(Keyword::Contact)); + selections.push(Selection::List(ListKeyword::Contact)); selections.extend(generate_pubkey_selections(pubkey_source)); } }, + TimelineKind::Algo(AlgoTimeline::LastPerPubkey(list_kind)) => { + match list_kind { + ListKind::Contact(pk_src) => { + selections.push(Selection::Algo(AlgoKeyword::LastPerPubkey)); + selections.push(Selection::List(ListKeyword::Contact)); + selections.extend(generate_pubkey_selections(pk_src)); + } + } + } TimelineKind::Notifications(pubkey_source) => { selections.push(Selection::Keyword(Keyword::Notifs)); selections.extend(generate_pubkey_selections(pubkey_source)); @@ -493,7 +607,7 @@ fn serialize_route(route: &Route, columns: &Columns) -> Option { } TimelineRoute::Profile(pubkey) => { selections.push(Selection::Keyword(Keyword::Profile)); - selections.push(Selection::Keyword(Keyword::Explicit)); + selections.push(Selection::PubkeySource(PubkeySourceKeyword::Explicit)); selections.push(Selection::Payload(pubkey.hex())); } TimelineRoute::Reply(note_id) => { @@ -518,6 +632,16 @@ fn serialize_route(route: &Route, columns: &Columns) -> Option { selections.push(Selection::Keyword(Keyword::Column)); match add_column_route { AddColumnRoute::Base => (), + AddColumnRoute::Algo(algo_route) => match algo_route { + AddAlgoRoute::Base => { + selections.push(Selection::Keyword(Keyword::AlgoSelection)) + } + + AddAlgoRoute::LastPerPubkey => { + selections.push(Selection::Keyword(Keyword::AlgoSelection)); + selections.push(Selection::Algo(AlgoKeyword::LastPerPubkey)); + } + }, AddColumnRoute::UndecidedNotification => { selections.push(Selection::Keyword(Keyword::NotificationSelection)) } @@ -569,109 +693,149 @@ fn generate_pubkey_selections(source: &PubkeySource) -> Vec { let mut selections = Vec::new(); match source { PubkeySource::Explicit(pubkey) => { - selections.push(Selection::Keyword(Keyword::Explicit)); + selections.push(Selection::PubkeySource(PubkeySourceKeyword::Explicit)); selections.push(Selection::Payload(pubkey.hex())); } PubkeySource::DeckAuthor => { - selections.push(Selection::Keyword(Keyword::DeckAuthor)); + selections.push(Selection::PubkeySource(PubkeySourceKeyword::DeckAuthor)); } } selections } +/// Parses a selection +fn parse_selection(token: &str) -> Selection { + AlgoKeyword::from_str(token) + .map(Selection::Algo) + .or_else(|_| ListKeyword::from_str(token).map(Selection::List)) + .or_else(|_| PubkeySourceKeyword::from_str(token).map(Selection::PubkeySource)) + .or_else(|_| Keyword::from_str(token).map(Selection::Keyword)) + .unwrap_or_else(|_| Selection::Payload(token.to_owned())) +} + impl Selection { - fn from_serialized(serialized: &str) -> Vec { + fn from_serialized(buffer: &str) -> Vec { let mut selections = Vec::new(); let seperator = ":"; + let sep_len = seperator.len(); + let mut pos = 0; - let mut serialized_copy = serialized.to_string(); - let mut buffer = serialized_copy.as_mut(); - - let mut next_is_payload = false; - while let Some(index) = buffer.find(seperator) { - if let Ok(keyword) = Keyword::from_str(&buffer[..index]) { - selections.push(Selection::Keyword(keyword.clone())); - if keyword.has_payload() { - next_is_payload = true; - } - } - - buffer = &mut buffer[index + seperator.len()..]; + while let Some(offset) = buffer[pos..].find(seperator) { + selections.push(parse_selection(&buffer[pos..pos + offset])); + pos = pos + offset + sep_len; } - if next_is_payload { - selections.push(Selection::Payload(buffer.to_string())); - } else if let Ok(keyword) = Keyword::from_str(buffer) 
{ - selections.push(Selection::Keyword(keyword.clone())); - } + selections.push(parse_selection(&buffer[pos..])); selections } } -fn selections_to_route(selections: Vec) -> Option { +/// Parse an explicit:abdef... or deck_author from a Selection token stream. +/// +/// Also handle the case where there is nothing. We assume this means deck_author. +fn parse_pubkey_src_selection(tokens: &[Selection]) -> Option { + match tokens.first() { + // we handle bare payloads and assume they are explicit pubkey sources + Some(Selection::Payload(hex)) => { + let pk = Pubkey::from_hex(hex.as_str()).ok()?; + Some(PubkeySource::Explicit(pk)) + } + + Some(Selection::PubkeySource(PubkeySourceKeyword::Explicit)) => { + if let Selection::Payload(hex) = tokens.get(1)? { + let pk = Pubkey::from_hex(hex.as_str()).ok()?; + Some(PubkeySource::Explicit(pk)) + } else { + None + } + } + + None | Some(Selection::PubkeySource(PubkeySourceKeyword::DeckAuthor)) => { + Some(PubkeySource::DeckAuthor) + } + + Some(Selection::Keyword(_kw)) => None, + Some(Selection::Algo(_kw)) => None, + Some(Selection::List(_kw)) => None, + } +} + +/// Parse ListKinds from Selections +fn parse_list_kind_selections(tokens: &[Selection]) -> Option { + // only list selections are valid in this position + let list_kw = if let Selection::List(list_kw) = tokens.first()? { + list_kw + } else { + return None; + }; + + let pubkey_src = parse_pubkey_src_selection(&tokens[1..])?; + + Some(match list_kw { + ListKeyword::Contact => ListKind::contact_list(pubkey_src), + }) +} + +fn selections_to_route(selections: &[Selection]) -> Option { match selections.first()? { - Selection::Keyword(Keyword::Contact) => match selections.get(1)? { - Selection::Keyword(Keyword::Explicit) => { - if let Selection::Payload(hex) = selections.get(2)? { - Some(CleanIntermediaryRoute::ToTimeline( - TimelineKind::contact_list(PubkeySource::Explicit( - Pubkey::from_hex(hex.as_str()).ok()?, - )), - )) - } else { - None + Selection::Keyword(Keyword::AlgoSelection) => { + let r = match selections.get(1) { + None => AddColumnRoute::Algo(AddAlgoRoute::Base), + Some(Selection::Algo(algo_kw)) => match algo_kw { + AlgoKeyword::LastPerPubkey => AddColumnRoute::Algo(AddAlgoRoute::LastPerPubkey), + }, + // other keywords are invalid here + Some(_) => { + return None; } - } - Selection::Keyword(Keyword::DeckAuthor) => Some(CleanIntermediaryRoute::ToTimeline( - TimelineKind::contact_list(PubkeySource::DeckAuthor), - )), - _ => None, - }, - Selection::Keyword(Keyword::Notifs) => match selections.get(1)? { - Selection::Keyword(Keyword::Explicit) => { - if let Selection::Payload(hex) = selections.get(2)? { - Some(CleanIntermediaryRoute::ToTimeline( - TimelineKind::notifications(PubkeySource::Explicit( - Pubkey::from_hex(hex.as_str()).ok()?, - )), - )) - } else { - None + }; + + Some(CleanIntermediaryRoute::ToRoute(Route::AddColumn(r))) + } + + // Algorithm timelines + Selection::Algo(algo_kw) => { + let timeline_kind = match algo_kw { + AlgoKeyword::LastPerPubkey => { + let list_kind = parse_list_kind_selections(&selections[1..])?; + TimelineKind::last_per_pubkey(list_kind) } - } - Selection::Keyword(Keyword::DeckAuthor) => Some(CleanIntermediaryRoute::ToTimeline( - TimelineKind::notifications(PubkeySource::DeckAuthor), - )), - _ => None, - }, - Selection::Keyword(Keyword::Profile) => match selections.get(1)? { - Selection::Keyword(Keyword::Explicit) => { - if let Selection::Payload(hex) = selections.get(2)? 
{ - Some(CleanIntermediaryRoute::ToTimeline(TimelineKind::profile( - PubkeySource::Explicit(Pubkey::from_hex(hex.as_str()).ok()?), - ))) - } else { - None - } - } - Selection::Keyword(Keyword::DeckAuthor) => Some(CleanIntermediaryRoute::ToTimeline( - TimelineKind::profile(PubkeySource::DeckAuthor), - )), - Selection::Keyword(Keyword::Edit) => { - if let Selection::Payload(hex) = selections.get(2)? { - Some(CleanIntermediaryRoute::ToRoute(Route::EditProfile( - Pubkey::from_hex(hex.as_str()).ok()?, - ))) - } else { - None - } - } - _ => None, - }, + }; + + Some(CleanIntermediaryRoute::ToTimeline(timeline_kind)) + } + + // We never have PubkeySource keywords at the top level + Selection::PubkeySource(_pk_src) => None, + + Selection::List(ListKeyword::Contact) => { + // only pubkey/src is allowed in this position + let pubkey_src = parse_pubkey_src_selection(&selections[1..])?; + Some(CleanIntermediaryRoute::ToTimeline( + TimelineKind::contact_list(pubkey_src), + )) + } + + Selection::Keyword(Keyword::Notifs) => { + let pubkey_src = parse_pubkey_src_selection(&selections[1..])?; + Some(CleanIntermediaryRoute::ToTimeline( + TimelineKind::notifications(pubkey_src), + )) + } + + Selection::Keyword(Keyword::Profile) => { + // we only expect PubkeySource in this position + let pubkey_src = parse_pubkey_src_selection(&selections[1..])?; + Some(CleanIntermediaryRoute::ToTimeline(TimelineKind::profile( + pubkey_src, + ))) + } + Selection::Keyword(Keyword::Universe) => { Some(CleanIntermediaryRoute::ToTimeline(TimelineKind::Universe)) } + Selection::Keyword(Keyword::Hashtag) => { if let Selection::Payload(hashtag) = selections.get(1)? { Some(CleanIntermediaryRoute::ToTimeline(TimelineKind::Hashtag( @@ -681,9 +845,11 @@ fn selections_to_route(selections: Vec) -> Option { Some(CleanIntermediaryRoute::ToTimeline(TimelineKind::Generic)) } + Selection::Keyword(Keyword::Thread) => { if let Selection::Payload(hex) = selections.get(1)? { Some(CleanIntermediaryRoute::ToRoute(Route::thread( @@ -693,6 +859,7 @@ fn selections_to_route(selections: Vec) -> Option { if let Selection::Payload(hex) = selections.get(1)? 
{
+                Some(CleanIntermediaryRoute::ToRoute(Route::reply(
@@ -770,9 +937,7 @@ fn selections_to_route(selections: Vec<Selection>) -> Option<CleanIntermediaryRoute> {
             _ => None,
         },
         Selection::Payload(_)
-        | Selection::Keyword(Keyword::Explicit)
         | Selection::Keyword(Keyword::New)
-        | Selection::Keyword(Keyword::DeckAuthor)
         | Selection::Keyword(Keyword::Show)
         | Selection::Keyword(Keyword::NotificationSelection)
         | Selection::Keyword(Keyword::ExternalNotifSelection)
@@ -788,6 +953,9 @@ impl fmt::Display for Selection {
         match self {
             Selection::Keyword(keyword) => write!(f, "{}", keyword),
             Selection::Payload(payload) => write!(f, "{}", payload),
+            Selection::Algo(algo_kw) => write!(f, "{}", algo_kw),
+            Selection::List(list_kw) => write!(f, "{}", list_kw),
+            Selection::PubkeySource(pk_src_kw) => write!(f, "{}", pk_src_kw),
         }
     }
 }
diff --git a/crates/notedeck_columns/src/timeline/kind.rs b/crates/notedeck_columns/src/timeline/kind.rs
index c6d072cc..3d219140 100644
--- a/crates/notedeck_columns/src/timeline/kind.rs
+++ b/crates/notedeck_columns/src/timeline/kind.rs
@@ -35,6 +35,10 @@ impl PubkeySource {
 }
 
 impl ListKind {
+    pub fn contact_list(pk_src: PubkeySource) -> Self {
+        ListKind::Contact(pk_src)
+    }
+
     pub fn pubkey_source(&self) -> Option<&PubkeySource> {
         match self {
             ListKind::Contact(pk_src) => Some(pk_src),
@@ -54,6 +58,9 @@ impl ListKind {
 pub enum TimelineKind {
     List(ListKind),
 
+    /// The last note per pubkey
+    Algo(AlgoTimeline),
+
     Notifications(PubkeySource),
 
     Profile(PubkeySource),
@@ -69,10 +76,19 @@ pub enum TimelineKind {
     Hashtag(String),
 }
 
+/// Hardcoded algo timelines
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum AlgoTimeline {
+    /// LastPerPubkey: a special nostr query that fetches the last N
+    /// notes for each pubkey on the list
+    LastPerPubkey(ListKind),
+}
+
 impl Display for TimelineKind {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match self {
             TimelineKind::List(ListKind::Contact(_src)) => f.write_str("Contacts"),
+            TimelineKind::Algo(AlgoTimeline::LastPerPubkey(_lk)) => f.write_str("Last Notes"),
             TimelineKind::Generic => f.write_str("Timeline"),
             TimelineKind::Notifications(_) => f.write_str("Notifications"),
             TimelineKind::Profile(_) => f.write_str("Profile"),
@@ -87,6 +103,7 @@ impl TimelineKind {
     pub fn pubkey_source(&self) -> Option<&PubkeySource> {
         match self {
             TimelineKind::List(list_kind) => list_kind.pubkey_source(),
+            TimelineKind::Algo(AlgoTimeline::LastPerPubkey(list_kind)) => list_kind.pubkey_source(),
             TimelineKind::Notifications(pk_src) => Some(pk_src),
             TimelineKind::Profile(pk_src) => Some(pk_src),
             TimelineKind::Universe => None,
@@ -96,8 +113,27 @@ impl TimelineKind {
         }
     }
 
+    /// Some feeds are not realtime, like certain algo feeds
+    pub fn should_subscribe_locally(&self) -> bool {
+        match self {
+            TimelineKind::Algo(AlgoTimeline::LastPerPubkey(_list_kind)) => false,
+
+            TimelineKind::List(_list_kind) => true,
+            TimelineKind::Notifications(_pk_src) => true,
+            TimelineKind::Profile(_pk_src) => true,
+            TimelineKind::Universe => true,
+            TimelineKind::Generic => true,
+            TimelineKind::Hashtag(_ht) => true,
+            TimelineKind::Thread(_ht) => true,
+        }
+    }
+
+    pub fn last_per_pubkey(list_kind: ListKind) -> Self {
+        TimelineKind::Algo(AlgoTimeline::LastPerPubkey(list_kind))
+    }
+
     pub fn contact_list(pk: PubkeySource) -> Self {
-        TimelineKind::List(ListKind::Contact(pk))
+        TimelineKind::List(ListKind::contact_list(pk))
     }
 
     pub fn is_contacts(&self) -> bool {
@@ -138,6 +174,48 @@ impl TimelineKind {
                 None
             }
 
+            TimelineKind::Algo(AlgoTimeline::LastPerPubkey(ListKind::Contact(pk_src))) => {
+                let pk =
match &pk_src { + PubkeySource::DeckAuthor => default_user?, + PubkeySource::Explicit(pk) => pk.bytes(), + }; + + let contact_filter = Filter::new().authors([pk]).kinds([3]).limit(1).build(); + + let txn = Transaction::new(ndb).expect("txn"); + let results = ndb + .query(&txn, &[contact_filter.clone()], 1) + .expect("contact query failed?"); + + let kind_fn = TimelineKind::last_per_pubkey; + let tabs = TimelineTab::only_notes_and_replies(); + + if results.is_empty() { + return Some(Timeline::new( + kind_fn(ListKind::contact_list(pk_src)), + FilterState::needs_remote(vec![contact_filter.clone()]), + tabs, + )); + } + + let list_kind = ListKind::contact_list(pk_src); + + match Timeline::last_per_pubkey(&results[0].note, &list_kind) { + Err(Error::App(notedeck::Error::Filter(FilterError::EmptyContactList))) => { + Some(Timeline::new( + kind_fn(list_kind), + FilterState::needs_remote(vec![contact_filter]), + tabs, + )) + } + Err(e) => { + error!("Unexpected error: {e}"); + None + } + Ok(tl) => Some(tl), + } + } + TimelineKind::Profile(pk_src) => { let pk = match &pk_src { PubkeySource::DeckAuthor => default_user?, @@ -222,6 +300,9 @@ impl TimelineKind { TimelineKind::List(list_kind) => match list_kind { ListKind::Contact(_pubkey_source) => ColumnTitle::simple("Contacts"), }, + TimelineKind::Algo(AlgoTimeline::LastPerPubkey(list_kind)) => match list_kind { + ListKind::Contact(_pubkey_source) => ColumnTitle::simple("Contacts (last notes)"), + }, TimelineKind::Notifications(_pubkey_source) => ColumnTitle::simple("Notifications"), TimelineKind::Profile(_pubkey_source) => ColumnTitle::needs_db(self), TimelineKind::Thread(_root_id) => ColumnTitle::simple("Thread"), diff --git a/crates/notedeck_columns/src/timeline/mod.rs b/crates/notedeck_columns/src/timeline/mod.rs index 9d6b7474..8fc81e6a 100644 --- a/crates/notedeck_columns/src/timeline/mod.rs +++ b/crates/notedeck_columns/src/timeline/mod.rs @@ -4,6 +4,7 @@ use crate::{ error::Error, subscriptions::{self, SubKind, Subscriptions}, thread::Thread, + timeline::kind::ListKind, Result, }; @@ -29,7 +30,7 @@ pub mod kind; pub mod route; pub use cache::{TimelineCache, TimelineCacheKey}; -pub use kind::{ColumnTitle, PubkeySource, TimelineKind}; +pub use kind::{AlgoTimeline, ColumnTitle, PubkeySource, TimelineKind}; pub use route::TimelineRoute; #[derive(Debug, Hash, Copy, Clone, Eq, PartialEq)] @@ -227,6 +228,18 @@ impl Timeline { ) } + pub fn last_per_pubkey(list: &Note, list_kind: &ListKind) -> Result { + let kind = 1; + let notes_per_pk = 1; + let filter = filter::last_n_per_pubkey_from_tags(list, kind, notes_per_pk)?; + + Ok(Timeline::new( + TimelineKind::last_per_pubkey(list_kind.clone()), + FilterState::ready(filter), + TimelineTab::only_notes_and_replies(), + )) + } + pub fn hashtag(hashtag: String) -> Self { let filter = Filter::new() .kinds([1]) @@ -397,6 +410,11 @@ impl Timeline { note_cache: &mut NoteCache, reversed: bool, ) -> Result<()> { + if !self.kind.should_subscribe_locally() { + // don't need to poll for timelines that don't have local subscriptions + return Ok(()); + } + let sub = self .subscription .ok_or(Error::App(notedeck::Error::no_active_sub()))?; @@ -601,13 +619,20 @@ fn setup_initial_timeline( note_cache: &mut NoteCache, filters: &[Filter], ) -> Result<()> { - timeline.subscription = Some(ndb.subscribe(filters)?); + // some timelines are one-shot and a refreshed, like last_per_pubkey algo feed + if timeline.kind.should_subscribe_locally() { + timeline.subscription = Some(ndb.subscribe(filters)?); + } let txn = 
Transaction::new(ndb)?; debug!( "querying nostrdb sub {:?} {:?}", timeline.subscription, timeline.filter ); - let lim = filters[0].limit().unwrap_or(filter::default_limit()) as i32; + + let mut lim = 0i32; + for filter in filters { + lim += filter.limit().unwrap_or(1) as i32; + } let notes: Vec = ndb .query(&txn, filters, lim)? diff --git a/crates/notedeck_columns/src/ui/add_column.rs b/crates/notedeck_columns/src/ui/add_column.rs index eb80398e..cd98f280 100644 --- a/crates/notedeck_columns/src/ui/add_column.rs +++ b/crates/notedeck_columns/src/ui/add_column.rs @@ -10,7 +10,8 @@ use nostrdb::{Ndb, Transaction}; use crate::{ login_manager::AcquireKeyState, - timeline::{PubkeySource, Timeline, TimelineKind}, + route::Route, + timeline::{kind::ListKind, PubkeySource, Timeline, TimelineKind}, ui::anim::ICON_EXPANSION_MULTIPLE, Damus, }; @@ -24,22 +25,35 @@ pub enum AddColumnResponse { UndecidedNotification, ExternalNotification, Hashtag, + Algo(AlgoOption), UndecidedIndividual, ExternalIndividual, } pub enum NotificationColumnType { - Home, + Contacts, External, } +#[derive(Clone, Debug)] +pub enum Decision { + Undecided, + Decided(T), +} + +#[derive(Clone, Debug)] +pub enum AlgoOption { + LastPerPubkey(Decision), +} + #[derive(Clone, Debug)] enum AddColumnOption { Universe, UndecidedNotification, ExternalNotification, + Algo(AlgoOption), Notification(PubkeySource), - Home(PubkeySource), + Contacts(PubkeySource), UndecidedHashtag, Hashtag(String), UndecidedIndividual, @@ -47,12 +61,19 @@ enum AddColumnOption { Individual(PubkeySource), } +#[derive(Clone, Copy, Eq, PartialEq, Debug)] +pub enum AddAlgoRoute { + Base, + LastPerPubkey, +} + #[derive(Clone, Copy, Eq, PartialEq, Debug)] pub enum AddColumnRoute { Base, UndecidedNotification, ExternalNotification, Hashtag, + Algo(AddAlgoRoute), UndecidedIndividual, ExternalIndividual, } @@ -64,6 +85,7 @@ impl AddColumnOption { cur_account: Option<&UserAccount>, ) -> Option { match self { + AddColumnOption::Algo(algo_option) => Some(AddColumnResponse::Algo(algo_option)), AddColumnOption::Universe => TimelineKind::Universe .into_timeline(ndb, None) .map(AddColumnResponse::Timeline), @@ -73,7 +95,7 @@ impl AddColumnOption { AddColumnOption::UndecidedNotification => { Some(AddColumnResponse::UndecidedNotification) } - AddColumnOption::Home(pubkey) => { + AddColumnOption::Contacts(pubkey) => { let tlk = TimelineKind::contact_list(pubkey); tlk.into_timeline(ndb, cur_account.map(|a| a.pubkey.bytes())) .map(AddColumnResponse::Timeline) @@ -151,6 +173,40 @@ impl<'a> AddColumnView<'a> { }) } + fn algo_last_per_pk_ui(&mut self, ui: &mut Ui) -> Option { + let algo_option = ColumnOptionData { + title: "Contact List", + description: "Source the last note for each user in your contact list", + icon: egui::include_image!("../../../../assets/icons/home_icon_dark_4x.png"), + option: AddColumnOption::Algo(AlgoOption::LastPerPubkey(Decision::Decided( + ListKind::contact_list(PubkeySource::DeckAuthor), + ))), + }; + + let option = algo_option.option.clone(); + if self.column_option_ui(ui, algo_option).clicked() { + option.take_as_response(self.ndb, self.cur_account) + } else { + None + } + } + + fn algo_ui(&mut self, ui: &mut Ui) -> Option { + let algo_option = ColumnOptionData { + title: "Last Note per User", + description: "Show the last note for each user from a list", + icon: egui::include_image!("../../../../assets/icons/universe_icon_dark_4x.png"), + option: AddColumnOption::Algo(AlgoOption::LastPerPubkey(Decision::Undecided)), + }; + + let option = 
algo_option.option.clone();
+        if self.column_option_ui(ui, algo_option).clicked() {
+            option.take_as_response(self.ndb, self.cur_account)
+        } else {
+            None
+        }
+    }
+
     fn individual_ui(&mut self, ui: &mut Ui) -> Option<AddColumnResponse> {
         let mut selected_option: Option<AddColumnResponse> = None;
         for column_option_data in self.get_individual_options() {
@@ -352,10 +408,10 @@ impl<'a> AddColumnView<'a> {
         };
 
         vec.push(ColumnOptionData {
-            title: "Home timeline",
-            description: "See recommended notes first",
+            title: "Contacts",
+            description: "See notes from your contacts",
             icon: egui::include_image!("../../../../assets/icons/home_icon_dark_4x.png"),
-            option: AddColumnOption::Home(source.clone()),
+            option: AddColumnOption::Contacts(source.clone()),
         });
     }
     vec.push(ColumnOptionData {
@@ -376,6 +432,12 @@ impl<'a> AddColumnView<'a> {
             icon: egui::include_image!("../../../../assets/icons/profile_icon_4x.png"),
             option: AddColumnOption::UndecidedIndividual,
         });
+        vec.push(ColumnOptionData {
+            title: "Algo",
+            description: "Algorithmic feeds to aid in note discovery",
+            icon: egui::include_image!("../../../../assets/icons/plus_icon_4x.png"),
+            option: AddColumnOption::Algo(AlgoOption::LastPerPubkey(Decision::Undecided)),
+        });
 
         vec
     }
@@ -486,6 +548,10 @@ pub fn render_add_column_routes(
     );
     let resp = match route {
         AddColumnRoute::Base => add_column_view.ui(ui),
+        AddColumnRoute::Algo(r) => match r {
+            AddAlgoRoute::Base => add_column_view.algo_ui(ui),
+            AddAlgoRoute::LastPerPubkey => add_column_view.algo_last_per_pk_ui(ui),
+        },
         AddColumnRoute::UndecidedNotification => add_column_view.notifications_ui(ui),
         AddColumnRoute::ExternalNotification => add_column_view.external_notification_ui(ui),
         AddColumnRoute::Hashtag => hashtag_ui(ui, ctx.ndb, &mut app.view_state.id_string_map),
@@ -511,13 +577,66 @@ pub fn render_add_column_routes(
                 app.columns_mut(ctx.accounts)
                     .add_timeline_to_column(col, timeline);
             }
+
+            AddColumnResponse::Algo(algo_option) => match algo_option {
+                // If we are undecided, we simply route to the LastPerPubkey
+                // algo route selection
+                AlgoOption::LastPerPubkey(Decision::Undecided) => {
+                    app.columns_mut(ctx.accounts)
+                        .column_mut(col)
+                        .router_mut()
+                        .route_to(Route::AddColumn(AddColumnRoute::Algo(
+                            AddAlgoRoute::LastPerPubkey,
+                        )));
+                }
+
+                // We have a decision on where we want the last per pubkey
+                // source to be, so let's create a timeline from that and
+                // add it to our list of timelines
+                AlgoOption::LastPerPubkey(Decision::Decided(list_kind)) => {
+                    let maybe_timeline = {
+                        let default_user = ctx
+                            .accounts
+                            .get_selected_account()
+                            .as_ref()
+                            .map(|sa| sa.pubkey.bytes());
+
+                        TimelineKind::last_per_pubkey(list_kind.clone())
+                            .into_timeline(ctx.ndb, default_user)
+                    };
+
+                    if let Some(mut timeline) = maybe_timeline {
+                        crate::timeline::setup_new_timeline(
+                            &mut timeline,
+                            ctx.ndb,
+                            &mut app.subscriptions,
+                            ctx.pool,
+                            ctx.note_cache,
+                            app.since_optimize,
+                            ctx.accounts
+                                .get_selected_account()
+                                .as_ref()
+                                .map(|sa| &sa.pubkey),
+                        );
+
+                        app.columns_mut(ctx.accounts)
+                            .add_timeline_to_column(col, timeline);
+                    } else {
+                        // we couldn't fetch the timeline yet... let's let
+                        // the user know?
+ + // TODO: spin off the list search here instead + + ui.label(format!("error: could not find {:?}", &list_kind)); + } + } + }, + AddColumnResponse::UndecidedNotification => { app.columns_mut(ctx.accounts) .column_mut(col) .router_mut() - .route_to(crate::route::Route::AddColumn( - AddColumnRoute::UndecidedNotification, - )); + .route_to(Route::AddColumn(AddColumnRoute::UndecidedNotification)); } AddColumnResponse::ExternalNotification => { app.columns_mut(ctx.accounts) From 005ecd740de7273ec1be927b651c49d212036204 Mon Sep 17 00:00:00 2001 From: William Casarin Date: Thu, 26 Dec 2024 11:23:09 -0800 Subject: [PATCH 03/18] Initial token parser combinator In an attempt to make our deck serializer more localized, comprehensible, and less error-prone, we introduce a new parser combinator based around string tokens. This replaces the Selection-based intermediary types so that we have a more direct serialization style. --- crates/notedeck_columns/src/storage/mod.rs | 3 + .../src/storage/token_parser.rs | 177 ++++++++++++++++++ crates/notedeck_columns/src/ui/add_column.rs | 65 ++++++- 3 files changed, 244 insertions(+), 1 deletion(-) create mode 100644 crates/notedeck_columns/src/storage/token_parser.rs diff --git a/crates/notedeck_columns/src/storage/mod.rs b/crates/notedeck_columns/src/storage/mod.rs index cda44eeb..edd5df3d 100644 --- a/crates/notedeck_columns/src/storage/mod.rs +++ b/crates/notedeck_columns/src/storage/mod.rs @@ -1,5 +1,8 @@ mod decks; mod migration; +mod token_parser; pub use decks::{load_decks_cache, save_decks_cache, DECKS_CACHE_FILE}; pub use migration::{deserialize_columns, COLUMNS_FILE}; + +pub use token_parser::{ParseError, TokenParser, TokenSerializable}; diff --git a/crates/notedeck_columns/src/storage/token_parser.rs b/crates/notedeck_columns/src/storage/token_parser.rs new file mode 100644 index 00000000..a1d49f26 --- /dev/null +++ b/crates/notedeck_columns/src/storage/token_parser.rs @@ -0,0 +1,177 @@ +use crate::timeline::kind::PubkeySource; +use enostr::Pubkey; + +#[derive(Debug, Clone)] +pub struct UnexpectedToken<'fnd, 'exp> { + pub expected: &'exp str, + pub found: &'fnd str, +} + +#[derive(Debug, Clone)] +pub enum ParseError<'a> { + /// Not done parsing yet + Incomplete, + + /// All parsing options failed + AltAllFailed, + + /// There was some issue decoding the data + DecodeFailed, + + /// We encountered an unexpected token + UnexpectedToken(UnexpectedToken<'a, 'static>), + + /// No more tokens + EOF, +} + +#[derive(Clone)] +pub struct TokenParser<'a> { + tokens: &'a [&'a str], + index: usize, +} + +fn _parse_pubkey_src_tokens<'a>( + parser: &mut TokenParser<'a>, +) -> Result> { + match parser.pull_token() { + // we handle bare payloads and assume they are explicit pubkey sources + Ok("explicit") => { + let hex_str = parser.pull_token()?; + Pubkey::from_hex(hex_str) + .map_err(|_| ParseError::DecodeFailed) + .map(PubkeySource::Explicit) + } + + Err(ParseError::EOF) | Ok("deck_author") => Ok(PubkeySource::DeckAuthor), + + Ok(hex_payload) => Pubkey::from_hex(hex_payload) + .map_err(|_| ParseError::DecodeFailed) + .map(PubkeySource::Explicit), + + Err(e) => Err(e), + } +} + +impl<'a> TokenParser<'a> { + /// alt tries each parser in `routes` until one succeeds. + /// If all fail, returns `ParseError::AltAllFailed`. 
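+    ///
+    /// An illustrative sketch of the intended call shape (the tokens here
+    /// are hypothetical, and the block is `ignore`d since it is not a
+    /// compiled doctest):
+    ///
+    /// ```ignore
+    /// let tokens = ["notifs"];
+    /// let mut parser = TokenParser::new(&tokens);
+    /// let parsed = TokenParser::alt(&mut parser, &[
+    ///     |p| p.parse_token("universe").map(|_| "universe route"),
+    ///     |p| p.parse_token("notifs").map(|_| "notifs route"),
+    /// ]);
+    /// // the first route fails and rewinds the index; the second matches
+    /// assert_eq!(parsed.unwrap(), "notifs route");
+    /// ```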
#[allow(clippy::type_complexity)]
+    pub fn alt<R>(
+        parser: &mut TokenParser<'a>,
+        routes: &[fn(&mut TokenParser<'a>) -> Result<R, ParseError<'a>>],
+    ) -> Result<R, ParseError<'a>> {
+        let start = parser.index;
+        for route in routes {
+            match route(parser) {
+                Ok(r) => return Ok(r), // if success, stop trying more routes
+                Err(_) => {
+                    // revert index & try next route
+                    parser.index = start;
+                }
+            }
+        }
+        // if we tried them all and none succeeded
+        Err(ParseError::AltAllFailed)
+    }
+
+    pub fn new(tokens: &'a [&'a str]) -> Self {
+        let index = 0;
+        Self { tokens, index }
+    }
+
+    pub fn parse_token(&mut self, expected: &'static str) -> Result<&'a str, ParseError<'a>> {
+        let found = self.pull_token()?;
+        if found == expected {
+            Ok(found)
+        } else {
+            Err(ParseError::UnexpectedToken(UnexpectedToken {
+                expected,
+                found,
+            }))
+        }
+    }
+
+    /// "Parse all" meaning: run the provided closure. If it fails, revert
+    /// the index.
+    pub fn parse_all<R>(
+        &mut self,
+        parse_fn: impl FnOnce(&mut Self) -> Result<R, ParseError<'a>>,
+    ) -> Result<R, ParseError<'a>> {
+        let start = self.index;
+        let result = parse_fn(self);
+
+        // If the parser closure fails, revert the index
+        if result.is_err() {
+            self.index = start;
+            result
+        } else if !self.is_eof() {
+            Err(ParseError::Incomplete)
+        } else {
+            result
+        }
+    }
+
+    pub fn pull_token(&mut self) -> Result<&'a str, ParseError<'a>> {
+        let token = self
+            .tokens
+            .get(self.index)
+            .copied()
+            .ok_or(ParseError::EOF)?;
+        self.index += 1;
+        Ok(token)
+    }
+
+    pub fn unpop_token(&mut self) {
+        if (self.index as isize) - 1 < 0 {
+            return;
+        }
+
+        self.index -= 1;
+    }
+
+    #[inline]
+    pub fn tokens(&self) -> &'a [&'a str] {
+        let min_index = self.index.min(self.tokens.len());
+        &self.tokens[min_index..]
+    }
+
+    #[inline]
+    pub fn is_eof(&self) -> bool {
+        self.tokens().is_empty()
+    }
+}
+
+pub trait TokenSerializable: Sized {
+    /// Return a list of serialization plans for a type. We do this for
+    /// type safety and assume constructing these types are lightweight
+    fn parse<'a>(parser: &mut TokenParser<'a>) -> Result<Self, ParseError<'a>>;
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_column_serialize() {
+        use crate::ui::add_column::{AddAlgoRoute, AddColumnRoute};
+
+        {
+            let data = &"column:algo_selection:last_per_pubkey"
+                .split(":")
+                .collect::<Vec<&str>>();
+            let mut parser = TokenParser::new(&data);
+            let parsed = AddColumnRoute::parse(&mut parser).unwrap();
+            let expected = AddColumnRoute::Algo(AddAlgoRoute::LastPerPubkey);
+            assert_eq!(expected, parsed)
+        }
+
+        {
+            let data: &[&str] = &["column"];
+            let mut parser = TokenParser::new(data);
+            let parsed = AddColumnRoute::parse(&mut parser).unwrap();
+            let expected = AddColumnRoute::Base;
+            assert_eq!(expected, parsed)
+        }
+    }
+}
diff --git a/crates/notedeck_columns/src/ui/add_column.rs b/crates/notedeck_columns/src/ui/add_column.rs
index eb80398e..80a4d16a 100644
--- a/crates/notedeck_columns/src/ui/add_column.rs
+++ b/crates/notedeck_columns/src/ui/add_column.rs
@@ -11,6 +11,7 @@ use crate::{
     login_manager::AcquireKeyState,
     route::Route,
+    storage::{ParseError, TokenParser, TokenSerializable},
     timeline::{kind::ListKind, PubkeySource, Timeline, TimelineKind},
     ui::anim::ICON_EXPANSION_MULTIPLE,
     Damus,
@@ -61,8 +62,9 @@ enum AddColumnOption {
     Individual(PubkeySource),
 }
 
-#[derive(Clone, Copy, Eq, PartialEq, Debug)]
+#[derive(Clone, Copy, Eq, PartialEq, Debug, Default)]
 pub enum AddAlgoRoute {
+    #[default]
     Base,
     LastPerPubkey,
 }
@@ -78,6 +80,67 @@ pub enum AddColumnRoute {
     ExternalIndividual,
 }
 
+impl TokenSerializable for AddColumnRoute {
+    fn parse<'a>(parser: &mut TokenParser<'a>) -> Result<Self, ParseError<'a>> {
+        // all start with column
+        parser.parse_token("column")?;
+
+        // if we're done then we have the base
+        if parser.is_eof() {
+            return Ok(AddColumnRoute::Base);
+        }
+
+        TokenParser::alt(
+            parser,
+            &[
+                |p| {
+                    p.parse_all(|p| {
+                        p.parse_token("notification_selection")?;
+                        Ok(AddColumnRoute::UndecidedNotification)
+                    })
+                },
+                |p| {
+                    p.parse_all(|p| {
+                        p.parse_token("external_notif_selection")?;
+                        Ok(AddColumnRoute::ExternalNotification)
+                    })
+                },
+                |p| {
+                    p.parse_all(|p| {
+                        p.parse_token("hashtag_selection")?;
+                        Ok(AddColumnRoute::Hashtag)
+                    })
+                },
+                |p| {
+                    p.parse_all(|p| {
+                        p.parse_token("algo_selection")?;
+                        Ok(AddColumnRoute::Algo(AddAlgoRoute::Base))
+                    })
+                },
+                |p| {
+                    p.parse_all(|p| {
+                        p.parse_token("algo_selection")?;
+                        p.parse_token("last_per_pubkey")?;
+                        Ok(AddColumnRoute::Algo(AddAlgoRoute::LastPerPubkey))
+                    })
+                },
+                |p| {
+                    p.parse_all(|p| {
+                        p.parse_token("individual_selection")?;
+                        Ok(AddColumnRoute::UndecidedIndividual)
+                    })
+                },
+                |p| {
+                    p.parse_all(|p| {
+                        p.parse_token("external_individual_selection")?;
+                        Ok(AddColumnRoute::ExternalIndividual)
+                    })
+                },
+            ],
+        )
+    }
+}

From 61b3a92792c78113fd4429bcda2cb71c5381f0ca Mon Sep 17 00:00:00 2001
From: William Casarin <jb55@jb55.com>
Date: Sun, 5 Jan 2025 10:57:37 -0600
Subject: [PATCH 04/18] token_parser: unify parsing and serialization

This reduces the number of things we have to update in our token parser
and serializer. For payloads, we have to handle the payload cases
differently, but we now have a structure that can deal with that
efficiently.
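
To illustrate the shape this enables, here is a rough sketch (the Demo
type below is hypothetical and only for illustration; the trait methods
are the ones introduced in this patch). One token table drives both
parsing and serialization:

    #[derive(Clone, Copy)]
    enum Demo { A, B }

    impl Demo {
        // single source of truth for this type's tokens
        fn tokens(&self) -> &'static [&'static str] {
            match self {
                Demo::A => &["demo", "a"],
                Demo::B => &["demo", "b"],
            }
        }
    }

    fn parse_demo<'a>(p: &mut TokenParser<'a>, route: Demo) -> Result<Demo, ParseError<'a>> {
        // succeed only if every token matches and all input is consumed
        p.parse_all(|p| {
            for token in route.tokens() {
                p.parse_token(token)?;
            }
            Ok(route)
        })
    }

    impl TokenSerializable for Demo {
        fn parse<'a>(parser: &mut TokenParser<'a>) -> Result<Self, ParseError<'a>> {
            TokenParser::alt(parser, &[|p| parse_demo(p, Demo::A), |p| parse_demo(p, Demo::B)])
        }

        fn serialize(
            &self,
            write_token: fn(&str) -> Result<(), std::io::Error>,
        ) -> Result<(), std::io::Error> {
            // serialization walks the same token table
            for token in self.tokens() {
                write_token(token)?;
            }
            Ok(())
        }
    }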
Signed-off-by: William Casarin --- .../src/storage/token_parser.rs | 1 + crates/notedeck_columns/src/ui/add_column.rs | 90 ++++++++++--------- 2 files changed, 48 insertions(+), 43 deletions(-) diff --git a/crates/notedeck_columns/src/storage/token_parser.rs b/crates/notedeck_columns/src/storage/token_parser.rs index a1d49f26..45b43595 100644 --- a/crates/notedeck_columns/src/storage/token_parser.rs +++ b/crates/notedeck_columns/src/storage/token_parser.rs @@ -146,6 +146,7 @@ pub trait TokenSerializable: Sized { /// Return a list of serialization plans for a type. We do this for /// type safety and assume constructing these types are lightweight fn parse<'a>(parser: &mut TokenParser<'a>) -> Result>; + fn serialize(&self, write_token: fn(&str) -> Result<(), std::io::Error>) -> Result<(), std::io::Error>; } #[cfg(test)] diff --git a/crates/notedeck_columns/src/ui/add_column.rs b/crates/notedeck_columns/src/ui/add_column.rs index 80a4d16a..0ee4ffba 100644 --- a/crates/notedeck_columns/src/ui/add_column.rs +++ b/crates/notedeck_columns/src/ui/add_column.rs @@ -80,7 +80,47 @@ pub enum AddColumnRoute { ExternalIndividual, } +// Parser for the common case without any payloads +fn parse_column_route<'a>( + parser: &mut TokenParser<'a>, + route: AddColumnRoute, +) -> Result> { + parser.parse_all(|p| { + for token in route.tokens() { + p.parse_token(token)?; + } + Ok(route) + }) +} + +impl AddColumnRoute { + /// Route tokens use in both serialization and deserialization + fn tokens(&self) -> &'static [&'static str] { + match self { + Self::Base => &[], + Self::UndecidedNotification => &["notification_selection"], + Self::ExternalNotification => &["external_notif_selection"], + Self::UndecidedIndividual => &["individual_selection"], + Self::ExternalIndividual => &["external_individual_selection"], + Self::Hashtag => &["hashtag"], + Self::Algo(AddAlgoRoute::Base) => &["algo_selection"], + Self::Algo(AddAlgoRoute::LastPerPubkey) => &["algo_selection", "last_per_pubkey"], + // NOTE!!! 
When adding to this, update the parser for TokenSerializable below + } + } +} + impl TokenSerializable for AddColumnRoute { + fn serialize( + &self, + write_token: fn(&str) -> Result<(), std::io::Error>, + ) -> Result<(), std::io::Error> { + for token in self.tokens() { + write_token(token)?; + } + Ok(()) + } + fn parse<'a>(parser: &mut TokenParser<'a>) -> Result> { // all start with column parser.parse_token("column")?; @@ -93,49 +133,13 @@ impl TokenSerializable for AddColumnRoute { TokenParser::alt( parser, &[ - |p| { - p.parse_all(|p| { - p.parse_token("external_notif_selection")?; - Ok(AddColumnRoute::UndecidedNotification) - }) - }, - |p| { - p.parse_all(|p| { - p.parse_token("external_notif_selection")?; - Ok(AddColumnRoute::ExternalNotification) - }) - }, - |p| { - p.parse_all(|p| { - p.parse_token("hashtag_selection")?; - Ok(AddColumnRoute::Hashtag) - }) - }, - |p| { - p.parse_all(|p| { - p.parse_token("algo_selection")?; - Ok(AddColumnRoute::Algo(AddAlgoRoute::Base)) - }) - }, - |p| { - p.parse_all(|p| { - p.parse_token("algo_selection")?; - p.parse_token("last_per_pubkey")?; - Ok(AddColumnRoute::Algo(AddAlgoRoute::LastPerPubkey)) - }) - }, - |p| { - p.parse_all(|p| { - p.parse_token("individual_selection")?; - Ok(AddColumnRoute::UndecidedIndividual) - }) - }, - |p| { - p.parse_all(|p| { - p.parse_token("external_individual_selection")?; - Ok(AddColumnRoute::ExternalIndividual) - }) - }, + |p| parse_column_route(p, AddColumnRoute::UndecidedNotification), + |p| parse_column_route(p, AddColumnRoute::ExternalNotification), + |p| parse_column_route(p, AddColumnRoute::UndecidedIndividual), + |p| parse_column_route(p, AddColumnRoute::ExternalIndividual), + |p| parse_column_route(p, AddColumnRoute::Hashtag), + |p| parse_column_route(p, AddColumnRoute::Algo(AddAlgoRoute::Base)), + |p| parse_column_route(p, AddColumnRoute::Algo(AddAlgoRoute::LastPerPubkey)), ], ) } From 4f89d95aefb3f302b6558d15682008396b491f33 Mon Sep 17 00:00:00 2001 From: William Casarin Date: Sun, 5 Jan 2025 11:50:08 -0600 Subject: [PATCH 05/18] token_serializer: introduce TokenWriter This simplifies token serialization Signed-off-by: William Casarin --- crates/notedeck_columns/src/storage/mod.rs | 2 +- .../src/storage/token_parser.rs | 38 ++++++++++++++++++- crates/notedeck_columns/src/ui/add_column.rs | 10 ++--- 3 files changed, 41 insertions(+), 9 deletions(-) diff --git a/crates/notedeck_columns/src/storage/mod.rs b/crates/notedeck_columns/src/storage/mod.rs index edd5df3d..d870d91d 100644 --- a/crates/notedeck_columns/src/storage/mod.rs +++ b/crates/notedeck_columns/src/storage/mod.rs @@ -5,4 +5,4 @@ mod token_parser; pub use decks::{load_decks_cache, save_decks_cache, DECKS_CACHE_FILE}; pub use migration::{deserialize_columns, COLUMNS_FILE}; -pub use token_parser::{ParseError, TokenParser, TokenSerializable}; +pub use token_parser::{ParseError, TokenParser, TokenSerializable, TokenWriter}; diff --git a/crates/notedeck_columns/src/storage/token_parser.rs b/crates/notedeck_columns/src/storage/token_parser.rs index 45b43595..f7bd9641 100644 --- a/crates/notedeck_columns/src/storage/token_parser.rs +++ b/crates/notedeck_columns/src/storage/token_parser.rs @@ -25,6 +25,42 @@ pub enum ParseError<'a> { EOF, } +pub struct TokenWriter { + delim: &'static str, + tokens_written: usize, + buf: Vec, +} + +impl Default for TokenWriter { + fn default() -> Self { + Self::new(":") + } +} + +impl TokenWriter { + pub fn new(delim: &'static str) -> Self { + let buf = vec![]; + let tokens_written = 0; + Self { + buf, + 
tokens_written, + delim, + } + } + + pub fn write_token(&mut self, token: &str) { + if self.tokens_written > 0 { + self.buf.extend_from_slice(self.delim.as_bytes()) + } + self.buf.extend_from_slice(token.as_bytes()); + self.tokens_written += 1; + } + + pub fn buffer(&self) -> &[u8] { + &self.buf + } +} + #[derive(Clone)] pub struct TokenParser<'a> { tokens: &'a [&'a str], @@ -146,7 +182,7 @@ pub trait TokenSerializable: Sized { /// Return a list of serialization plans for a type. We do this for /// type safety and assume constructing these types are lightweight fn parse<'a>(parser: &mut TokenParser<'a>) -> Result>; - fn serialize(&self, write_token: fn(&str) -> Result<(), std::io::Error>) -> Result<(), std::io::Error>; + fn serialize(&self, writer: &mut TokenWriter); } #[cfg(test)] diff --git a/crates/notedeck_columns/src/ui/add_column.rs b/crates/notedeck_columns/src/ui/add_column.rs index 0ee4ffba..958d5aac 100644 --- a/crates/notedeck_columns/src/ui/add_column.rs +++ b/crates/notedeck_columns/src/ui/add_column.rs @@ -11,7 +11,7 @@ use nostrdb::{Ndb, Transaction}; use crate::{ login_manager::AcquireKeyState, route::Route, - storage::{ParseError, TokenParser, TokenSerializable}, + storage::{ParseError, TokenParser, TokenSerializable, TokenWriter}, timeline::{kind::ListKind, PubkeySource, Timeline, TimelineKind}, ui::anim::ICON_EXPANSION_MULTIPLE, Damus, @@ -111,14 +111,10 @@ impl AddColumnRoute { } impl TokenSerializable for AddColumnRoute { - fn serialize( - &self, - write_token: fn(&str) -> Result<(), std::io::Error>, - ) -> Result<(), std::io::Error> { + fn serialize(&self, writer: &mut TokenWriter) { for token in self.tokens() { - write_token(token)?; + writer.write_token(token); } - Ok(()) } fn parse<'a>(parser: &mut TokenParser<'a>) -> Result> { From efa5b7e32fa20d04febe7160ec4b1fd10e7c1e5e Mon Sep 17 00:00:00 2001 From: William Casarin Date: Sun, 5 Jan 2025 12:05:14 -0600 Subject: [PATCH 06/18] token_parser: simplify AddColumnRoute serialization Signed-off-by: William Casarin --- .../src/storage/token_parser.rs | 24 +++++++++++++----- crates/notedeck_columns/src/ui/add_column.rs | 25 +++++++------------ 2 files changed, 27 insertions(+), 22 deletions(-) diff --git a/crates/notedeck_columns/src/storage/token_parser.rs b/crates/notedeck_columns/src/storage/token_parser.rs index f7bd9641..91dcedf7 100644 --- a/crates/notedeck_columns/src/storage/token_parser.rs +++ b/crates/notedeck_columns/src/storage/token_parser.rs @@ -56,6 +56,12 @@ impl TokenWriter { self.tokens_written += 1; } + pub fn str(&self) -> &str { + // SAFETY: only &strs are ever serialized, so its guaranteed to be + // correct here + unsafe { std::str::from_utf8_unchecked(self.buffer()) } + } + pub fn buffer(&self) -> &[u8] { &self.buf } @@ -194,21 +200,27 @@ mod tests { use crate::ui::add_column::{AddAlgoRoute, AddColumnRoute}; { - let data = &"column:algo_selection:last_per_pubkey" - .split(":") - .collect::>(); + let data_str = "column:algo_selection:last_per_pubkey"; + let data = &data_str.split(":").collect::>(); + let mut token_writer = TokenWriter::default(); let mut parser = TokenParser::new(&data); let parsed = AddColumnRoute::parse(&mut parser).unwrap(); let expected = AddColumnRoute::Algo(AddAlgoRoute::LastPerPubkey); - assert_eq!(expected, parsed) + parsed.serialize(&mut token_writer); + assert_eq!(expected, parsed); + assert_eq!(token_writer.str(), data_str); } { - let data: &[&str] = &["column"]; + let data_str = "column"; + let mut token_writer = TokenWriter::default(); + let data: &[&str] = &[data_str]; 
let mut parser = TokenParser::new(data); let parsed = AddColumnRoute::parse(&mut parser).unwrap(); let expected = AddColumnRoute::Base; - assert_eq!(expected, parsed) + parsed.serialize(&mut token_writer); + assert_eq!(expected, parsed); + assert_eq!(token_writer.str(), data_str); } } } diff --git a/crates/notedeck_columns/src/ui/add_column.rs b/crates/notedeck_columns/src/ui/add_column.rs index 958d5aac..50acf1f4 100644 --- a/crates/notedeck_columns/src/ui/add_column.rs +++ b/crates/notedeck_columns/src/ui/add_column.rs @@ -97,14 +97,14 @@ impl AddColumnRoute { /// Route tokens use in both serialization and deserialization fn tokens(&self) -> &'static [&'static str] { match self { - Self::Base => &[], - Self::UndecidedNotification => &["notification_selection"], - Self::ExternalNotification => &["external_notif_selection"], - Self::UndecidedIndividual => &["individual_selection"], - Self::ExternalIndividual => &["external_individual_selection"], - Self::Hashtag => &["hashtag"], - Self::Algo(AddAlgoRoute::Base) => &["algo_selection"], - Self::Algo(AddAlgoRoute::LastPerPubkey) => &["algo_selection", "last_per_pubkey"], + Self::Base => &["column"], + Self::UndecidedNotification => &["column", "notification_selection"], + Self::ExternalNotification => &["column", "external_notif_selection"], + Self::UndecidedIndividual => &["column", "individual_selection"], + Self::ExternalIndividual => &["column", "external_individual_selection"], + Self::Hashtag => &["column", "hashtag"], + Self::Algo(AddAlgoRoute::Base) => &["column", "algo_selection"], + Self::Algo(AddAlgoRoute::LastPerPubkey) => &["column", "algo_selection", "last_per_pubkey"], // NOTE!!! When adding to this, update the parser for TokenSerializable below } } @@ -118,17 +118,10 @@ impl TokenSerializable for AddColumnRoute { } fn parse<'a>(parser: &mut TokenParser<'a>) -> Result> { - // all start with column - parser.parse_token("column")?; - - // if we're done then we have the base - if parser.is_eof() { - return Ok(AddColumnRoute::Base); - } - TokenParser::alt( parser, &[ + |p| parse_column_route(p, AddColumnRoute::Base), |p| parse_column_route(p, AddColumnRoute::UndecidedNotification), |p| parse_column_route(p, AddColumnRoute::ExternalNotification), |p| parse_column_route(p, AddColumnRoute::UndecidedIndividual), From 00ef3082f31d9aca613e4c1d38a70fbdd63a0e3c Mon Sep 17 00:00:00 2001 From: William Casarin Date: Tue, 21 Jan 2025 13:12:07 -0800 Subject: [PATCH 07/18] tokens: add a more advanced tokens parser Signed-off-by: William Casarin --- crates/notedeck_columns/src/storage/mod.rs | 5 +- .../src/storage/token_parser.rs | 300 +++++++++++++++++- crates/notedeck_columns/src/ui/add_column.rs | 11 +- 3 files changed, 304 insertions(+), 12 deletions(-) diff --git a/crates/notedeck_columns/src/storage/mod.rs b/crates/notedeck_columns/src/storage/mod.rs index d870d91d..95c88d7a 100644 --- a/crates/notedeck_columns/src/storage/mod.rs +++ b/crates/notedeck_columns/src/storage/mod.rs @@ -5,4 +5,7 @@ mod token_parser; pub use decks::{load_decks_cache, save_decks_cache, DECKS_CACHE_FILE}; pub use migration::{deserialize_columns, COLUMNS_FILE}; -pub use token_parser::{ParseError, TokenParser, TokenSerializable, TokenWriter}; +pub use token_parser::{ + ParseError, Payload, Token, TokenAlternatives, TokenParser, TokenPayload, TokenSerializable, + TokenWriter, UnexpectedToken, +}; diff --git a/crates/notedeck_columns/src/storage/token_parser.rs b/crates/notedeck_columns/src/storage/token_parser.rs index 91dcedf7..7a855995 100644 --- 
a/crates/notedeck_columns/src/storage/token_parser.rs +++ b/crates/notedeck_columns/src/storage/token_parser.rs @@ -1,5 +1,5 @@ use crate::timeline::kind::PubkeySource; -use enostr::Pubkey; +use enostr::{NoteId, Pubkey}; #[derive(Debug, Clone)] pub struct UnexpectedToken<'fnd, 'exp> { @@ -7,6 +7,251 @@ pub struct UnexpectedToken<'fnd, 'exp> { pub found: &'fnd str, } +#[derive(Debug, Copy, Clone, PartialEq, Eq)] +pub enum TokenPayload { + PubkeySource, + Pubkey, + NoteId, +} + +pub struct TokenAlternatives { + /// This is the preferred token. It should be serialized this way + preferred: &'static str, + + /// These are deprecated tokens that should still be handled and parsed + deprecated: &'static [&'static str], +} + +impl TokenAlternatives { + pub const fn new(preferred: &'static str, deprecated: &'static [&'static str]) -> Self { + Self { + preferred, + deprecated, + } + } +} + +/// Token is a unified serialization helper. By specifying a list of +/// tokens for each thing you want to parse, you can type-safely parse +/// and serialize things +pub enum Token { + /// A simple identifier + Identifier(&'static str), + + /// There are multiple ways to parse this identifier + Alternatives(TokenAlternatives), + + /// Different payload types, pubkeys etc + Payload(TokenPayload), +} + +#[derive(Debug, Clone)] +pub enum Payload { + PubkeySource(PubkeySource), + Pubkey(Pubkey), + NoteId(NoteId), +} + +impl Payload { + pub fn token_payload(&self) -> TokenPayload { + match self { + Payload::PubkeySource(_) => TokenPayload::PubkeySource, + Payload::Pubkey(_) => TokenPayload::Pubkey, + Payload::NoteId(_) => TokenPayload::NoteId, + } + } + + pub fn parse_note_id(payload: Option) -> Result> { + payload + .and_then(|p| p.get_note_id().cloned()) + .ok_or(ParseError::ExpectedPayload(TokenPayload::NoteId)) + } + + pub fn parse_pubkey(payload: Option) -> Result> { + payload + .and_then(|p| p.get_pubkey().cloned()) + .ok_or(ParseError::ExpectedPayload(TokenPayload::Pubkey)) + } + + pub fn parse_pubkey_source( + payload: Option, + ) -> Result> { + payload + .and_then(|p| p.get_pubkey_source().cloned()) + .ok_or(ParseError::ExpectedPayload(TokenPayload::Pubkey)) + } + + pub fn parse<'a>( + expected: TokenPayload, + parser: &mut TokenParser<'a>, + ) -> Result> { + match expected { + TokenPayload::PubkeySource => Ok(Payload::pubkey_source( + PubkeySource::parse_from_tokens(parser)?, + )), + TokenPayload::Pubkey => { + let pubkey = parser.try_parse(|p| { + let hex = p.pull_token()?; + Pubkey::from_hex(hex).map_err(|_| ParseError::HexDecodeFailed) + })?; + + Ok(Payload::pubkey(pubkey)) + } + TokenPayload::NoteId => { + let note_id = parser.try_parse(|p| { + let hex = p.pull_token()?; + NoteId::from_hex(hex).map_err(|_| ParseError::HexDecodeFailed) + })?; + + Ok(Payload::note_id(note_id)) + } + } + } + + pub fn pubkey(pubkey: Pubkey) -> Self { + Self::Pubkey(pubkey) + } + + pub fn pubkey_source(pubkey_src: PubkeySource) -> Self { + Self::PubkeySource(pubkey_src) + } + + pub fn note_id(note_id: NoteId) -> Self { + Self::NoteId(note_id) + } + + pub fn get_pubkey(&self) -> Option<&Pubkey> { + if let Self::Pubkey(pubkey) = self { + Some(pubkey) + } else { + None + } + } + + pub fn get_pubkey_source(&self) -> Option<&PubkeySource> { + if let Self::PubkeySource(pk_src) = self { + Some(pk_src) + } else { + None + } + } + + pub fn get_note_id(&self) -> Option<&NoteId> { + if let Self::NoteId(note_id) = self { + Some(note_id) + } else { + None + } + } +} + +impl Token { + pub fn parse<'a>( + &self, + parser: &mut 
TokenParser<'a>, + ) -> Result, ParseError<'a>> { + match self { + Token::Identifier(s) => { + parser.parse_token(s)?; + Ok(None) + } + + Token::Payload(payload) => { + let payload = Payload::parse(*payload, parser)?; + Ok(Some(payload)) + } + + Token::Alternatives(alts) => { + if parser.try_parse(|p| p.parse_token(alts.preferred)).is_ok() { + return Ok(None); + } + + for token in alts.deprecated { + if parser.try_parse(|p| p.parse_token(token)).is_ok() { + return Ok(None); + } + } + + Err(ParseError::AltAllFailed) + } + } + } + + /// Parse all of the tokens in sequence, ensuring that we extract a payload + /// if we find one. This only handles a single payload, if you need more, + /// then use a custom parser + pub fn parse_all<'a>( + parser: &mut TokenParser<'a>, + tokens: &[Token], + ) -> Result, ParseError<'a>> { + parser.try_parse(|p| { + let mut payload: Option = None; + for token in tokens { + if let Some(pl) = token.parse(p)? { + payload = Some(pl); + } + } + + Ok(payload) + }) + } + + pub fn serialize_all(writer: &mut TokenWriter, tokens: &[Token], payload: Option<&Payload>) { + for token in tokens { + token.serialize(writer, payload) + } + } + + pub fn serialize(&self, writer: &mut TokenWriter, payload: Option<&Payload>) { + match self { + Token::Identifier(s) => writer.write_token(s), + Token::Alternatives(alts) => writer.write_token(alts.preferred), + Token::Payload(token_payload) => match token_payload { + TokenPayload::PubkeySource => { + payload + .and_then(|p| p.get_pubkey_source()) + .expect("expected pubkey payload") + .serialize_tokens(writer); + } + + TokenPayload::Pubkey => { + let pubkey = payload + .and_then(|p| p.get_pubkey()) + .expect("expected note_id payload"); + writer.write_token(&hex::encode(pubkey.bytes())); + } + + TokenPayload::NoteId => { + let note_id = payload + .and_then(|p| p.get_note_id()) + .expect("expected note_id payload"); + writer.write_token(&hex::encode(note_id.bytes())); + } + }, + } + } + + pub const fn id(s: &'static str) -> Self { + Token::Identifier(s) + } + + pub const fn alts(primary: &'static str, deprecated: &'static [&'static str]) -> Self { + Token::Alternatives(TokenAlternatives::new(primary, deprecated)) + } + + pub const fn pubkey() -> Self { + Token::Payload(TokenPayload::Pubkey) + } + + pub const fn pubkey_source() -> Self { + Token::Payload(TokenPayload::PubkeySource) + } + + pub const fn note_id() -> Self { + Token::Payload(TokenPayload::NoteId) + } +} + #[derive(Debug, Clone)] pub enum ParseError<'a> { /// Not done parsing yet @@ -18,6 +263,11 @@ pub enum ParseError<'a> { /// There was some issue decoding the data DecodeFailed, + /// There was some issue decoding the data + ExpectedPayload(TokenPayload), + + HexDecodeFailed, + /// We encountered an unexpected token UnexpectedToken(UnexpectedToken<'a, 'static>), @@ -122,6 +372,18 @@ impl<'a> TokenParser<'a> { Self { tokens, index } } + pub fn peek_parse_token(&mut self, expected: &'static str) -> Result<&'a str, ParseError<'a>> { + let found = self.peek_token()?; + if found == expected { + Ok(found) + } else { + Err(ParseError::UnexpectedToken(UnexpectedToken { + expected, + found, + })) + } + } + pub fn parse_token(&mut self, expected: &'static str) -> Result<&'a str, ParseError<'a>> { let found = self.pull_token()?; if found == expected { @@ -154,6 +416,23 @@ impl<'a> TokenParser<'a> { } } + /// Attempt to parse something, backtrack if we fail. 
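+    /// On failure the parser index is rewound to where it was before the
+    /// closure ran, so a failed attempt consumes no tokens.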
+ pub fn try_parse( + &mut self, + parse_fn: impl FnOnce(&mut Self) -> Result>, + ) -> Result> { + let start = self.index; + let result = parse_fn(self); + + // If the parser closure fails, revert the index + if result.is_err() { + self.index = start; + result + } else { + result + } + } + pub fn pull_token(&mut self) -> Result<&'a str, ParseError<'a>> { let token = self .tokens @@ -172,6 +451,13 @@ impl<'a> TokenParser<'a> { self.index -= 1; } + pub fn peek_token(&self) -> Result<&'a str, ParseError<'a>> { + self.tokens() + .first() + .ok_or(ParseError::DecodeFailed) + .copied() + } + #[inline] pub fn tokens(&self) -> &'a [&'a str] { let min_index = self.index.min(self.tokens.len()); @@ -187,8 +473,8 @@ impl<'a> TokenParser<'a> { pub trait TokenSerializable: Sized { /// Return a list of serialization plans for a type. We do this for /// type safety and assume constructing these types are lightweight - fn parse<'a>(parser: &mut TokenParser<'a>) -> Result>; - fn serialize(&self, writer: &mut TokenWriter); + fn parse_from_tokens<'a>(parser: &mut TokenParser<'a>) -> Result>; + fn serialize_tokens(&self, writer: &mut TokenWriter); } #[cfg(test)] @@ -204,9 +490,9 @@ mod tests { let data = &data_str.split(":").collect::>(); let mut token_writer = TokenWriter::default(); let mut parser = TokenParser::new(&data); - let parsed = AddColumnRoute::parse(&mut parser).unwrap(); + let parsed = AddColumnRoute::parse_from_tokens(&mut parser).unwrap(); let expected = AddColumnRoute::Algo(AddAlgoRoute::LastPerPubkey); - parsed.serialize(&mut token_writer); + parsed.serialize_tokens(&mut token_writer); assert_eq!(expected, parsed); assert_eq!(token_writer.str(), data_str); } @@ -216,9 +502,9 @@ mod tests { let mut token_writer = TokenWriter::default(); let data: &[&str] = &[data_str]; let mut parser = TokenParser::new(data); - let parsed = AddColumnRoute::parse(&mut parser).unwrap(); + let parsed = AddColumnRoute::parse_from_tokens(&mut parser).unwrap(); let expected = AddColumnRoute::Base; - parsed.serialize(&mut token_writer); + parsed.serialize_tokens(&mut token_writer); assert_eq!(expected, parsed); assert_eq!(token_writer.str(), data_str); } diff --git a/crates/notedeck_columns/src/ui/add_column.rs b/crates/notedeck_columns/src/ui/add_column.rs index 50acf1f4..cc8610f0 100644 --- a/crates/notedeck_columns/src/ui/add_column.rs +++ b/crates/notedeck_columns/src/ui/add_column.rs @@ -104,20 +104,23 @@ impl AddColumnRoute { Self::ExternalIndividual => &["column", "external_individual_selection"], Self::Hashtag => &["column", "hashtag"], Self::Algo(AddAlgoRoute::Base) => &["column", "algo_selection"], - Self::Algo(AddAlgoRoute::LastPerPubkey) => &["column", "algo_selection", "last_per_pubkey"], - // NOTE!!! When adding to this, update the parser for TokenSerializable below + Self::Algo(AddAlgoRoute::LastPerPubkey) => { + &["column", "algo_selection", "last_per_pubkey"] + } // NOTE!!! 
When adding to this, update the parser for TokenSerializable below
         }
     }
 }
 
 impl TokenSerializable for AddColumnRoute {
-    fn serialize(&self, writer: &mut TokenWriter) {
+    fn serialize_tokens(&self, writer: &mut TokenWriter) {
         for token in self.tokens() {
             writer.write_token(token);
         }
     }
 
-    fn parse<'a>(parser: &mut TokenParser<'a>) -> Result<Self, ParseError<'a>> {
+    fn parse_from_tokens<'a>(parser: &mut TokenParser<'a>) -> Result<Self, ParseError<'a>> {
+        parser.peek_parse_token("column")?;
+
         TokenParser::alt(
             parser,
             &[

From 6b57401e147bfb33c5a9533a5a155d6379ab3f95 Mon Sep 17 00:00:00 2001
From: William Casarin
Date: Tue, 21 Jan 2025 13:10:43 -0800
Subject: [PATCH 08/18] tokens: add AccountsRoute token serializer

Signed-off-by: William Casarin
---
 crates/notedeck_columns/src/accounts/route.rs | 75 +++++++++++++++++++
 1 file changed, 75 insertions(+)

diff --git a/crates/notedeck_columns/src/accounts/route.rs b/crates/notedeck_columns/src/accounts/route.rs
index 69ce1279..6447fffe 100644
--- a/crates/notedeck_columns/src/accounts/route.rs
+++ b/crates/notedeck_columns/src/accounts/route.rs
@@ -1,4 +1,5 @@
 use super::{AccountLoginResponse, AccountsViewResponse};
+use crate::storage::{ParseError, TokenParser, TokenSerializable, TokenWriter};
 use serde::{Deserialize, Serialize};
 
 pub enum AccountsRouteResponse {
@@ -11,3 +12,77 @@ pub enum AccountsRoute {
     Accounts,
     AddAccount,
 }
+
+impl AccountsRoute {
+    /// Route tokens used in both serialization and deserialization
+    fn tokens(&self) -> &'static [&'static str] {
+        match self {
+            Self::Accounts => &["accounts", "show"],
+            Self::AddAccount => &["accounts", "new"],
+        }
+    }
+}
+
+impl TokenSerializable for AccountsRoute {
+    fn serialize_tokens(&self, writer: &mut TokenWriter) {
+        for token in self.tokens() {
+            writer.write_token(token);
+        }
+    }
+
+    fn parse_from_tokens<'a>(parser: &mut TokenParser<'a>) -> Result<Self, ParseError<'a>> {
+        parser.peek_parse_token("accounts")?;
+
+        TokenParser::alt(
+            parser,
+            &[
+                |p| parse_accounts_route(p, AccountsRoute::Accounts),
+                |p| parse_accounts_route(p, AccountsRoute::AddAccount),
+            ],
+        )
+    }
+}
+
+fn parse_accounts_route<'a>(
+    parser: &mut TokenParser<'a>,
+    route: AccountsRoute,
+) -> Result<AccountsRoute, ParseError<'a>> {
+    parser.parse_all(|p| {
+        for token in route.tokens() {
+            p.parse_token(token)?;
+        }
+        Ok(route)
+    })
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::storage::{TokenParser, TokenSerializable, TokenWriter};
+
+    #[test]
+    fn test_accounts_route_serialize() {
+        let data_str = "accounts:show";
+        let data = &data_str.split(":").collect::<Vec<&str>>();
+        let mut token_writer = TokenWriter::default();
+        let mut parser = TokenParser::new(&data);
+        let parsed = AccountsRoute::parse_from_tokens(&mut parser).unwrap();
+        let expected = AccountsRoute::Accounts;
+        parsed.serialize_tokens(&mut token_writer);
+        assert_eq!(expected, parsed);
+        assert_eq!(token_writer.str(), data_str);
+    }
+
+    #[test]
+    fn test_new_accounts_route_serialize() {
+        let data_str = "accounts:new";
+        let data = &data_str.split(":").collect::<Vec<&str>>();
+        let mut token_writer = TokenWriter::default();
+        let mut parser = TokenParser::new(data);
+        let parsed = AccountsRoute::parse_from_tokens(&mut parser).unwrap();
+        let expected = AccountsRoute::AddAccount;
+        parsed.serialize_tokens(&mut token_writer);
+        assert_eq!(expected, parsed);
+        assert_eq!(token_writer.str(), data_str);
+    }
+}

From 70a39ca69c9a6a490ccac039b44800e1c43f5091 Mon Sep 17 00:00:00 2001
From: William Casarin
Date: Tue, 21 Jan 2025 13:12:39 -0800
Subject: [PATCH 09/18] tokens: add PubkeySource and ListKind token serializers

Signed-off-by: William Casarin
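
As a rough sketch of the intended round trip (illustrative only, using the
TokenWriter/TokenParser API from the previous patches):

    let mut writer = TokenWriter::default();
    PubkeySource::DeckAuthor.serialize_tokens(&mut writer);
    assert_eq!(writer.str(), "deck_author");

    let tokens: Vec<&str> = writer.str().split(":").collect();
    let mut parser = TokenParser::new(&tokens);
    let parsed = PubkeySource::parse_from_tokens(&mut parser).unwrap();
    assert_eq!(parsed, PubkeySource::DeckAuthor);

An explicit source serializes to its bare pubkey hex instead, and the parser
still accepts the older explicit:<hex> spelling when parsing.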
--- crates/notedeck_columns/src/timeline/kind.rs | 81 +++++++++++++++++++- 1 file changed, 80 insertions(+), 1 deletion(-) diff --git a/crates/notedeck_columns/src/timeline/kind.rs b/crates/notedeck_columns/src/timeline/kind.rs index 3d219140..8aa40b85 100644 --- a/crates/notedeck_columns/src/timeline/kind.rs +++ b/crates/notedeck_columns/src/timeline/kind.rs @@ -1,4 +1,7 @@ use crate::error::Error; +use crate::storage::{ + ParseError, Payload, Token, TokenParser, TokenPayload, TokenSerializable, TokenWriter, +}; use crate::timeline::{Timeline, TimelineTab}; use enostr::{Filter, Pubkey}; use nostrdb::{Ndb, Transaction}; @@ -7,9 +10,10 @@ use serde::{Deserialize, Serialize}; use std::{borrow::Cow, fmt::Display}; use tracing::{error, warn}; -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +#[derive(Clone, Default, Debug, PartialEq, Eq, Serialize, Deserialize)] pub enum PubkeySource { Explicit(Pubkey), + #[default] DeckAuthor, } @@ -19,6 +23,10 @@ pub enum ListKind { } impl PubkeySource { + pub fn pubkey(pubkey: Pubkey) -> Self { + PubkeySource::Explicit(pubkey) + } + pub fn to_pubkey<'a>(&'a self, deck_author: &'a Pubkey) -> &'a Pubkey { match self { PubkeySource::Explicit(pk) => pk, @@ -34,6 +42,44 @@ impl PubkeySource { } } +impl TokenSerializable for PubkeySource { + fn serialize_tokens(&self, writer: &mut TokenWriter) { + match self { + PubkeySource::DeckAuthor => { + writer.write_token("deck_author"); + } + PubkeySource::Explicit(pk) => { + writer.write_token(&hex::encode(pk.bytes())); + } + } + } + + fn parse_from_tokens<'a>(parser: &mut TokenParser<'a>) -> Result> { + parser.try_parse(|p| { + match p.pull_token() { + // we handle bare payloads and assume they are explicit pubkey sources + Ok("explicit") => { + if let Ok(hex) = p.pull_token() { + let pk = Pubkey::from_hex(hex).map_err(|_| ParseError::HexDecodeFailed)?; + Ok(PubkeySource::Explicit(pk)) + } else { + Err(ParseError::ExpectedPayload(TokenPayload::Pubkey)) + } + } + + Err(_) | Ok("deck_author") => Ok(PubkeySource::DeckAuthor), + + Ok(hex) => { + let pk = Pubkey::from_hex(hex).map_err(|_| ParseError::HexDecodeFailed)?; + Ok(PubkeySource::Explicit(pk)) + } + } + }) + } +} + +const LIST_CONTACT_TOKENS: &[Token] = &[Token::alts("contacts", &["contact"]), Token::pubkey()]; + impl ListKind { pub fn contact_list(pk_src: PubkeySource) -> Self { ListKind::Contact(pk_src) @@ -44,6 +90,39 @@ impl ListKind { ListKind::Contact(pk_src) => Some(pk_src), } } + + fn payload(&self) -> Option { + match self { + ListKind::Contact(pk_src) => Some(Payload::pubkey_source(pk_src.clone())), + } + } + + const fn tokens(&self) -> &'static [Token] { + match self { + ListKind::Contact(_pubkey) => LIST_CONTACT_TOKENS, + } + } +} + +impl TokenSerializable for ListKind { + fn serialize_tokens(&self, writer: &mut TokenWriter) { + Token::serialize_all(writer, self.tokens(), self.payload().as_ref()); + } + + fn parse_from_tokens<'a>(parser: &mut TokenParser<'a>) -> Result> { + TokenParser::alt( + parser, + &[|p| { + let maybe_payload = + Token::parse_all(p, ListKind::Contact(PubkeySource::default()).tokens())?; + let payload = maybe_payload + .as_ref() + .and_then(|mp| mp.get_pubkey_source()) + .ok_or(ParseError::ExpectedPayload(TokenPayload::Pubkey))?; + Ok(ListKind::Contact(payload.to_owned())) + }], + ) + } } /// From 4e87ed7065c4de0e5fd8791db9e6a6a094fea6a5 Mon Sep 17 00:00:00 2001 From: William Casarin Date: Tue, 21 Jan 2025 13:13:28 -0800 Subject: [PATCH 10/18] tokens: add TimelineRoute token serializer Signed-off-by: William Casarin --- 
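(For reference, not part of the diff below: the route token forms introduced
here are

    profile:<pubkey hex>
    thread:<note id hex>
    reply:<note id hex>
    quote:<note id hex>

while TimelineRoute::Timeline(..) is left as a todo!() in both directions.)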
crates/notedeck_columns/src/timeline/mod.rs | 2 +- crates/notedeck_columns/src/timeline/route.rs | 87 +++++++++++++++++++ 2 files changed, 88 insertions(+), 1 deletion(-) diff --git a/crates/notedeck_columns/src/timeline/mod.rs b/crates/notedeck_columns/src/timeline/mod.rs index 8fc81e6a..85a0c446 100644 --- a/crates/notedeck_columns/src/timeline/mod.rs +++ b/crates/notedeck_columns/src/timeline/mod.rs @@ -30,7 +30,7 @@ pub mod kind; pub mod route; pub use cache::{TimelineCache, TimelineCacheKey}; -pub use kind::{AlgoTimeline, ColumnTitle, PubkeySource, TimelineKind}; +pub use kind::{ColumnTitle, PubkeySource, TimelineKind}; pub use route::TimelineRoute; #[derive(Debug, Hash, Copy, Clone, Eq, PartialEq)] diff --git a/crates/notedeck_columns/src/timeline/route.rs b/crates/notedeck_columns/src/timeline/route.rs index 4e8b9220..7a0effc4 100644 --- a/crates/notedeck_columns/src/timeline/route.rs +++ b/crates/notedeck_columns/src/timeline/route.rs @@ -3,6 +3,7 @@ use crate::{ draft::Drafts, nav::RenderNavAction, profile::ProfileAction, + storage::{ParseError, Payload, Token, TokenParser, TokenSerializable, TokenWriter}, timeline::{TimelineCache, TimelineId, TimelineKind}, ui::{ self, @@ -24,6 +25,67 @@ pub enum TimelineRoute { Quote(NoteId), } +const PROFILE_TOKENS: &[Token] = &[Token::id("profile"), Token::pubkey()]; +const THREAD_TOKENS: &[Token] = &[Token::id("thread"), Token::note_id()]; +const REPLY_TOKENS: &[Token] = &[Token::id("reply"), Token::note_id()]; +const QUOTE_TOKENS: &[Token] = &[Token::id("quote"), Token::note_id()]; + +impl TimelineRoute { + fn payload(&self) -> Option { + match self { + TimelineRoute::Profile(pk) => Some(Payload::pubkey(*pk)), + TimelineRoute::Thread(note_id) => Some(Payload::note_id(*note_id)), + TimelineRoute::Reply(note_id) => Some(Payload::note_id(*note_id)), + TimelineRoute::Quote(note_id) => Some(Payload::note_id(*note_id)), + TimelineRoute::Timeline(_timeline_id) => todo!("handle timeline_ids"), + } + } + + fn tokens(&self) -> &'static [Token] { + match self { + TimelineRoute::Profile(_) => PROFILE_TOKENS, + TimelineRoute::Thread(_) => THREAD_TOKENS, + TimelineRoute::Reply(_) => REPLY_TOKENS, + TimelineRoute::Quote(_) => QUOTE_TOKENS, + TimelineRoute::Timeline(_) => todo!("handle timeline_ids"), + } + } + + /// NOTE!! 
update parse_from_tokens as well when adding to this match + fn parse<'a>(&self, parser: &mut TokenParser<'a>) -> Result> { + let payload = Token::parse_all(parser, self.tokens())?; + + match self { + TimelineRoute::Profile(_) => { + Ok(TimelineRoute::Profile(Payload::parse_pubkey(payload)?)) + } + TimelineRoute::Thread(_) => Ok(TimelineRoute::Thread(Payload::parse_note_id(payload)?)), + TimelineRoute::Reply(_) => Ok(TimelineRoute::Reply(Payload::parse_note_id(payload)?)), + TimelineRoute::Quote(_) => Ok(TimelineRoute::Quote(Payload::parse_note_id(payload)?)), + TimelineRoute::Timeline(_) => todo!("handle timeline parsing"), + } + } +} + +impl TokenSerializable for TimelineRoute { + fn serialize_tokens(&self, writer: &mut TokenWriter) { + Token::serialize_all(writer, self.tokens(), self.payload().as_ref()); + } + + fn parse_from_tokens<'a>(parser: &mut TokenParser<'a>) -> Result> { + TokenParser::alt( + parser, + &[ + |p| TimelineRoute::Profile(Pubkey::new([0; 32])).parse(p), + |p| TimelineRoute::Thread(NoteId::new([0; 32])).parse(p), + |p| TimelineRoute::Reply(NoteId::new([0; 32])).parse(p), + |p| TimelineRoute::Quote(NoteId::new([0; 32])).parse(p), + |_p| todo!("handle timeline parsing"), + ], + ) + } +} + #[allow(clippy::too_many_arguments)] pub fn render_timeline_route( ndb: &Ndb, @@ -193,3 +255,28 @@ pub fn render_profile_route( None } } + +#[cfg(test)] +mod tests { + use crate::storage::{TokenParser, TokenSerializable, TokenWriter}; + use enostr::NoteId; + + #[test] + fn test_timeline_route_serialize() { + use super::TimelineRoute; + + { + let note_id_hex = "1c54e5b0c386425f7e017d9e068ddef8962eb2ce1bb08ed27e24b93411c12e60"; + let note_id = NoteId::from_hex(note_id_hex).unwrap(); + let data_str = format!("thread:{}", note_id_hex); + let data = &data_str.split(":").collect::>(); + let mut token_writer = TokenWriter::default(); + let mut parser = TokenParser::new(&data); + let parsed = TimelineRoute::parse_from_tokens(&mut parser).unwrap(); + let expected = TimelineRoute::Thread(note_id); + parsed.serialize_tokens(&mut token_writer); + assert_eq!(expected, parsed); + assert_eq!(token_writer.str(), data_str); + } + } +} From 29491cca055e8c73b32bb2f20b3e0abb467445eb Mon Sep 17 00:00:00 2001 From: William Casarin Date: Tue, 21 Jan 2025 14:02:17 -0800 Subject: [PATCH 11/18] tokens: initial Route token serializer Signed-off-by: William Casarin --- crates/notedeck_columns/src/route.rs | 90 ++++++++++++++++++++++++++++ 1 file changed, 90 insertions(+) diff --git a/crates/notedeck_columns/src/route.rs b/crates/notedeck_columns/src/route.rs index 3f06dc44..471b4456 100644 --- a/crates/notedeck_columns/src/route.rs +++ b/crates/notedeck_columns/src/route.rs @@ -4,6 +4,7 @@ use std::fmt::{self}; use crate::{ accounts::AccountsRoute, column::Columns, + storage::{ParseError, TokenParser, TokenSerializable, TokenWriter}, timeline::{kind::ColumnTitle, TimelineId, TimelineRoute}, ui::add_column::{AddAlgoRoute, AddColumnRoute}, }; @@ -22,6 +23,95 @@ pub enum Route { EditDeck(usize), } +impl TokenSerializable for Route { + fn serialize_tokens(&self, writer: &mut TokenWriter) { + match self { + Route::Timeline(routes) => routes.serialize_tokens(writer), + Route::Accounts(routes) => routes.serialize_tokens(writer), + Route::AddColumn(routes) => routes.serialize_tokens(writer), + Route::EditDeck(ind) => { + writer.write_token("deck"); + writer.write_token("edit"); + writer.write_token(&ind.to_string()); + } + Route::EditProfile(pubkey) => { + writer.write_token("profile"); + writer.write_token("edit"); + 
writer.write_token(&pubkey.hex()); + } + Route::Relays => { + writer.write_token("relay"); + } + Route::ComposeNote => { + writer.write_token("compose"); + } + Route::Support => { + writer.write_token("support"); + } + Route::NewDeck => { + writer.write_token("deck"); + writer.write_token("new"); + } + } + } + + fn parse_from_tokens<'a>(parser: &mut TokenParser<'a>) -> Result> { + TokenParser::alt( + parser, + &[ + |p| Ok(Route::Timeline(TimelineRoute::parse_from_tokens(p)?)), + |p| Ok(Route::Accounts(AccountsRoute::parse_from_tokens(p)?)), + |p| Ok(Route::AddColumn(AddColumnRoute::parse_from_tokens(p)?)), + |p| { + p.parse_all(|p| { + p.parse_token("deck")?; + p.parse_token("edit")?; + let ind_str = p.pull_token()?; + let parsed_index = ind_str + .parse::() + .map_err(|_| ParseError::DecodeFailed)?; + Ok(Route::EditDeck(parsed_index)) + }) + }, + |p| { + p.parse_all(|p| { + p.parse_token("profile")?; + p.parse_token("edit")?; + let pubkey = Pubkey::from_hex(p.pull_token()?) + .map_err(|_| ParseError::HexDecodeFailed)?; + Ok(Route::EditProfile(pubkey)) + }) + }, + |p| { + p.parse_all(|p| { + p.parse_token("relay")?; + Ok(Route::Relays) + }) + }, + |p| { + p.parse_all(|p| { + p.parse_token("compose")?; + Ok(Route::ComposeNote) + }) + }, + |p| { + p.parse_all(|p| { + p.parse_token("support")?; + Ok(Route::Support) + }) + }, + |p| { + p.parse_all(|p| { + p.parse_token("deck")?; + p.parse_token("new")?; + Ok(Route::NewDeck) + }) + }, + ], + ) + } +} + impl Route { pub fn timeline(timeline_id: TimelineId) -> Self { Route::Timeline(TimelineRoute::Timeline(timeline_id)) From ed455f7ea406c01b2eca71e9a7f601531004c3e8 Mon Sep 17 00:00:00 2001 From: William Casarin Date: Wed, 22 Jan 2025 13:11:30 -0800 Subject: [PATCH 12/18] add tokenator crate also remove a lot of the "advanced" token parsing style which was a bit too verbose for my tastes Signed-off-by: William Casarin --- Cargo.lock | 5 + Cargo.toml | 3 +- crates/notedeck_columns/Cargo.toml | 1 + crates/notedeck_columns/src/accounts/route.rs | 4 +- crates/notedeck_columns/src/route.rs | 3 +- crates/notedeck_columns/src/storage/decks.rs | 1 + crates/notedeck_columns/src/storage/mod.rs | 6 - .../src/storage/token_parser.rs | 512 ------------------ crates/notedeck_columns/src/timeline/kind.rs | 49 +- crates/notedeck_columns/src/timeline/route.rs | 93 ++-- crates/notedeck_columns/src/ui/add_column.rs | 36 +- crates/tokenator/Cargo.toml | 7 + crates/tokenator/README.md | 5 + crates/tokenator/src/lib.rs | 220 ++++++++ 14 files changed, 350 insertions(+), 595 deletions(-) delete mode 100644 crates/notedeck_columns/src/storage/token_parser.rs create mode 100644 crates/tokenator/Cargo.toml create mode 100644 crates/tokenator/README.md create mode 100644 crates/tokenator/src/lib.rs diff --git a/Cargo.lock b/Cargo.lock index 5ec5ce8f..b2231563 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2828,6 +2828,7 @@ dependencies = [ "strum_macros", "tempfile", "thiserror 2.0.7", + "tokenator", "tokio", "tracing", "tracing-appender", @@ -4527,6 +4528,10 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" +[[package]] +name = "tokenator" +version = "0.1.0" + [[package]] name = "tokio" version = "1.42.0" diff --git a/Cargo.toml b/Cargo.toml index c2c18722..fdb7a033 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -5,7 +5,7 @@ members = [ "crates/notedeck_chrome", "crates/notedeck_columns", - "crates/enostr", + "crates/enostr", "crates/tokenator", ] 
[workspace.dependencies] @@ -34,6 +34,7 @@ nostrdb = { git = "https://github.com/damus-io/nostrdb-rs", rev = "2111948b078b2 notedeck = { path = "crates/notedeck" } notedeck_chrome = { path = "crates/notedeck_chrome" } notedeck_columns = { path = "crates/notedeck_columns" } +tokenator = { path = "crates/tokenator" } open = "5.3.0" poll-promise = { version = "0.3.0", features = ["tokio"] } puffin = { git = "https://github.com/jb55/puffin", package = "puffin", rev = "70ff86d5503815219b01a009afd3669b7903a057" } diff --git a/crates/notedeck_columns/Cargo.toml b/crates/notedeck_columns/Cargo.toml index f641b25e..c6a7de0d 100644 --- a/crates/notedeck_columns/Cargo.toml +++ b/crates/notedeck_columns/Cargo.toml @@ -12,6 +12,7 @@ crate-type = ["lib", "cdylib"] [dependencies] notedeck = { workspace = true } +tokenator = { workspace = true } bitflags = { workspace = true } dirs = { workspace = true } eframe = { workspace = true } diff --git a/crates/notedeck_columns/src/accounts/route.rs b/crates/notedeck_columns/src/accounts/route.rs index 6447fffe..befcfc87 100644 --- a/crates/notedeck_columns/src/accounts/route.rs +++ b/crates/notedeck_columns/src/accounts/route.rs @@ -1,6 +1,6 @@ use super::{AccountLoginResponse, AccountsViewResponse}; -use crate::storage::{ParseError, TokenParser, TokenSerializable, TokenWriter}; use serde::{Deserialize, Serialize}; +use tokenator::{ParseError, TokenParser, TokenSerializable, TokenWriter}; pub enum AccountsRouteResponse { Accounts(AccountsViewResponse), @@ -58,7 +58,7 @@ fn parse_accounts_route<'a>( #[cfg(test)] mod tests { use super::*; - use crate::storage::{TokenParser, TokenSerializable, TokenWriter}; + use tokenator::{TokenParser, TokenSerializable, TokenWriter}; #[test] fn test_accounts_route_serialize() { diff --git a/crates/notedeck_columns/src/route.rs b/crates/notedeck_columns/src/route.rs index 471b4456..b8d8fe26 100644 --- a/crates/notedeck_columns/src/route.rs +++ b/crates/notedeck_columns/src/route.rs @@ -4,11 +4,12 @@ use std::fmt::{self}; use crate::{ accounts::AccountsRoute, column::Columns, - storage::{ParseError, TokenParser, TokenSerializable, TokenWriter}, timeline::{kind::ColumnTitle, TimelineId, TimelineRoute}, ui::add_column::{AddAlgoRoute, AddColumnRoute}, }; +use tokenator::{ParseError, TokenParser, TokenSerializable, TokenWriter}; + /// App routing. These describe different places you can go inside Notedeck. 
#[derive(Clone, Copy, Eq, PartialEq, Debug)] pub enum Route { diff --git a/crates/notedeck_columns/src/storage/decks.rs b/crates/notedeck_columns/src/storage/decks.rs index 8bd1389b..ee819500 100644 --- a/crates/notedeck_columns/src/storage/decks.rs +++ b/crates/notedeck_columns/src/storage/decks.rs @@ -18,6 +18,7 @@ use crate::{ }; use notedeck::{storage, DataPath, DataPathType, Directory}; +use tokenator::{ParseError, TokenParser, TokenSerializable, TokenWriter}; pub static DECKS_CACHE_FILE: &str = "decks_cache.json"; diff --git a/crates/notedeck_columns/src/storage/mod.rs b/crates/notedeck_columns/src/storage/mod.rs index 95c88d7a..cda44eeb 100644 --- a/crates/notedeck_columns/src/storage/mod.rs +++ b/crates/notedeck_columns/src/storage/mod.rs @@ -1,11 +1,5 @@ mod decks; mod migration; -mod token_parser; pub use decks::{load_decks_cache, save_decks_cache, DECKS_CACHE_FILE}; pub use migration::{deserialize_columns, COLUMNS_FILE}; - -pub use token_parser::{ - ParseError, Payload, Token, TokenAlternatives, TokenParser, TokenPayload, TokenSerializable, - TokenWriter, UnexpectedToken, -}; diff --git a/crates/notedeck_columns/src/storage/token_parser.rs b/crates/notedeck_columns/src/storage/token_parser.rs deleted file mode 100644 index 7a855995..00000000 --- a/crates/notedeck_columns/src/storage/token_parser.rs +++ /dev/null @@ -1,512 +0,0 @@ -use crate::timeline::kind::PubkeySource; -use enostr::{NoteId, Pubkey}; - -#[derive(Debug, Clone)] -pub struct UnexpectedToken<'fnd, 'exp> { - pub expected: &'exp str, - pub found: &'fnd str, -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq)] -pub enum TokenPayload { - PubkeySource, - Pubkey, - NoteId, -} - -pub struct TokenAlternatives { - /// This is the preferred token. It should be serialized this way - preferred: &'static str, - - /// These are deprecated tokens that should still be handled and parsed - deprecated: &'static [&'static str], -} - -impl TokenAlternatives { - pub const fn new(preferred: &'static str, deprecated: &'static [&'static str]) -> Self { - Self { - preferred, - deprecated, - } - } -} - -/// Token is a unified serialization helper. 
By specifying a list of -/// tokens for each thing you want to parse, you can type-safely parse -/// and serialize things -pub enum Token { - /// A simple identifier - Identifier(&'static str), - - /// There are multiple ways to parse this identifier - Alternatives(TokenAlternatives), - - /// Different payload types, pubkeys etc - Payload(TokenPayload), -} - -#[derive(Debug, Clone)] -pub enum Payload { - PubkeySource(PubkeySource), - Pubkey(Pubkey), - NoteId(NoteId), -} - -impl Payload { - pub fn token_payload(&self) -> TokenPayload { - match self { - Payload::PubkeySource(_) => TokenPayload::PubkeySource, - Payload::Pubkey(_) => TokenPayload::Pubkey, - Payload::NoteId(_) => TokenPayload::NoteId, - } - } - - pub fn parse_note_id(payload: Option) -> Result> { - payload - .and_then(|p| p.get_note_id().cloned()) - .ok_or(ParseError::ExpectedPayload(TokenPayload::NoteId)) - } - - pub fn parse_pubkey(payload: Option) -> Result> { - payload - .and_then(|p| p.get_pubkey().cloned()) - .ok_or(ParseError::ExpectedPayload(TokenPayload::Pubkey)) - } - - pub fn parse_pubkey_source( - payload: Option, - ) -> Result> { - payload - .and_then(|p| p.get_pubkey_source().cloned()) - .ok_or(ParseError::ExpectedPayload(TokenPayload::Pubkey)) - } - - pub fn parse<'a>( - expected: TokenPayload, - parser: &mut TokenParser<'a>, - ) -> Result> { - match expected { - TokenPayload::PubkeySource => Ok(Payload::pubkey_source( - PubkeySource::parse_from_tokens(parser)?, - )), - TokenPayload::Pubkey => { - let pubkey = parser.try_parse(|p| { - let hex = p.pull_token()?; - Pubkey::from_hex(hex).map_err(|_| ParseError::HexDecodeFailed) - })?; - - Ok(Payload::pubkey(pubkey)) - } - TokenPayload::NoteId => { - let note_id = parser.try_parse(|p| { - let hex = p.pull_token()?; - NoteId::from_hex(hex).map_err(|_| ParseError::HexDecodeFailed) - })?; - - Ok(Payload::note_id(note_id)) - } - } - } - - pub fn pubkey(pubkey: Pubkey) -> Self { - Self::Pubkey(pubkey) - } - - pub fn pubkey_source(pubkey_src: PubkeySource) -> Self { - Self::PubkeySource(pubkey_src) - } - - pub fn note_id(note_id: NoteId) -> Self { - Self::NoteId(note_id) - } - - pub fn get_pubkey(&self) -> Option<&Pubkey> { - if let Self::Pubkey(pubkey) = self { - Some(pubkey) - } else { - None - } - } - - pub fn get_pubkey_source(&self) -> Option<&PubkeySource> { - if let Self::PubkeySource(pk_src) = self { - Some(pk_src) - } else { - None - } - } - - pub fn get_note_id(&self) -> Option<&NoteId> { - if let Self::NoteId(note_id) = self { - Some(note_id) - } else { - None - } - } -} - -impl Token { - pub fn parse<'a>( - &self, - parser: &mut TokenParser<'a>, - ) -> Result, ParseError<'a>> { - match self { - Token::Identifier(s) => { - parser.parse_token(s)?; - Ok(None) - } - - Token::Payload(payload) => { - let payload = Payload::parse(*payload, parser)?; - Ok(Some(payload)) - } - - Token::Alternatives(alts) => { - if parser.try_parse(|p| p.parse_token(alts.preferred)).is_ok() { - return Ok(None); - } - - for token in alts.deprecated { - if parser.try_parse(|p| p.parse_token(token)).is_ok() { - return Ok(None); - } - } - - Err(ParseError::AltAllFailed) - } - } - } - - /// Parse all of the tokens in sequence, ensuring that we extract a payload - /// if we find one. This only handles a single payload, if you need more, - /// then use a custom parser - pub fn parse_all<'a>( - parser: &mut TokenParser<'a>, - tokens: &[Token], - ) -> Result, ParseError<'a>> { - parser.try_parse(|p| { - let mut payload: Option = None; - for token in tokens { - if let Some(pl) = token.parse(p)? 
{ - payload = Some(pl); - } - } - - Ok(payload) - }) - } - - pub fn serialize_all(writer: &mut TokenWriter, tokens: &[Token], payload: Option<&Payload>) { - for token in tokens { - token.serialize(writer, payload) - } - } - - pub fn serialize(&self, writer: &mut TokenWriter, payload: Option<&Payload>) { - match self { - Token::Identifier(s) => writer.write_token(s), - Token::Alternatives(alts) => writer.write_token(alts.preferred), - Token::Payload(token_payload) => match token_payload { - TokenPayload::PubkeySource => { - payload - .and_then(|p| p.get_pubkey_source()) - .expect("expected pubkey payload") - .serialize_tokens(writer); - } - - TokenPayload::Pubkey => { - let pubkey = payload - .and_then(|p| p.get_pubkey()) - .expect("expected note_id payload"); - writer.write_token(&hex::encode(pubkey.bytes())); - } - - TokenPayload::NoteId => { - let note_id = payload - .and_then(|p| p.get_note_id()) - .expect("expected note_id payload"); - writer.write_token(&hex::encode(note_id.bytes())); - } - }, - } - } - - pub const fn id(s: &'static str) -> Self { - Token::Identifier(s) - } - - pub const fn alts(primary: &'static str, deprecated: &'static [&'static str]) -> Self { - Token::Alternatives(TokenAlternatives::new(primary, deprecated)) - } - - pub const fn pubkey() -> Self { - Token::Payload(TokenPayload::Pubkey) - } - - pub const fn pubkey_source() -> Self { - Token::Payload(TokenPayload::PubkeySource) - } - - pub const fn note_id() -> Self { - Token::Payload(TokenPayload::NoteId) - } -} - -#[derive(Debug, Clone)] -pub enum ParseError<'a> { - /// Not done parsing yet - Incomplete, - - /// All parsing options failed - AltAllFailed, - - /// There was some issue decoding the data - DecodeFailed, - - /// There was some issue decoding the data - ExpectedPayload(TokenPayload), - - HexDecodeFailed, - - /// We encountered an unexpected token - UnexpectedToken(UnexpectedToken<'a, 'static>), - - /// No more tokens - EOF, -} - -pub struct TokenWriter { - delim: &'static str, - tokens_written: usize, - buf: Vec, -} - -impl Default for TokenWriter { - fn default() -> Self { - Self::new(":") - } -} - -impl TokenWriter { - pub fn new(delim: &'static str) -> Self { - let buf = vec![]; - let tokens_written = 0; - Self { - buf, - tokens_written, - delim, - } - } - - pub fn write_token(&mut self, token: &str) { - if self.tokens_written > 0 { - self.buf.extend_from_slice(self.delim.as_bytes()) - } - self.buf.extend_from_slice(token.as_bytes()); - self.tokens_written += 1; - } - - pub fn str(&self) -> &str { - // SAFETY: only &strs are ever serialized, so its guaranteed to be - // correct here - unsafe { std::str::from_utf8_unchecked(self.buffer()) } - } - - pub fn buffer(&self) -> &[u8] { - &self.buf - } -} - -#[derive(Clone)] -pub struct TokenParser<'a> { - tokens: &'a [&'a str], - index: usize, -} - -fn _parse_pubkey_src_tokens<'a>( - parser: &mut TokenParser<'a>, -) -> Result> { - match parser.pull_token() { - // we handle bare payloads and assume they are explicit pubkey sources - Ok("explicit") => { - let hex_str = parser.pull_token()?; - Pubkey::from_hex(hex_str) - .map_err(|_| ParseError::DecodeFailed) - .map(PubkeySource::Explicit) - } - - Err(ParseError::EOF) | Ok("deck_author") => Ok(PubkeySource::DeckAuthor), - - Ok(hex_payload) => Pubkey::from_hex(hex_payload) - .map_err(|_| ParseError::DecodeFailed) - .map(PubkeySource::Explicit), - - Err(e) => Err(e), - } -} - -impl<'a> TokenParser<'a> { - /// alt tries each parser in `routes` until one succeeds. 
- /// If all fail, returns `ParseError::AltAllFailed`. - #[allow(clippy::type_complexity)] - pub fn alt( - parser: &mut TokenParser<'a>, - routes: &[fn(&mut TokenParser<'a>) -> Result>], - ) -> Result> { - let start = parser.index; - for route in routes { - match route(parser) { - Ok(r) => return Ok(r), // if success, stop trying more routes - Err(_) => { - // revert index & try next route - parser.index = start; - } - } - } - // if we tried them all and none succeeded - Err(ParseError::AltAllFailed) - } - - pub fn new(tokens: &'a [&'a str]) -> Self { - let index = 0; - Self { tokens, index } - } - - pub fn peek_parse_token(&mut self, expected: &'static str) -> Result<&'a str, ParseError<'a>> { - let found = self.peek_token()?; - if found == expected { - Ok(found) - } else { - Err(ParseError::UnexpectedToken(UnexpectedToken { - expected, - found, - })) - } - } - - pub fn parse_token(&mut self, expected: &'static str) -> Result<&'a str, ParseError<'a>> { - let found = self.pull_token()?; - if found == expected { - Ok(found) - } else { - Err(ParseError::UnexpectedToken(UnexpectedToken { - expected, - found, - })) - } - } - - /// “Parse all” meaning: run the provided closure. If it fails, revert - /// the index. - pub fn parse_all( - &mut self, - parse_fn: impl FnOnce(&mut Self) -> Result>, - ) -> Result> { - let start = self.index; - let result = parse_fn(self); - - // If the parser closure fails, revert the index - if result.is_err() { - self.index = start; - result - } else if !self.is_eof() { - Err(ParseError::Incomplete) - } else { - result - } - } - - /// Attempt to parse something, backtrack if we fail. - pub fn try_parse( - &mut self, - parse_fn: impl FnOnce(&mut Self) -> Result>, - ) -> Result> { - let start = self.index; - let result = parse_fn(self); - - // If the parser closure fails, revert the index - if result.is_err() { - self.index = start; - result - } else { - result - } - } - - pub fn pull_token(&mut self) -> Result<&'a str, ParseError<'a>> { - let token = self - .tokens - .get(self.index) - .copied() - .ok_or(ParseError::EOF)?; - self.index += 1; - Ok(token) - } - - pub fn unpop_token(&mut self) { - if (self.index as isize) - 1 < 0 { - return; - } - - self.index -= 1; - } - - pub fn peek_token(&self) -> Result<&'a str, ParseError<'a>> { - self.tokens() - .first() - .ok_or(ParseError::DecodeFailed) - .copied() - } - - #[inline] - pub fn tokens(&self) -> &'a [&'a str] { - let min_index = self.index.min(self.tokens.len()); - &self.tokens[min_index..] - } - - #[inline] - pub fn is_eof(&self) -> bool { - self.tokens().is_empty() - } -} - -pub trait TokenSerializable: Sized { - /// Return a list of serialization plans for a type. 
We do this for - /// type safety and assume constructing these types are lightweight - fn parse_from_tokens<'a>(parser: &mut TokenParser<'a>) -> Result>; - fn serialize_tokens(&self, writer: &mut TokenWriter); -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_column_serialize() { - use crate::ui::add_column::{AddAlgoRoute, AddColumnRoute}; - - { - let data_str = "column:algo_selection:last_per_pubkey"; - let data = &data_str.split(":").collect::>(); - let mut token_writer = TokenWriter::default(); - let mut parser = TokenParser::new(&data); - let parsed = AddColumnRoute::parse_from_tokens(&mut parser).unwrap(); - let expected = AddColumnRoute::Algo(AddAlgoRoute::LastPerPubkey); - parsed.serialize_tokens(&mut token_writer); - assert_eq!(expected, parsed); - assert_eq!(token_writer.str(), data_str); - } - - { - let data_str = "column"; - let mut token_writer = TokenWriter::default(); - let data: &[&str] = &[data_str]; - let mut parser = TokenParser::new(data); - let parsed = AddColumnRoute::parse_from_tokens(&mut parser).unwrap(); - let expected = AddColumnRoute::Base; - parsed.serialize_tokens(&mut token_writer); - assert_eq!(expected, parsed); - assert_eq!(token_writer.str(), data_str); - } - } -} diff --git a/crates/notedeck_columns/src/timeline/kind.rs b/crates/notedeck_columns/src/timeline/kind.rs index 8aa40b85..3b22129d 100644 --- a/crates/notedeck_columns/src/timeline/kind.rs +++ b/crates/notedeck_columns/src/timeline/kind.rs @@ -1,13 +1,11 @@ use crate::error::Error; -use crate::storage::{ - ParseError, Payload, Token, TokenParser, TokenPayload, TokenSerializable, TokenWriter, -}; use crate::timeline::{Timeline, TimelineTab}; use enostr::{Filter, Pubkey}; use nostrdb::{Ndb, Transaction}; use notedeck::{filter::default_limit, FilterError, FilterState, RootNoteIdBuf}; use serde::{Deserialize, Serialize}; use std::{borrow::Cow, fmt::Display}; +use tokenator::{ParseError, TokenParser, TokenSerializable, TokenWriter}; use tracing::{error, warn}; #[derive(Clone, Default, Debug, PartialEq, Eq, Serialize, Deserialize)] @@ -63,7 +61,7 @@ impl TokenSerializable for PubkeySource { let pk = Pubkey::from_hex(hex).map_err(|_| ParseError::HexDecodeFailed)?; Ok(PubkeySource::Explicit(pk)) } else { - Err(ParseError::ExpectedPayload(TokenPayload::Pubkey)) + Err(ParseError::HexDecodeFailed) } } @@ -78,8 +76,6 @@ impl TokenSerializable for PubkeySource { } } -const LIST_CONTACT_TOKENS: &[Token] = &[Token::alts("contacts", &["contact"]), Token::pubkey()]; - impl ListKind { pub fn contact_list(pk_src: PubkeySource) -> Self { ListKind::Contact(pk_src) @@ -90,38 +86,39 @@ impl ListKind { ListKind::Contact(pk_src) => Some(pk_src), } } - - fn payload(&self) -> Option { - match self { - ListKind::Contact(pk_src) => Some(Payload::pubkey_source(pk_src.clone())), - } - } - - const fn tokens(&self) -> &'static [Token] { - match self { - ListKind::Contact(_pubkey) => LIST_CONTACT_TOKENS, - } - } } impl TokenSerializable for ListKind { fn serialize_tokens(&self, writer: &mut TokenWriter) { - Token::serialize_all(writer, self.tokens(), self.payload().as_ref()); + match self { + ListKind::Contact(pk_src) => { + writer.write_token("contact"); + pk_src.serialize_tokens(writer); + } + } } fn parse_from_tokens<'a>(parser: &mut TokenParser<'a>) -> Result> { + parser.parse_all(|p| { + p.parse_token("contact")?; + let pk_src = PubkeySource::parse_from_tokens(p)?; + Ok(ListKind::Contact(pk_src)) + }) + + /* here for u when you need more things to parse TokenParser::alt( parser, &[|p| { - let maybe_payload 
=
-                    Token::parse_all(p, ListKind::Contact(PubkeySource::default()).tokens())?;
-                let payload = maybe_payload
-                    .as_ref()
-                    .and_then(|mp| mp.get_pubkey_source())
-                    .ok_or(ParseError::ExpectedPayload(TokenPayload::Pubkey))?;
-                Ok(ListKind::Contact(payload.to_owned()))
+                p.parse_all(|p| {
+                    p.parse_token("contact")?;
+                    let pk_src = PubkeySource::parse_from_tokens(p)?;
+                    Ok(ListKind::Contact(pk_src))
+                });
+            },|p| {
+                // more cases...
             }],
         )
+        */
     }
 }
 
diff --git a/crates/notedeck_columns/src/timeline/route.rs b/crates/notedeck_columns/src/timeline/route.rs
index 7a0effc4..89ff5ee8 100644
--- a/crates/notedeck_columns/src/timeline/route.rs
+++ b/crates/notedeck_columns/src/timeline/route.rs
@@ -3,7 +3,6 @@ use crate::{
     draft::Drafts,
     nav::RenderNavAction,
     profile::ProfileAction,
-    storage::{ParseError, Payload, Token, TokenParser, TokenSerializable, TokenWriter},
     timeline::{TimelineCache, TimelineId, TimelineKind},
     ui::{
         self,
@@ -12,6 +11,8 @@ use crate::{
     },
 };
 
+use tokenator::{ParseError, TokenParser, TokenSerializable, TokenWriter};
+
 use enostr::{NoteId, Pubkey};
 use nostrdb::{Ndb, Transaction};
 use notedeck::{Accounts, ImageCache, MuteFun, NoteCache, UnknownIds};
@@ -25,61 +26,61 @@ pub enum TimelineRoute {
     Quote(NoteId),
 }
 
-const PROFILE_TOKENS: &[Token] = &[Token::id("profile"), Token::pubkey()];
-const THREAD_TOKENS: &[Token] = &[Token::id("thread"), Token::note_id()];
-const REPLY_TOKENS: &[Token] = &[Token::id("reply"), Token::note_id()];
-const QUOTE_TOKENS: &[Token] = &[Token::id("quote"), Token::note_id()];
+fn parse_pubkey<'a>(parser: &mut TokenParser<'a>) -> Result<Pubkey, ParseError<'a>> {
+    let hex = parser.pull_token()?;
+    Pubkey::from_hex(hex).map_err(|_| ParseError::HexDecodeFailed)
+}
 
-impl TimelineRoute {
-    fn payload(&self) -> Option<Payload> {
-        match self {
-            TimelineRoute::Profile(pk) => Some(Payload::pubkey(*pk)),
-            TimelineRoute::Thread(note_id) => Some(Payload::note_id(*note_id)),
-            TimelineRoute::Reply(note_id) => Some(Payload::note_id(*note_id)),
-            TimelineRoute::Quote(note_id) => Some(Payload::note_id(*note_id)),
-            TimelineRoute::Timeline(_timeline_id) => todo!("handle timeline_ids"),
-        }
-    }
-
-    fn tokens(&self) -> &'static [Token] {
-        match self {
-            TimelineRoute::Profile(_) => PROFILE_TOKENS,
-            TimelineRoute::Thread(_) => THREAD_TOKENS,
-            TimelineRoute::Reply(_) => REPLY_TOKENS,
-            TimelineRoute::Quote(_) => QUOTE_TOKENS,
-            TimelineRoute::Timeline(_) => todo!("handle timeline_ids"),
-        }
-    }
-
-    /// NOTE!! update parse_from_tokens as well when adding to this match
-    fn parse<'a>(&self, parser: &mut TokenParser<'a>) -> Result<Self, ParseError<'a>> {
-        let payload = Token::parse_all(parser, self.tokens())?;
-
-        match self {
-            TimelineRoute::Profile(_) => {
-                Ok(TimelineRoute::Profile(Payload::parse_pubkey(payload)?))
-            }
-            TimelineRoute::Thread(_) => Ok(TimelineRoute::Thread(Payload::parse_note_id(payload)?)),
-            TimelineRoute::Reply(_) => Ok(TimelineRoute::Reply(Payload::parse_note_id(payload)?)),
-            TimelineRoute::Quote(_) => Ok(TimelineRoute::Quote(Payload::parse_note_id(payload)?)),
-            TimelineRoute::Timeline(_) => todo!("handle timeline parsing"),
-        }
-    }
+fn parse_note_id<'a>(parser: &mut TokenParser<'a>) -> Result<NoteId, ParseError<'a>> {
+    let hex = parser.pull_token()?;
+    NoteId::from_hex(hex).map_err(|_| ParseError::HexDecodeFailed)
 }
 
 impl TokenSerializable for TimelineRoute {
     fn serialize_tokens(&self, writer: &mut TokenWriter) {
-        Token::serialize_all(writer, self.tokens(), self.payload().as_ref());
+        match self {
+            TimelineRoute::Profile(pk) => {
+                writer.write_token("profile");
+                writer.write_token(&pk.hex());
+            }
+            TimelineRoute::Thread(note_id) => {
+                writer.write_token("thread");
+                writer.write_token(&note_id.hex());
+            }
+            TimelineRoute::Reply(note_id) => {
+                writer.write_token("reply");
+                writer.write_token(&note_id.hex());
+            }
+            TimelineRoute::Quote(note_id) => {
+                writer.write_token("quote");
+                writer.write_token(&note_id.hex());
+            }
+            TimelineRoute::Timeline(_tlid) => {
+                todo!("tlid")
+            }
+        }
     }
 
     fn parse_from_tokens<'a>(parser: &mut TokenParser<'a>) -> Result<Self, ParseError<'a>> {
         TokenParser::alt(
             parser,
             &[
-                |p| TimelineRoute::Profile(Pubkey::new([0; 32])).parse(p),
-                |p| TimelineRoute::Thread(NoteId::new([0; 32])).parse(p),
-                |p| TimelineRoute::Reply(NoteId::new([0; 32])).parse(p),
-                |p| TimelineRoute::Quote(NoteId::new([0; 32])).parse(p),
+                |p| {
+                    p.parse_token("profile")?;
+                    Ok(TimelineRoute::Profile(parse_pubkey(p)?))
+                },
+                |p| {
+                    p.parse_token("thread")?;
+                    Ok(TimelineRoute::Thread(parse_note_id(p)?))
+                },
+                |p| {
+                    p.parse_token("reply")?;
+                    Ok(TimelineRoute::Reply(parse_note_id(p)?))
+                },
+                |p| {
+                    p.parse_token("quote")?;
+                    Ok(TimelineRoute::Quote(parse_note_id(p)?))
+                },
                 |_p| todo!("handle timeline parsing"),
             ],
         )
@@ -258,8 +259,8 @@ pub fn render_profile_route(
 
 #[cfg(test)]
 mod tests {
-    use crate::storage::{TokenParser, TokenSerializable, TokenWriter};
     use enostr::NoteId;
+    use tokenator::{TokenParser, TokenSerializable, TokenWriter};
 
     #[test]
     fn test_timeline_route_serialize() {
diff --git a/crates/notedeck_columns/src/ui/add_column.rs b/crates/notedeck_columns/src/ui/add_column.rs
index cc8610f0..aeed68fb 100644
--- a/crates/notedeck_columns/src/ui/add_column.rs
+++ b/crates/notedeck_columns/src/ui/add_column.rs
@@ -11,13 +11,13 @@ use nostrdb::{Ndb, Transaction};
 use crate::{
     login_manager::AcquireKeyState,
     route::Route,
-    storage::{ParseError, TokenParser, TokenSerializable, TokenWriter},
     timeline::{kind::ListKind, PubkeySource, Timeline, TimelineKind},
     ui::anim::ICON_EXPANSION_MULTIPLE,
     Damus,
 };
 
 use notedeck::{AppContext, ImageCache, NotedeckTextStyle, UserAccount};
+use tokenator::{ParseError, TokenParser, TokenSerializable, TokenWriter};
 
 use super::{anim::AnimationHelper, padding, ProfilePreview};
 
@@ -765,3 +765,37 @@ pub fn hashtag_ui(
     })
     .inner
 }
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_column_serialize() {
+        use super::{AddAlgoRoute, AddColumnRoute};
+
+        {
+            let data_str = "column:algo_selection:last_per_pubkey";
+            let data = &data_str.split(":").collect::<Vec<&str>>();
+            let mut token_writer = TokenWriter::default();
+            let mut parser = TokenParser::new(&data);
+            let parsed = AddColumnRoute::parse_from_tokens(&mut parser).unwrap();
+            let expected = AddColumnRoute::Algo(AddAlgoRoute::LastPerPubkey);
+            parsed.serialize_tokens(&mut token_writer);
+            assert_eq!(expected, parsed);
+            assert_eq!(token_writer.str(), data_str);
+        }
+
+        {
+            let data_str = "column";
+            let mut token_writer = TokenWriter::default();
+            let data: &[&str] = &[data_str];
+            let mut parser = TokenParser::new(data);
+            let parsed = AddColumnRoute::parse_from_tokens(&mut parser).unwrap();
+            let expected = AddColumnRoute::Base;
+            parsed.serialize_tokens(&mut token_writer);
+            assert_eq!(expected, parsed);
+            assert_eq!(token_writer.str(), data_str);
+        }
+    }
+}
diff --git a/crates/tokenator/Cargo.toml b/crates/tokenator/Cargo.toml
new file mode 100644
index 00000000..38a4d16f
--- /dev/null
+++ b/crates/tokenator/Cargo.toml
@@ -0,0 +1,7 @@
+[package]
+name = "tokenator"
+version = "0.1.0"
+edition = "2021"
+description = "A simple library for parsing and serializing string tokens"
+
+[dependencies]
diff --git a/crates/tokenator/README.md b/crates/tokenator/README.md
new file mode 100644
index 00000000..0fc537d5
--- /dev/null
+++ b/crates/tokenator/README.md
@@ -0,0 +1,5 @@
+
+# tokenator
+
+Tokenator is a simple string token parser and serializer.
+
diff --git a/crates/tokenator/src/lib.rs b/crates/tokenator/src/lib.rs
new file mode 100644
index 00000000..0206b69a
--- /dev/null
+++ b/crates/tokenator/src/lib.rs
@@ -0,0 +1,220 @@
+#[derive(Debug, Clone)]
+pub struct UnexpectedToken<'fnd, 'exp> {
+    pub expected: &'exp str,
+    pub found: &'fnd str,
+}
+
+#[derive(Debug, Clone)]
+pub enum ParseError<'a> {
+    /// Not done parsing yet
+    Incomplete,
+
+    /// All parsing options failed
+    AltAllFailed,
+
+    /// There was some issue decoding the data
+    DecodeFailed,
+
+    HexDecodeFailed,
+
+    /// We encountered an unexpected token
+    UnexpectedToken(UnexpectedToken<'a, 'static>),
+
+    /// No more tokens
+    EOF,
+}
+
+pub struct TokenWriter {
+    delim: &'static str,
+    tokens_written: usize,
+    buf: Vec<u8>,
+}
+
+impl Default for TokenWriter {
+    fn default() -> Self {
+        Self::new(":")
+    }
+}
+
+impl TokenWriter {
+    pub fn new(delim: &'static str) -> Self {
+        let buf = vec![];
+        let tokens_written = 0;
+        Self {
+            buf,
+            tokens_written,
+            delim,
+        }
+    }
+
+    pub fn write_token(&mut self, token: &str) {
+        if self.tokens_written > 0 {
+            self.buf.extend_from_slice(self.delim.as_bytes())
+        }
+        self.buf.extend_from_slice(token.as_bytes());
+        self.tokens_written += 1;
+    }
+
+    pub fn str(&self) -> &str {
+        // SAFETY: only &strs are ever serialized, so it's guaranteed to be
+        // correct here
+        unsafe { std::str::from_utf8_unchecked(self.buffer()) }
+    }
+
+    pub fn buffer(&self) -> &[u8] {
+        &self.buf
+    }
+}
+
+#[derive(Clone)]
+pub struct TokenParser<'a> {
+    tokens: &'a [&'a str],
+    index: usize,
+}
+
+impl<'a> TokenParser<'a> {
+    /// alt tries each parser in `routes` until one succeeds.
+    /// If all fail, returns `ParseError::AltAllFailed`.
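+    /// The parser index is reverted before each attempt, so a failed route
+    /// never consumes tokens from the routes tried after it.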
+    #[allow(clippy::type_complexity)]
+    pub fn alt<R>(
+        parser: &mut TokenParser<'a>,
+        routes: &[fn(&mut TokenParser<'a>) -> Result<R, ParseError<'a>>],
+    ) -> Result<R, ParseError<'a>> {
+        let start = parser.index;
+        for route in routes {
+            match route(parser) {
+                Ok(r) => return Ok(r), // if success, stop trying more routes
+                Err(_) => {
+                    // revert index & try next route
+                    parser.index = start;
+                }
+            }
+        }
+        // if we tried them all and none succeeded
+        Err(ParseError::AltAllFailed)
+    }
+
+    pub fn new(tokens: &'a [&'a str]) -> Self {
+        let index = 0;
+        Self { tokens, index }
+    }
+
+    pub fn peek_parse_token(&mut self, expected: &'static str) -> Result<&'a str, ParseError<'a>> {
+        let found = self.peek_token()?;
+        if found == expected {
+            Ok(found)
+        } else {
+            Err(ParseError::UnexpectedToken(UnexpectedToken {
+                expected,
+                found,
+            }))
+        }
+    }
+
+    /// Parse a list of alternative tokens, returning success if any match.
+    pub fn parse_any_token(
+        &mut self,
+        expected: &[&'static str],
+    ) -> Result<&'a str, ParseError<'a>> {
+        for token in expected {
+            let result = self.try_parse(|p| p.parse_token(token));
+            if result.is_ok() {
+                return result;
+            }
+        }
+
+        Err(ParseError::AltAllFailed)
+    }
+
+    pub fn parse_token(&mut self, expected: &'static str) -> Result<&'a str, ParseError<'a>> {
+        let found = self.pull_token()?;
+        if found == expected {
+            Ok(found)
+        } else {
+            Err(ParseError::UnexpectedToken(UnexpectedToken {
+                expected,
+                found,
+            }))
+        }
+    }
+
+    /// Ensure that we have parsed all tokens. If not, the parser backtracks
+    /// and the parse does not succeed, returning [`ParseError::Incomplete`].
+    pub fn parse_all<R>(
+        &mut self,
+        parse_fn: impl FnOnce(&mut Self) -> Result<R, ParseError<'a>>,
+    ) -> Result<R, ParseError<'a>> {
+        let start = self.index;
+        let result = parse_fn(self);
+
+        // If the parser closure fails, revert the index
+        if result.is_err() {
+            self.index = start;
+            result
+        } else if !self.is_eof() {
+            Err(ParseError::Incomplete)
+        } else {
+            result
+        }
+    }
+
+    /// Attempt to parse something, backtrack if we fail.
+    pub fn try_parse<R>(
+        &mut self,
+        parse_fn: impl FnOnce(&mut Self) -> Result<R, ParseError<'a>>,
+    ) -> Result<R, ParseError<'a>> {
+        let start = self.index;
+        let result = parse_fn(self);
+
+        // If the parser closure fails, revert the index
+        if result.is_err() {
+            self.index = start;
+            result
+        } else {
+            result
+        }
+    }
+
+    pub fn pull_token(&mut self) -> Result<&'a str, ParseError<'a>> {
+        let token = self
+            .tokens
+            .get(self.index)
+            .copied()
+            .ok_or(ParseError::EOF)?;
+        self.index += 1;
+        Ok(token)
+    }
+
+    pub fn unpop_token(&mut self) {
+        if (self.index as isize) - 1 < 0 {
+            return;
+        }
+
+        self.index -= 1;
+    }
+
+    pub fn peek_token(&self) -> Result<&'a str, ParseError<'a>> {
+        self.tokens()
+            .first()
+            .ok_or(ParseError::DecodeFailed)
+            .copied()
+    }
+
+    #[inline]
+    pub fn tokens(&self) -> &'a [&'a str] {
+        let min_index = self.index.min(self.tokens.len());
+        &self.tokens[min_index..]
+    }
+
+    #[inline]
+    pub fn is_eof(&self) -> bool {
+        self.tokens().is_empty()
+    }
+}
+
+pub trait TokenSerializable: Sized {
+    /// Parse and serialize a type to and from string tokens. We do this for
+    /// type safety and assume constructing these types is lightweight
+    fn parse_from_tokens<'a>(parser: &mut TokenParser<'a>) -> Result<Self, ParseError<'a>>;
+    fn serialize_tokens(&self, writer: &mut TokenWriter);
+}

From d85b610cf42fd6c7ea35618d60db6c86be885c7c Mon Sep 17 00:00:00 2001
From: William Casarin
Date: Wed, 22 Jan 2025 13:12:03 -0800
Subject: [PATCH 13/18] note_id: add hex helpers for root notes

Signed-off-by: William Casarin
---
 crates/notedeck/src/note.rs | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/crates/notedeck/src/note.rs b/crates/notedeck/src/note.rs
index 4c351b10..e342186d 100644
--- a/crates/notedeck/src/note.rs
+++ b/crates/notedeck/src/note.rs
@@ -34,6 +34,10 @@ impl RootNoteIdBuf {
         root_note_id_from_selected_id(ndb, note_cache, txn, id).map(|rnid| Self(*rnid.bytes()))
     }
 
+    pub fn hex(&self) -> String {
+        hex::encode(self.bytes())
+    }
+
     pub fn new_unsafe(id: [u8; 32]) -> Self {
         Self(id)
     }
@@ -52,6 +56,10 @@ impl<'a> RootNoteId<'a> {
         self.0
     }
 
+    pub fn hex(&self) -> String {
+        hex::encode(self.bytes())
+    }
+
     pub fn to_owned(&self) -> RootNoteIdBuf {
         RootNoteIdBuf::new_unsafe(*self.bytes())
     }

From d108df86b4f4cf482556b1ca86f84a1c0a004b44 Mon Sep 17 00:00:00 2001
From: William Casarin
Date: Wed, 22 Jan 2025 13:13:04 -0800
Subject: [PATCH 14/18] tokens: add token serialization for AlgoTimeline

Signed-off-by: William Casarin
---
 crates/notedeck_columns/src/timeline/kind.rs | 26 ++++++++++++++++++++
 1 file changed, 26 insertions(+)

diff --git a/crates/notedeck_columns/src/timeline/kind.rs b/crates/notedeck_columns/src/timeline/kind.rs
index 3b22129d..5e39135e 100644
--- a/crates/notedeck_columns/src/timeline/kind.rs
+++ b/crates/notedeck_columns/src/timeline/kind.rs
@@ -160,6 +160,32 @@ pub enum AlgoTimeline {
     LastPerPubkey(ListKind),
 }
 
+/// The identifier for our last per pubkey algo
+const LAST_PER_PUBKEY_TOKEN: &str = "last_per_pubkey";
+
+impl TokenSerializable for AlgoTimeline {
+    fn serialize_tokens(&self, writer: &mut TokenWriter) {
+        match self {
+            AlgoTimeline::LastPerPubkey(list_kind) => {
+                writer.write_token(LAST_PER_PUBKEY_TOKEN);
+                list_kind.serialize_tokens(writer);
+            }
+        }
+    }
+
+    fn parse_from_tokens<'a>(parser: &mut TokenParser<'a>) -> Result<Self, ParseError<'a>> {
+        TokenParser::alt(
+            parser,
+            &[|p| {
+                p.parse_all(|p| {
+                    p.parse_token(LAST_PER_PUBKEY_TOKEN)?;
+                    Ok(AlgoTimeline::LastPerPubkey(ListKind::parse_from_tokens(p)?))
+                })
+            }],
+        )
+    }
+}
+
 impl Display for TimelineKind {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match self {

From 5ba06986db27894b4e55e3f70a64671cb07ae12b Mon Sep 17 00:00:00 2001
From: William Casarin
Date: Wed, 22 Jan 2025 13:12:42 -0800
Subject: [PATCH 15/18] tokens: add token serialization for TimelineKind

Signed-off-by: William Casarin
---
 crates/notedeck_columns/src/timeline/kind.rs | 81 ++++++++++++++++++++
 1 file changed, 81 insertions(+)

diff --git a/crates/notedeck_columns/src/timeline/kind.rs b/crates/notedeck_columns/src/timeline/kind.rs
index 5e39135e..eacc1d68 100644
--- a/crates/notedeck_columns/src/timeline/kind.rs
+++ b/crates/notedeck_columns/src/timeline/kind.rs
@@ -152,6 +152,87 @@ pub enum TimelineKind {
     Hashtag(String),
 }
 
+const NOTIFS_TOKEN_DEPRECATED: &str = "notifs";
+const NOTIFS_TOKEN: &str = "notifications";
+
+fn parse_hex_id<'a>(parser: &mut TokenParser<'a>) -> Result<[u8; 32], ParseError<'a>> {
+    let hex = parser.pull_token()?;
+    hex::decode(hex)
+        .map_err(|_| ParseError::HexDecodeFailed)?
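+        // hex::decode gives a Vec<u8>; convert it into the fixed-size
+        // [u8; 32] id below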
+        .as_slice()
+        .try_into()
+        .map_err(|_| ParseError::HexDecodeFailed)
+}
+
+impl TokenSerializable for TimelineKind {
+    fn serialize_tokens(&self, writer: &mut TokenWriter) {
+        match self {
+            TimelineKind::List(list_kind) => list_kind.serialize_tokens(writer),
+            TimelineKind::Algo(algo_timeline) => algo_timeline.serialize_tokens(writer),
+            TimelineKind::Notifications(pk_src) => {
+                writer.write_token(NOTIFS_TOKEN);
+                pk_src.serialize_tokens(writer);
+            }
+            TimelineKind::Profile(pk_src) => {
+                writer.write_token("profile");
+                pk_src.serialize_tokens(writer);
+            }
+            TimelineKind::Thread(root_note_id) => {
+                writer.write_token("thread");
+                writer.write_token(&root_note_id.hex());
+            }
+            TimelineKind::Universe => {
+                writer.write_token("universe");
+            }
+            TimelineKind::Generic => {
+                writer.write_token("generic");
+            }
+            TimelineKind::Hashtag(ht) => {
+                writer.write_token("hashtag");
+                writer.write_token(ht);
+            }
+        }
+    }
+
+    fn parse_from_tokens<'a>(parser: &mut TokenParser<'a>) -> Result<Self, ParseError<'a>> {
+        TokenParser::alt(
+            parser,
+            &[
+                |p| Ok(TimelineKind::List(ListKind::parse_from_tokens(p)?)),
+                |p| Ok(TimelineKind::Algo(AlgoTimeline::parse_from_tokens(p)?)),
+                |p| {
+                    // still handle deprecated form (notifs)
+                    p.parse_any_token(&[NOTIFS_TOKEN, NOTIFS_TOKEN_DEPRECATED])?;
+                    Ok(TimelineKind::Notifications(
+                        PubkeySource::parse_from_tokens(p)?,
+                    ))
+                },
+                |p| {
+                    p.parse_token("profile")?;
+                    Ok(TimelineKind::Profile(PubkeySource::parse_from_tokens(p)?))
+                },
+                |p| {
+                    p.parse_token("thread")?;
+                    let note_id = RootNoteIdBuf::new_unsafe(parse_hex_id(p)?);
+                    Ok(TimelineKind::Thread(note_id))
+                },
+                |p| {
+                    p.parse_token("universe")?;
+                    Ok(TimelineKind::Universe)
+                },
+                |p| {
+                    p.parse_token("generic")?;
+                    Ok(TimelineKind::Generic)
+                },
+                |p| {
+                    p.parse_token("hashtag")?;
+                    Ok(TimelineKind::Hashtag(p.pull_token()?.to_string()))
+                },
+            ],
+        )
+    }
+}
+
 /// Hardcoded algo timelines
 #[derive(Debug, Clone, PartialEq, Eq)]
 pub enum AlgoTimeline {

From d46e526a45cce7b65c6e294a198b68ce4c286f5a Mon Sep 17 00:00:00 2001
From: William Casarin
Date: Wed, 22 Jan 2025 13:13:25 -0800
Subject: [PATCH 16/18] tokens: switch over to using token serialization

This removes all of the old serialization code

Signed-off-by: William Casarin
---
 crates/notedeck_columns/src/storage/decks.rs | 691 ++-----------------
 1 file changed, 52 insertions(+), 639 deletions(-)

diff --git a/crates/notedeck_columns/src/storage/decks.rs b/crates/notedeck_columns/src/storage/decks.rs
index ee819500..8033814c 100644
--- a/crates/notedeck_columns/src/storage/decks.rs
+++ b/crates/notedeck_columns/src/storage/decks.rs
@@ -1,19 +1,15 @@
 use std::{collections::HashMap, fmt, str::FromStr};
 
-use enostr::{NoteId, Pubkey};
+use enostr::Pubkey;
 use nostrdb::Ndb;
 use serde::{Deserialize, Serialize};
-use strum::IntoEnumIterator;
-use strum_macros::EnumIter;
 use tracing::{error, info};
 
 use crate::{
-    accounts::AccountsRoute,
     column::{Columns, IntermediaryRoute},
     decks::{Deck, Decks, DecksCache},
     route::Route,
-    timeline::{kind::ListKind, AlgoTimeline, PubkeySource, TimelineKind, TimelineRoute},
-    ui::add_column::{AddAlgoRoute, AddColumnRoute},
+    timeline::TimelineKind,
     Error,
 };
 
@@ -286,9 +282,9 @@ fn serialize_columns(columns: &Columns) -> Vec<Vec<String>> {
     for column in columns.columns() {
         let mut column_routes = Vec::new();
         for route in column.router().routes() {
-            if let Some(route_str) = serialize_route(route, columns) {
-                column_routes.push(route_str);
-            }
+            let mut writer = TokenWriter::default();
+            route.serialize_tokens(&mut writer);
+            
column_routes.push(writer.str().to_string()); } cols_serialized.push(column_routes); } @@ -296,27 +292,26 @@ fn serialize_columns(columns: &Columns) -> Vec> { cols_serialized } -fn deserialize_columns(ndb: &Ndb, deck_user: &[u8; 32], serialized: Vec>) -> Columns { +fn deserialize_columns(ndb: &Ndb, deck_user: &[u8; 32], columns: Vec>) -> Columns { let mut cols = Columns::new(); - for serialized_routes in serialized { + for column in columns { let mut cur_routes = Vec::new(); - for serialized_route in serialized_routes { - let selections = Selection::from_serialized(&serialized_route); - if let Some(route_intermediary) = selections_to_route(&selections) { - if let Some(ir) = route_intermediary.intermediary_route(ndb, Some(deck_user)) { - match &ir { - IntermediaryRoute::Route(Route::Timeline(TimelineRoute::Thread(_))) - | IntermediaryRoute::Route(Route::Timeline(TimelineRoute::Profile(_))) => { - // Do nothing. TimelineRoute Threads & Profiles not yet supported for deserialization - } - _ => cur_routes.push(ir), + + for route in column { + let tokens: Vec<&str> = route.split(":").collect(); + let mut parser = TokenParser::new(&tokens); + + match CleanIntermediaryRoute::parse_from_tokens(&mut parser) { + Ok(route_intermediary) => { + if let Some(ir) = + route_intermediary.into_intermediary_route(ndb, Some(deck_user)) + { + cur_routes.push(ir); } } - } else { - error!( - "could not turn selections to RouteIntermediary: {:?}", - selections - ); + Err(err) => { + error!("could not turn tokens to RouteIntermediary: {:?}", err); + } } } @@ -328,223 +323,17 @@ fn deserialize_columns(ndb: &Ndb, deck_user: &[u8; 32], serialized: Vec Result { - Ok(parse_selection(serialized)) - } -} - -#[derive(Clone, PartialEq, Eq, Debug, EnumIter)] -enum AlgoKeyword { - LastPerPubkey, -} - -impl AlgoKeyword { - #[inline] - pub fn name(&self) -> &'static str { - match self { - AlgoKeyword::LastPerPubkey => "last_per_pubkey", - } - } -} - -#[derive(Clone, PartialEq, Eq, Debug, EnumIter)] -enum ListKeyword { - Contact, -} - -impl ListKeyword { - #[inline] - pub fn name(&self) -> &'static str { - match self { - ListKeyword::Contact => "contact", - } - } -} - -#[derive(Clone, PartialEq, Eq, Debug, EnumIter)] -enum PubkeySourceKeyword { - Explicit, - DeckAuthor, -} - -impl PubkeySourceKeyword { - #[inline] - pub fn name(&self) -> &'static str { - match self { - PubkeySourceKeyword::Explicit => "explicit", - PubkeySourceKeyword::DeckAuthor => "deck_author", - } - } -} - -#[derive(Clone, PartialEq, Eq, Debug, EnumIter)] -enum Keyword { - Notifs, - Universe, - Profile, - Hashtag, - Generic, - Thread, - Reply, - Quote, - Account, - Show, - New, - Relay, - Compose, - Column, - AlgoSelection, - NotificationSelection, - ExternalNotifSelection, - HashtagSelection, - Support, - Deck, - Edit, - IndividualSelection, - ExternalIndividualSelection, -} - -impl Keyword { - fn name(&self) -> &'static str { - match self { - Keyword::Notifs => "notifs", - Keyword::Universe => "universe", - Keyword::Profile => "profile", - Keyword::Hashtag => "hashtag", - Keyword::Generic => "generic", - Keyword::Thread => "thread", - Keyword::Reply => "reply", - Keyword::Quote => "quote", - Keyword::Account => "account", - Keyword::Show => "show", - Keyword::New => "new", - Keyword::Relay => "relay", - Keyword::Compose => "compose", - Keyword::Column => "column", - Keyword::AlgoSelection => "algo_selection", - Keyword::NotificationSelection => "notification_selection", - Keyword::ExternalNotifSelection => "external_notif_selection", - 
Keyword::IndividualSelection => "individual_selection", - Keyword::ExternalIndividualSelection => "external_individual_selection", - Keyword::HashtagSelection => "hashtag_selection", - Keyword::Support => "support", - Keyword::Deck => "deck", - Keyword::Edit => "edit", - } - } -} - -impl fmt::Display for Keyword { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}", self.name()) - } -} - -impl fmt::Display for AlgoKeyword { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}", self.name()) - } -} - -impl fmt::Display for ListKeyword { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}", self.name()) - } -} - -impl FromStr for PubkeySourceKeyword { - type Err = Error; - - fn from_str(serialized: &str) -> Result { - for keyword in Self::iter() { - if serialized == keyword.name() { - return Ok(keyword); - } - } - - Err(Error::Generic( - "Could not convert string to Keyword enum".to_owned(), - )) - } -} - -impl FromStr for ListKeyword { - type Err = Error; - - fn from_str(serialized: &str) -> Result { - for keyword in Self::iter() { - if serialized == keyword.name() { - return Ok(keyword); - } - } - - Err(Error::Generic( - "Could not convert string to Keyword enum".to_owned(), - )) - } -} - -impl fmt::Display for PubkeySourceKeyword { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}", self.name()) - } -} - -impl FromStr for AlgoKeyword { - type Err = Error; - - fn from_str(serialized: &str) -> Result { - for keyword in Self::iter() { - if serialized == keyword.name() { - return Ok(keyword); - } - } - - Err(Error::Generic( - "Could not convert string to Keyword enum".to_owned(), - )) - } -} - -impl FromStr for Keyword { - type Err = Error; - - fn from_str(serialized: &str) -> Result { - for keyword in Self::iter() { - if serialized == keyword.name() { - return Ok(keyword); - } - } - - Err(Error::Generic( - "Could not convert string to Keyword enum".to_owned(), - )) - } -} - enum CleanIntermediaryRoute { ToTimeline(TimelineKind), ToRoute(Route), } impl CleanIntermediaryRoute { - fn intermediary_route(self, ndb: &Ndb, user: Option<&[u8; 32]>) -> Option { + fn into_intermediary_route( + self, + ndb: &Ndb, + user: Option<&[u8; 32]>, + ) -> Option { match self { CleanIntermediaryRoute::ToTimeline(timeline_kind) => Some(IntermediaryRoute::Timeline( timeline_kind.into_timeline(ndb, user)?, @@ -554,411 +343,35 @@ impl CleanIntermediaryRoute { } } -// TODO: The public-accessible version will be a subset of this -fn serialize_route(route: &Route, columns: &Columns) -> Option { - let mut selections: Vec = Vec::new(); - match route { - Route::Timeline(timeline_route) => match timeline_route { - TimelineRoute::Timeline(timeline_id) => { - if let Some(timeline) = columns.find_timeline(*timeline_id) { - match &timeline.kind { - TimelineKind::List(list_kind) => match list_kind { - ListKind::Contact(pubkey_source) => { - selections.push(Selection::List(ListKeyword::Contact)); - selections.extend(generate_pubkey_selections(pubkey_source)); - } - }, - TimelineKind::Algo(AlgoTimeline::LastPerPubkey(list_kind)) => { - match list_kind { - ListKind::Contact(pk_src) => { - selections.push(Selection::Algo(AlgoKeyword::LastPerPubkey)); - selections.push(Selection::List(ListKeyword::Contact)); - selections.extend(generate_pubkey_selections(pk_src)); - } - } - } - TimelineKind::Notifications(pubkey_source) => { - selections.push(Selection::Keyword(Keyword::Notifs)); - 
selections.extend(generate_pubkey_selections(pubkey_source)); - } - TimelineKind::Profile(pubkey_source) => { - selections.push(Selection::Keyword(Keyword::Profile)); - selections.extend(generate_pubkey_selections(pubkey_source)); - } - TimelineKind::Universe => { - selections.push(Selection::Keyword(Keyword::Universe)) - } - TimelineKind::Thread(root_id) => { - selections.push(Selection::Keyword(Keyword::Thread)); - selections.push(Selection::Payload(hex::encode(root_id.bytes()))); - } - TimelineKind::Generic => { - selections.push(Selection::Keyword(Keyword::Generic)) - } - TimelineKind::Hashtag(hashtag) => { - selections.push(Selection::Keyword(Keyword::Hashtag)); - selections.push(Selection::Payload(hashtag.to_string())); - } - } - } - } - TimelineRoute::Thread(note_id) => { - selections.push(Selection::Keyword(Keyword::Thread)); - selections.push(Selection::Payload(note_id.hex())); - } - TimelineRoute::Profile(pubkey) => { - selections.push(Selection::Keyword(Keyword::Profile)); - selections.push(Selection::PubkeySource(PubkeySourceKeyword::Explicit)); - selections.push(Selection::Payload(pubkey.hex())); - } - TimelineRoute::Reply(note_id) => { - selections.push(Selection::Keyword(Keyword::Reply)); - selections.push(Selection::Payload(note_id.hex())); - } - TimelineRoute::Quote(note_id) => { - selections.push(Selection::Keyword(Keyword::Quote)); - selections.push(Selection::Payload(note_id.hex())); - } - }, - Route::Accounts(accounts_route) => { - selections.push(Selection::Keyword(Keyword::Account)); - match accounts_route { - AccountsRoute::Accounts => selections.push(Selection::Keyword(Keyword::Show)), - AccountsRoute::AddAccount => selections.push(Selection::Keyword(Keyword::New)), - } - } - Route::Relays => selections.push(Selection::Keyword(Keyword::Relay)), - Route::ComposeNote => selections.push(Selection::Keyword(Keyword::Compose)), - Route::AddColumn(add_column_route) => { - selections.push(Selection::Keyword(Keyword::Column)); - match add_column_route { - AddColumnRoute::Base => (), - AddColumnRoute::Algo(algo_route) => match algo_route { - AddAlgoRoute::Base => { - selections.push(Selection::Keyword(Keyword::AlgoSelection)) - } - - AddAlgoRoute::LastPerPubkey => { - selections.push(Selection::Keyword(Keyword::AlgoSelection)); - selections.push(Selection::Algo(AlgoKeyword::LastPerPubkey)); - } - }, - AddColumnRoute::UndecidedNotification => { - selections.push(Selection::Keyword(Keyword::NotificationSelection)) - } - AddColumnRoute::ExternalNotification => { - selections.push(Selection::Keyword(Keyword::ExternalNotifSelection)) - } - AddColumnRoute::Hashtag => { - selections.push(Selection::Keyword(Keyword::HashtagSelection)) - } - AddColumnRoute::UndecidedIndividual => { - selections.push(Selection::Keyword(Keyword::IndividualSelection)) - } - AddColumnRoute::ExternalIndividual => { - selections.push(Selection::Keyword(Keyword::ExternalIndividualSelection)) - } - } - } - Route::Support => selections.push(Selection::Keyword(Keyword::Support)), - Route::NewDeck => { - selections.push(Selection::Keyword(Keyword::Deck)); - selections.push(Selection::Keyword(Keyword::New)); - } - Route::EditDeck(index) => { - selections.push(Selection::Keyword(Keyword::Deck)); - selections.push(Selection::Keyword(Keyword::Edit)); - selections.push(Selection::Payload(index.to_string())); - } - Route::EditProfile(pubkey) => { - selections.push(Selection::Keyword(Keyword::Profile)); - selections.push(Selection::Keyword(Keyword::Edit)); - selections.push(Selection::Payload(pubkey.hex())); - } - } - 
- if selections.is_empty() { - None - } else { - Some( - selections - .iter() - .map(|k| k.to_string()) - .collect::>() - .join(":"), - ) - } -} - -fn generate_pubkey_selections(source: &PubkeySource) -> Vec { - let mut selections = Vec::new(); - match source { - PubkeySource::Explicit(pubkey) => { - selections.push(Selection::PubkeySource(PubkeySourceKeyword::Explicit)); - selections.push(Selection::Payload(pubkey.hex())); - } - PubkeySource::DeckAuthor => { - selections.push(Selection::PubkeySource(PubkeySourceKeyword::DeckAuthor)); - } - } - selections -} - -/// Parses a selection -fn parse_selection(token: &str) -> Selection { - AlgoKeyword::from_str(token) - .map(Selection::Algo) - .or_else(|_| ListKeyword::from_str(token).map(Selection::List)) - .or_else(|_| PubkeySourceKeyword::from_str(token).map(Selection::PubkeySource)) - .or_else(|_| Keyword::from_str(token).map(Selection::Keyword)) - .unwrap_or_else(|_| Selection::Payload(token.to_owned())) -} - -impl Selection { - fn from_serialized(buffer: &str) -> Vec { - let mut selections = Vec::new(); - let seperator = ":"; - let sep_len = seperator.len(); - let mut pos = 0; - - while let Some(offset) = buffer[pos..].find(seperator) { - selections.push(parse_selection(&buffer[pos..pos + offset])); - pos = pos + offset + sep_len; - } - - selections.push(parse_selection(&buffer[pos..])); - - selections - } -} - -/// Parse an explicit:abdef... or deck_author from a Selection token stream. -/// -/// Also handle the case where there is nothing. We assume this means deck_author. -fn parse_pubkey_src_selection(tokens: &[Selection]) -> Option { - match tokens.first() { - // we handle bare payloads and assume they are explicit pubkey sources - Some(Selection::Payload(hex)) => { - let pk = Pubkey::from_hex(hex.as_str()).ok()?; - Some(PubkeySource::Explicit(pk)) - } - - Some(Selection::PubkeySource(PubkeySourceKeyword::Explicit)) => { - if let Selection::Payload(hex) = tokens.get(1)? { - let pk = Pubkey::from_hex(hex.as_str()).ok()?; - Some(PubkeySource::Explicit(pk)) - } else { - None - } - } - - None | Some(Selection::PubkeySource(PubkeySourceKeyword::DeckAuthor)) => { - Some(PubkeySource::DeckAuthor) - } - - Some(Selection::Keyword(_kw)) => None, - Some(Selection::Algo(_kw)) => None, - Some(Selection::List(_kw)) => None, - } -} - -/// Parse ListKinds from Selections -fn parse_list_kind_selections(tokens: &[Selection]) -> Option { - // only list selections are valid in this position - let list_kw = if let Selection::List(list_kw) = tokens.first()? { - list_kw - } else { - return None; - }; - - let pubkey_src = parse_pubkey_src_selection(&tokens[1..])?; - - Some(match list_kw { - ListKeyword::Contact => ListKind::contact_list(pubkey_src), - }) -} - -fn selections_to_route(selections: &[Selection]) -> Option { - match selections.first()? 
{ - Selection::Keyword(Keyword::AlgoSelection) => { - let r = match selections.get(1) { - None => AddColumnRoute::Algo(AddAlgoRoute::Base), - Some(Selection::Algo(algo_kw)) => match algo_kw { - AlgoKeyword::LastPerPubkey => AddColumnRoute::Algo(AddAlgoRoute::LastPerPubkey), - }, - // other keywords are invalid here - Some(_) => { - return None; - } - }; - - Some(CleanIntermediaryRoute::ToRoute(Route::AddColumn(r))) - } - - // Algorithm timelines - Selection::Algo(algo_kw) => { - let timeline_kind = match algo_kw { - AlgoKeyword::LastPerPubkey => { - let list_kind = parse_list_kind_selections(&selections[1..])?; - TimelineKind::last_per_pubkey(list_kind) - } - }; - - Some(CleanIntermediaryRoute::ToTimeline(timeline_kind)) - } - - // We never have PubkeySource keywords at the top level - Selection::PubkeySource(_pk_src) => None, - - Selection::List(ListKeyword::Contact) => { - // only pubkey/src is allowed in this position - let pubkey_src = parse_pubkey_src_selection(&selections[1..])?; - Some(CleanIntermediaryRoute::ToTimeline( - TimelineKind::contact_list(pubkey_src), - )) - } - - Selection::Keyword(Keyword::Notifs) => { - let pubkey_src = parse_pubkey_src_selection(&selections[1..])?; - Some(CleanIntermediaryRoute::ToTimeline( - TimelineKind::notifications(pubkey_src), - )) - } - - Selection::Keyword(Keyword::Profile) => { - // we only expect PubkeySource in this position - let pubkey_src = parse_pubkey_src_selection(&selections[1..])?; - Some(CleanIntermediaryRoute::ToTimeline(TimelineKind::profile( - pubkey_src, - ))) - } - - Selection::Keyword(Keyword::Universe) => { - Some(CleanIntermediaryRoute::ToTimeline(TimelineKind::Universe)) - } - - Selection::Keyword(Keyword::Hashtag) => { - if let Selection::Payload(hashtag) = selections.get(1)? { - Some(CleanIntermediaryRoute::ToTimeline(TimelineKind::Hashtag( - hashtag.to_string(), - ))) - } else { - None - } - } - - Selection::Keyword(Keyword::Generic) => { - Some(CleanIntermediaryRoute::ToTimeline(TimelineKind::Generic)) - } - - Selection::Keyword(Keyword::Thread) => { - if let Selection::Payload(hex) = selections.get(1)? { - Some(CleanIntermediaryRoute::ToRoute(Route::thread( - NoteId::from_hex(hex.as_str()).ok()?, - ))) - } else { - None - } - } - - Selection::Keyword(Keyword::Reply) => { - if let Selection::Payload(hex) = selections.get(1)? { - Some(CleanIntermediaryRoute::ToRoute(Route::reply( - NoteId::from_hex(hex.as_str()).ok()?, - ))) - } else { - None - } - } - Selection::Keyword(Keyword::Quote) => { - if let Selection::Payload(hex) = selections.get(1)? { - Some(CleanIntermediaryRoute::ToRoute(Route::quote( - NoteId::from_hex(hex.as_str()).ok()?, - ))) - } else { - None - } - } - Selection::Keyword(Keyword::Account) => match selections.get(1)? { - Selection::Keyword(Keyword::Show) => Some(CleanIntermediaryRoute::ToRoute( - Route::Accounts(AccountsRoute::Accounts), - )), - Selection::Keyword(Keyword::New) => Some(CleanIntermediaryRoute::ToRoute( - Route::Accounts(AccountsRoute::AddAccount), - )), - _ => None, - }, - Selection::Keyword(Keyword::Relay) => Some(CleanIntermediaryRoute::ToRoute(Route::Relays)), - Selection::Keyword(Keyword::Compose) => { - Some(CleanIntermediaryRoute::ToRoute(Route::ComposeNote)) - } - Selection::Keyword(Keyword::Column) => match selections.get(1)? 
{ - Selection::Keyword(Keyword::NotificationSelection) => { - Some(CleanIntermediaryRoute::ToRoute(Route::AddColumn( - AddColumnRoute::UndecidedNotification, - ))) - } - Selection::Keyword(Keyword::ExternalNotifSelection) => { - Some(CleanIntermediaryRoute::ToRoute(Route::AddColumn( - AddColumnRoute::ExternalNotification, - ))) - } - Selection::Keyword(Keyword::HashtagSelection) => Some(CleanIntermediaryRoute::ToRoute( - Route::AddColumn(AddColumnRoute::Hashtag), - )), - Selection::Keyword(Keyword::IndividualSelection) => { - Some(CleanIntermediaryRoute::ToRoute(Route::AddColumn( - AddColumnRoute::UndecidedIndividual, - ))) - } - Selection::Keyword(Keyword::ExternalIndividualSelection) => { - Some(CleanIntermediaryRoute::ToRoute(Route::AddColumn( - AddColumnRoute::ExternalIndividual, - ))) - } - _ => None, - }, - Selection::Keyword(Keyword::Support) => { - Some(CleanIntermediaryRoute::ToRoute(Route::Support)) - } - Selection::Keyword(Keyword::Deck) => match selections.get(1)? { - Selection::Keyword(Keyword::New) => { - Some(CleanIntermediaryRoute::ToRoute(Route::NewDeck)) - } - Selection::Keyword(Keyword::Edit) => { - if let Selection::Payload(index_str) = selections.get(2)? { - let parsed_index = index_str.parse::().ok()?; - Some(CleanIntermediaryRoute::ToRoute(Route::EditDeck( - parsed_index, - ))) - } else { - None - } - } - _ => None, - }, - Selection::Payload(_) - | Selection::Keyword(Keyword::New) - | Selection::Keyword(Keyword::Show) - | Selection::Keyword(Keyword::NotificationSelection) - | Selection::Keyword(Keyword::ExternalNotifSelection) - | Selection::Keyword(Keyword::HashtagSelection) - | Selection::Keyword(Keyword::IndividualSelection) - | Selection::Keyword(Keyword::ExternalIndividualSelection) - | Selection::Keyword(Keyword::Edit) => None, - } -} - -impl fmt::Display for Selection { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { +impl TokenSerializable for CleanIntermediaryRoute { + fn serialize_tokens(&self, writer: &mut TokenWriter) { match self { - Selection::Keyword(keyword) => write!(f, "{}", keyword), - Selection::Payload(payload) => write!(f, "{}", payload), - Selection::Algo(algo_kw) => write!(f, "{}", algo_kw), - Selection::List(list_kw) => write!(f, "{}", list_kw), - Selection::PubkeySource(pk_src_kw) => write!(f, "{}", pk_src_kw), + CleanIntermediaryRoute::ToTimeline(tlk) => { + tlk.serialize_tokens(writer); + } + CleanIntermediaryRoute::ToRoute(route) => { + route.serialize_tokens(writer); + } } } + + fn parse_from_tokens<'a>(parser: &mut TokenParser<'a>) -> Result> { + TokenParser::alt( + parser, + &[ + |p| { + Ok(CleanIntermediaryRoute::ToTimeline( + TimelineKind::parse_from_tokens(p)?, + )) + }, + |p| { + Ok(CleanIntermediaryRoute::ToRoute(Route::parse_from_tokens( + p, + )?)) + }, + ], + ) + } } #[cfg(test)] From 0cc1d8a60070734a7194f22a5e7147bb28229c2b Mon Sep 17 00:00:00 2001 From: William Casarin Date: Wed, 22 Jan 2025 15:59:21 -0800 Subject: [PATCH 17/18] Switch to unified timeline cache via TimelineKinds This is a fairly large rewrite which unifies our threads, timelines and profiles. Now all timelines have a MultiSubscriber, and can be added and removed to columns just like Threads and Profiles. 
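
To illustrate the unified flow (a rough sketch against the APIs
introduced in this patch, error handling elided): threads, profiles,
and list timelines are all opened and closed through the same cache,
keyed by TimelineKind:

    let kind = TimelineKind::contact_list(pubkey);

    // opening subscribes through the timeline's MultiSubscriber, or
    // bumps its reference counts if the timeline is already open
    if let Some(result) = timeline_cache.open(ndb, note_cache, &txn, pool, &kind) {
        // insert any notes nostrdb already has for this timeline
        result.process(ndb, note_cache, &txn, &mut timeline_cache, unknown_ids);
    }

    // closing a column pops the same key; subscriptions are only
    // dropped once the last reference goes away
    timeline_cache.pop(&kind, ndb, pool)?;
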
Signed-off-by: William Casarin --- .gitignore | 2 + Cargo.lock | 3 + Makefile | 2 +- crates/enostr/src/note.rs | 2 +- crates/notedeck/src/accounts.rs | 6 + crates/notedeck/src/error.rs | 3 + crates/notedeck/src/filter.rs | 15 +- crates/notedeck/src/note.rs | 9 +- crates/notedeck_chrome/src/notedeck.rs | 15 +- crates/notedeck_columns/src/actionbar.rs | 99 +-- crates/notedeck_columns/src/app.rs | 98 +-- crates/notedeck_columns/src/args.rs | 67 +- crates/notedeck_columns/src/column.rs | 158 ++-- crates/notedeck_columns/src/decks.rs | 43 +- crates/notedeck_columns/src/error.rs | 3 + crates/notedeck_columns/src/lib.rs | 1 - .../notedeck_columns/src/multi_subscriber.rs | 198 +++-- crates/notedeck_columns/src/nav.rs | 192 +++-- crates/notedeck_columns/src/profile.rs | 34 +- crates/notedeck_columns/src/route.rs | 172 +++-- crates/notedeck_columns/src/storage/decks.rs | 118 +-- .../notedeck_columns/src/storage/migration.rs | 697 ------------------ crates/notedeck_columns/src/storage/mod.rs | 2 - crates/notedeck_columns/src/subscriptions.rs | 4 +- crates/notedeck_columns/src/thread.rs | 27 - crates/notedeck_columns/src/timeline/cache.rs | 245 +++--- crates/notedeck_columns/src/timeline/kind.rs | 599 +++++++++------ crates/notedeck_columns/src/timeline/mod.rs | 133 ++-- crates/notedeck_columns/src/timeline/route.rs | 257 ++----- crates/notedeck_columns/src/ui/add_column.rs | 108 +-- .../notedeck_columns/src/ui/column/header.rs | 67 +- crates/notedeck_columns/src/ui/profile/mod.rs | 6 +- crates/notedeck_columns/src/ui/side_panel.rs | 14 +- crates/notedeck_columns/src/ui/thread.rs | 4 +- crates/notedeck_columns/src/ui/timeline.rs | 23 +- crates/notedeck_columns/src/unknowns.rs | 10 +- crates/tokenator/Cargo.toml | 1 + crates/tokenator/src/lib.rs | 12 + shell.nix | 1 - 39 files changed, 1395 insertions(+), 2055 deletions(-) delete mode 100644 crates/notedeck_columns/src/storage/migration.rs delete mode 100644 crates/notedeck_columns/src/thread.rs diff --git a/.gitignore b/.gitignore index 596f9259..17bd2514 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,8 @@ .buildcmd build.log perf.data +rusty-tags.vi +notedeck-settings perf.data.old crates/notedeck_chrome/android/app/build .privenv diff --git a/Cargo.lock b/Cargo.lock index b2231563..6f2b8510 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4531,6 +4531,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokenator" version = "0.1.0" +dependencies = [ + "hex", +] [[package]] name = "tokio" diff --git a/Makefile b/Makefile index c2ddda8f..cdb3ab39 100644 --- a/Makefile +++ b/Makefile @@ -7,7 +7,7 @@ check: cargo check tags: fake - find . 
-type d -name target -prune -o -type f -name '*.rs' -print | xargs ctags + rusty-tags vi jni: fake cargo ndk --target arm64-v8a -o $(ANDROID_DIR)/app/src/main/jniLibs/ build --profile release diff --git a/crates/enostr/src/note.rs b/crates/enostr/src/note.rs index 389f0839..12a05645 100644 --- a/crates/enostr/src/note.rs +++ b/crates/enostr/src/note.rs @@ -9,7 +9,7 @@ pub struct NoteId([u8; 32]); impl fmt::Debug for NoteId { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}", self.hex()) + write!(f, "NoteId({})", self.hex()) } } diff --git a/crates/notedeck/src/accounts.rs b/crates/notedeck/src/accounts.rs index a5b787f1..efd7636d 100644 --- a/crates/notedeck/src/accounts.rs +++ b/crates/notedeck/src/accounts.rs @@ -414,6 +414,12 @@ impl Accounts { .or_else(|| self.accounts.iter().find_map(|a| a.to_full())) } + /// Get the selected account's pubkey as bytes. Common operation so + /// we make it a helper here. + pub fn selected_account_pubkey_bytes(&self) -> Option<&[u8; 32]> { + self.get_selected_account().map(|kp| kp.pubkey.bytes()) + } + pub fn get_selected_account(&self) -> Option<&UserAccount> { if let Some(account_index) = self.currently_selected_account { if let Some(account) = self.get_account(account_index) { diff --git a/crates/notedeck/src/error.rs b/crates/notedeck/src/error.rs index 282bf2de..0faed60f 100644 --- a/crates/notedeck/src/error.rs +++ b/crates/notedeck/src/error.rs @@ -35,6 +35,9 @@ impl From for Error { pub enum FilterError { #[error("empty contact list")] EmptyContactList, + + #[error("filter not ready")] + FilterNotReady, } #[derive(Debug, Eq, PartialEq, Copy, Clone, thiserror::Error)] diff --git a/crates/notedeck/src/filter.rs b/crates/notedeck/src/filter.rs index 387beb4e..fba15011 100644 --- a/crates/notedeck/src/filter.rs +++ b/crates/notedeck/src/filter.rs @@ -1,6 +1,5 @@ use crate::error::{Error, FilterError}; use crate::note::NoteRef; -use crate::Result; use nostrdb::{Filter, FilterBuilder, Note, Subscription}; use std::collections::HashMap; use tracing::{debug, warn}; @@ -24,7 +23,7 @@ pub struct FilterStates { } impl FilterStates { - pub fn get(&mut self, relay: &str) -> &FilterState { + pub fn get_mut(&mut self, relay: &str) -> &FilterState { // if our initial state is ready, then just use that if let FilterState::Ready(_) = self.initial_state { &self.initial_state @@ -195,7 +194,7 @@ pub fn last_n_per_pubkey_from_tags( note: &Note, kind: u64, notes_per_pubkey: u64, -) -> Result> { +) -> Result, Error> { let mut filters: Vec = vec![]; for tag in note.tags() { @@ -250,7 +249,7 @@ pub fn filter_from_tags( note: &Note, add_pubkey: Option<&[u8; 32]>, with_hashtags: bool, -) -> Result { +) -> Result { let mut author_filter = Filter::new(); let mut hashtag_filter = Filter::new(); let mut author_res: Option = None; @@ -338,3 +337,11 @@ pub fn filter_from_tags( hashtags: hashtag_res, }) } + +pub fn make_filters_since(raw: &[Filter], since: u64) -> Vec { + let mut filters = Vec::with_capacity(raw.len()); + for builder in raw { + filters.push(Filter::copy_from(builder).since(since).build()); + } + filters +} diff --git a/crates/notedeck/src/note.rs b/crates/notedeck/src/note.rs index e342186d..c63ae12e 100644 --- a/crates/notedeck/src/note.rs +++ b/crates/notedeck/src/note.rs @@ -3,6 +3,7 @@ use enostr::NoteId; use nostrdb::{Ndb, Note, NoteKey, QueryResult, Transaction}; use std::borrow::Borrow; use std::cmp::Ordering; +use std::fmt; #[derive(Debug, Eq, PartialEq, Copy, Clone, Hash)] pub struct NoteRef { @@ -10,9 +11,15 @@ pub struct 
NoteRef { pub created_at: u64, } -#[derive(Clone, Copy, Eq, PartialEq, Debug, Hash)] +#[derive(Clone, Copy, Eq, PartialEq, Hash)] pub struct RootNoteIdBuf([u8; 32]); +impl fmt::Debug for RootNoteIdBuf { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "RootNoteIdBuf({})", self.hex()) + } +} + #[derive(Clone, Copy, Eq, PartialEq, Debug, Hash)] pub struct RootNoteId<'a>(&'a [u8; 32]); diff --git a/crates/notedeck_chrome/src/notedeck.rs b/crates/notedeck_chrome/src/notedeck.rs index 20a3787a..0ca84366 100644 --- a/crates/notedeck_chrome/src/notedeck.rs +++ b/crates/notedeck_chrome/src/notedeck.rs @@ -183,21 +183,20 @@ mod tests { .column(0) .router() .top() - .timeline_id(); + .timeline_id() + .unwrap(); let tl2 = app .columns(app_ctx.accounts) .column(1) .router() .top() - .timeline_id(); + .timeline_id() + .unwrap(); - assert_eq!(tl1.is_some(), true); - assert_eq!(tl2.is_some(), true); - - let timelines = app.columns(app_ctx.accounts).timelines(); - assert!(timelines[0].kind.is_notifications()); - assert!(timelines[1].kind.is_contacts()); + let timelines = app.timeline_cache.timelines.len() == 2; + assert!(app.timeline_cache.timelines.get(&tl1).is_some()); + assert!(app.timeline_cache.timelines.get(&tl2).is_some()); rmrf(tmpdir); } diff --git a/crates/notedeck_columns/src/actionbar.rs b/crates/notedeck_columns/src/actionbar.rs index f1636e85..04f23336 100644 --- a/crates/notedeck_columns/src/actionbar.rs +++ b/crates/notedeck_columns/src/actionbar.rs @@ -1,7 +1,7 @@ use crate::{ column::Columns, route::{Route, Router}, - timeline::{TimelineCache, TimelineCacheKey}, + timeline::{ThreadSelection, TimelineCache, TimelineKind}, }; use enostr::{NoteId, Pubkey, RelayPool}; @@ -17,13 +17,13 @@ pub enum NoteAction { OpenProfile(Pubkey), } -pub struct NewNotes<'a> { - pub id: TimelineCacheKey<'a>, +pub struct NewNotes { + pub id: TimelineKind, pub notes: Vec, } -pub enum TimelineOpenResult<'a> { - NewNotes(NewNotes<'a>), +pub enum TimelineOpenResult { + NewNotes(NewNotes), } /// open_thread is called when a note is selected and we need to navigate @@ -32,16 +32,18 @@ pub enum TimelineOpenResult<'a> { /// the thread view. We don't have a concept of model/view/controller etc /// in egui, but this is the closest thing to that. 
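/// Returns a TimelineOpenResult when the caller still needs to process
/// newly loaded notes into the opened thread timeline.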
#[allow(clippy::too_many_arguments)] -fn open_thread<'txn>( +fn open_thread( ndb: &Ndb, - txn: &'txn Transaction, + txn: &Transaction, router: &mut Router, note_cache: &mut NoteCache, pool: &mut RelayPool, timeline_cache: &mut TimelineCache, - selected_note: &'txn [u8; 32], -) -> Option> { - router.route_to(Route::thread(NoteId::new(selected_note.to_owned()))); + selected_note: &[u8; 32], +) -> Option { + router.route_to(Route::thread( + ThreadSelection::from_note_id(ndb, note_cache, txn, NoteId::new(*selected_note)).ok()?, + )); match root_note_id_from_selected_id(ndb, note_cache, txn, selected_note) { Ok(root_id) => timeline_cache.open( @@ -49,7 +51,7 @@ fn open_thread<'txn>( note_cache, txn, pool, - TimelineCacheKey::thread(root_id), + &TimelineKind::Thread(ThreadSelection::from_root_id(root_id.to_owned())), ), Err(RootIdError::NoteNotFound) => { @@ -72,18 +74,15 @@ fn open_thread<'txn>( impl NoteAction { #[allow(clippy::too_many_arguments)] - pub fn execute<'txn, 'a>( - &'a self, + pub fn execute( + &self, ndb: &Ndb, router: &mut Router, timeline_cache: &mut TimelineCache, note_cache: &mut NoteCache, pool: &mut RelayPool, - txn: &'txn Transaction, - ) -> Option> - where - 'a: 'txn, - { + txn: &Transaction, + ) -> Option { match self { NoteAction::Reply(note_id) => { router.route_to(Route::reply(*note_id)); @@ -102,13 +101,7 @@ impl NoteAction { NoteAction::OpenProfile(pubkey) => { router.route_to(Route::profile(*pubkey)); - timeline_cache.open( - ndb, - note_cache, - txn, - pool, - TimelineCacheKey::profile(pubkey.as_ref()), - ) + timeline_cache.open(ndb, note_cache, txn, pool, &TimelineKind::Profile(*pubkey)) } NoteAction::Quote(note_id) => { @@ -138,8 +131,8 @@ impl NoteAction { } } -impl<'a> TimelineOpenResult<'a> { - pub fn new_notes(notes: Vec, id: TimelineCacheKey<'a>) -> Self { +impl TimelineOpenResult { + pub fn new_notes(notes: Vec, id: TimelineKind) -> Self { Self::NewNotes(NewNotes::new(notes, id)) } @@ -160,8 +153,8 @@ impl<'a> TimelineOpenResult<'a> { } } -impl<'a> NewNotes<'a> { - pub fn new(notes: Vec, id: TimelineCacheKey<'a>) -> Self { +impl NewNotes { + pub fn new(notes: Vec, id: TimelineKind) -> Self { NewNotes { notes, id } } @@ -175,46 +168,18 @@ impl<'a> NewNotes<'a> { unknown_ids: &mut UnknownIds, note_cache: &mut NoteCache, ) { - match self.id { - TimelineCacheKey::Profile(pubkey) => { - let profile = if let Some(profile) = timeline_cache.profiles.get_mut(pubkey.bytes()) - { - profile - } else { - return; - }; + let reversed = matches!(&self.id, TimelineKind::Thread(_)); - let reversed = false; + let timeline = if let Some(profile) = timeline_cache.timelines.get_mut(&self.id) { + profile + } else { + error!("NewNotes: could not get timeline for key {}", self.id); + return; + }; - if let Err(err) = profile.timeline.insert( - &self.notes, - ndb, - txn, - unknown_ids, - note_cache, - reversed, - ) { - error!("error inserting notes into profile timeline: {err}") - } - } - - TimelineCacheKey::Thread(root_id) => { - // threads are chronological, ie reversed from reverse-chronological, the default. 
- let reversed = true; - let thread = if let Some(thread) = timeline_cache.threads.get_mut(root_id.bytes()) { - thread - } else { - return; - }; - - if let Err(err) = - thread - .timeline - .insert(&self.notes, ndb, txn, unknown_ids, note_cache, reversed) - { - error!("error inserting notes into thread timeline: {err}") - } - } + if let Err(err) = timeline.insert(&self.notes, ndb, txn, unknown_ids, note_cache, reversed) + { + error!("error inserting notes into profile timeline: {err}") } } } diff --git a/crates/notedeck_columns/src/app.rs b/crates/notedeck_columns/src/app.rs index 27302e16..d32edb6b 100644 --- a/crates/notedeck_columns/src/app.rs +++ b/crates/notedeck_columns/src/app.rs @@ -111,7 +111,7 @@ fn try_process_event( timeline::send_initial_timeline_filters( app_ctx.ndb, damus.since_optimize, - get_active_columns_mut(app_ctx.accounts, &mut damus.decks_cache), + &mut damus.timeline_cache, &mut damus.subscriptions, app_ctx.pool, &ev.relay, @@ -127,30 +127,16 @@ fn try_process_event( } } - let current_columns = get_active_columns_mut(app_ctx.accounts, &mut damus.decks_cache); - let n_timelines = current_columns.timelines().len(); - for timeline_ind in 0..n_timelines { - let is_ready = { - let timeline = &mut current_columns.timelines[timeline_ind]; - timeline::is_timeline_ready( - app_ctx.ndb, - app_ctx.pool, - app_ctx.note_cache, - timeline, - app_ctx - .accounts - .get_selected_account() - .as_ref() - .map(|sa| &sa.pubkey), - ) - }; + for (_kind, timeline) in damus.timeline_cache.timelines.iter_mut() { + let is_ready = + timeline::is_timeline_ready(app_ctx.ndb, app_ctx.pool, app_ctx.note_cache, timeline); if is_ready { let txn = Transaction::new(app_ctx.ndb).expect("txn"); // only thread timelines are reversed let reversed = false; - if let Err(err) = current_columns.timelines_mut()[timeline_ind].poll_notes_into_view( + if let Err(err) = timeline.poll_notes_into_view( app_ctx.ndb, &txn, app_ctx.unknown_ids, @@ -193,7 +179,7 @@ fn update_damus(damus: &mut Damus, app_ctx: &mut AppContext<'_>, ctx: &egui::Con if let Err(err) = timeline::setup_initial_nostrdb_subs( app_ctx.ndb, app_ctx.note_cache, - &mut damus.decks_cache, + &mut damus.timeline_cache, ) { warn!("update_damus init: {err}"); } @@ -208,15 +194,16 @@ fn update_damus(damus: &mut Damus, app_ctx: &mut AppContext<'_>, ctx: &egui::Con } fn handle_eose( - damus: &mut Damus, + subscriptions: &Subscriptions, + timeline_cache: &mut TimelineCache, ctx: &mut AppContext<'_>, subid: &str, relay_url: &str, ) -> Result<()> { - let sub_kind = if let Some(sub_kind) = damus.subscriptions().get(subid) { + let sub_kind = if let Some(sub_kind) = subscriptions.subs.get(subid) { sub_kind } else { - let n_subids = damus.subscriptions().len(); + let n_subids = subscriptions.subs.len(); warn!( "got unknown eose subid {}, {} tracked subscriptions", subid, n_subids @@ -224,7 +211,7 @@ fn handle_eose( return Ok(()); }; - match *sub_kind { + match sub_kind { SubKind::Timeline(_) => { // eose on timeline? 
whatevs } @@ -233,7 +220,7 @@ fn handle_eose( unknowns::update_from_columns( &txn, ctx.unknown_ids, - get_active_columns(ctx.accounts, &damus.decks_cache), + timeline_cache, ctx.ndb, ctx.note_cache, ); @@ -250,10 +237,7 @@ fn handle_eose( } SubKind::FetchingContactList(timeline_uid) => { - let timeline = if let Some(tl) = - get_active_columns_mut(ctx.accounts, &mut damus.decks_cache) - .find_timeline_mut(timeline_uid) - { + let timeline = if let Some(tl) = timeline_cache.timelines.get_mut(timeline_uid) { tl } else { error!( @@ -263,7 +247,7 @@ fn handle_eose( return Ok(()); }; - let filter_state = timeline.filter.get(relay_url); + let filter_state = timeline.filter.get_mut(relay_url); // If this request was fetching a contact list, our filter // state should be "FetchingRemote". We look at the local @@ -325,7 +309,13 @@ fn process_message(damus: &mut Damus, ctx: &mut AppContext<'_>, relay: &str, msg RelayMessage::Notice(msg) => warn!("Notice from {}: {}", relay, msg), RelayMessage::OK(cr) => info!("OK {:?}", cr), RelayMessage::Eose(sid) => { - if let Err(err) = handle_eose(damus, ctx, sid, relay) { + if let Err(err) = handle_eose( + &damus.subscriptions, + &mut damus.timeline_cache, + ctx, + sid, + relay, + ) { error!("error handling eose: {}", err); } } @@ -367,39 +357,58 @@ impl Damus { pub fn new(ctx: &mut AppContext<'_>, args: &[String]) -> Self { // arg parsing - let parsed_args = ColumnsArgs::parse(args); + let parsed_args = ColumnsArgs::parse( + args, + ctx.accounts + .get_selected_account() + .as_ref() + .map(|kp| &kp.pubkey), + ); + let account = ctx .accounts .get_selected_account() .as_ref() .map(|a| a.pubkey.bytes()); + let mut timeline_cache = TimelineCache::default(); let tmp_columns = !parsed_args.columns.is_empty(); let decks_cache = if tmp_columns { info!("DecksCache: loading from command line arguments"); let mut columns: Columns = Columns::new(); + let txn = Transaction::new(ctx.ndb).unwrap(); for col in parsed_args.columns { - if let Some(timeline) = col.into_timeline(ctx.ndb, account) { - columns.add_new_timeline_column(timeline); + let timeline_kind = col.into_timeline_kind(); + if let Some(add_result) = columns.add_new_timeline_column( + &mut timeline_cache, + &txn, + ctx.ndb, + ctx.note_cache, + ctx.pool, + &timeline_kind, + ) { + add_result.process( + ctx.ndb, + ctx.note_cache, + &txn, + &mut timeline_cache, + ctx.unknown_ids, + ); } } columns_to_decks_cache(columns, account) - } else if let Some(decks_cache) = crate::storage::load_decks_cache(ctx.path, ctx.ndb) { + } else if let Some(decks_cache) = + crate::storage::load_decks_cache(ctx.path, ctx.ndb, &mut timeline_cache) + { info!( "DecksCache: loading from disk {}", crate::storage::DECKS_CACHE_FILE ); decks_cache - } else if let Some(cols) = storage::deserialize_columns(ctx.path, ctx.ndb, account) { - info!( - "DecksCache: loading from disk at depreciated location {}", - crate::storage::COLUMNS_FILE - ); - columns_to_decks_cache(cols, account) } else { info!("DecksCache: creating new with demo configuration"); - let mut cache = DecksCache::new_with_demo_config(ctx.ndb); + let mut cache = DecksCache::new_with_demo_config(&mut timeline_cache, ctx); for account in ctx.accounts.get_accounts() { cache.add_deck_default(account.pubkey); } @@ -414,7 +423,7 @@ impl Damus { Self { subscriptions: Subscriptions::default(), since_optimize: parsed_args.since_optimize, - timeline_cache: TimelineCache::default(), + timeline_cache, drafts: Drafts::default(), state: DamusState::Initializing, textmode: parsed_args.textmode, @@ 
-565,7 +574,8 @@ fn timelines_view(ui: &mut egui::Ui, sizes: Size, app: &mut Damus, ctx: &mut App let mut save_cols = false; if let Some(action) = side_panel_action { - save_cols = save_cols || action.process(&mut app.decks_cache, ctx); + save_cols = + save_cols || action.process(&mut app.timeline_cache, &mut app.decks_cache, ctx); } let num_cols = app.columns(ctx.accounts).num_columns(); diff --git a/crates/notedeck_columns/src/args.rs b/crates/notedeck_columns/src/args.rs index 54d64ec3..fc2905ea 100644 --- a/crates/notedeck_columns/src/args.rs +++ b/crates/notedeck_columns/src/args.rs @@ -1,8 +1,5 @@ -use notedeck::FilterState; - -use crate::timeline::{PubkeySource, Timeline, TimelineKind, TimelineTab}; +use crate::timeline::TimelineKind; use enostr::{Filter, Pubkey}; -use nostrdb::Ndb; use tracing::{debug, error, info}; pub struct ColumnsArgs { @@ -12,7 +9,7 @@ pub struct ColumnsArgs { } impl ColumnsArgs { - pub fn parse(args: &[String]) -> Self { + pub fn parse(args: &[String], deck_author: Option<&Pubkey>) -> Self { let mut res = Self { columns: vec![], since_optimize: true, @@ -55,40 +52,48 @@ impl ColumnsArgs { if let Ok(pubkey) = Pubkey::parse(rest) { info!("contact column for user {}", pubkey.hex()); res.columns - .push(ArgColumn::Timeline(TimelineKind::contact_list( - PubkeySource::Explicit(pubkey), - ))) + .push(ArgColumn::Timeline(TimelineKind::contact_list(pubkey))) } else { error!("error parsing contacts pubkey {}", rest); continue; } } else if column_name == "contacts" { - res.columns - .push(ArgColumn::Timeline(TimelineKind::contact_list( - PubkeySource::DeckAuthor, - ))) + if let Some(deck_author) = deck_author { + res.columns + .push(ArgColumn::Timeline(TimelineKind::contact_list( + deck_author.to_owned(), + ))) + } else { + panic!("No accounts available, could not handle implicit pubkey contacts column") + } } else if let Some(notif_pk_str) = column_name.strip_prefix("notifications:") { if let Ok(pubkey) = Pubkey::parse(notif_pk_str) { info!("got notifications column for user {}", pubkey.hex()); res.columns - .push(ArgColumn::Timeline(TimelineKind::notifications( - PubkeySource::Explicit(pubkey), - ))) + .push(ArgColumn::Timeline(TimelineKind::notifications(pubkey))) } else { error!("error parsing notifications pubkey {}", notif_pk_str); continue; } } else if column_name == "notifications" { debug!("got notification column for default user"); - res.columns - .push(ArgColumn::Timeline(TimelineKind::notifications( - PubkeySource::DeckAuthor, - ))) + if let Some(deck_author) = deck_author { + res.columns + .push(ArgColumn::Timeline(TimelineKind::notifications( + deck_author.to_owned(), + ))); + } else { + panic!("Tried to push notifications timeline with no available users"); + } } else if column_name == "profile" { debug!("got profile column for default user"); - res.columns.push(ArgColumn::Timeline(TimelineKind::profile( - PubkeySource::DeckAuthor, - ))) + if let Some(deck_author) = deck_author { + res.columns.push(ArgColumn::Timeline(TimelineKind::profile( + deck_author.to_owned(), + ))); + } else { + panic!("Tried to push profile timeline with no available users"); + } } else if column_name == "universe" { debug!("got universe column"); res.columns @@ -96,9 +101,8 @@ impl ColumnsArgs { } else if let Some(profile_pk_str) = column_name.strip_prefix("profile:") { if let Ok(pubkey) = Pubkey::parse(profile_pk_str) { info!("got profile column for user {}", pubkey.hex()); - res.columns.push(ArgColumn::Timeline(TimelineKind::profile( - PubkeySource::Explicit(pubkey), - ))) + 
res.columns + .push(ArgColumn::Timeline(TimelineKind::profile(pubkey))) } else { error!("error parsing profile pubkey {}", profile_pk_str); continue; @@ -146,14 +150,13 @@ pub enum ArgColumn { } impl ArgColumn { - pub fn into_timeline(self, ndb: &Ndb, user: Option<&[u8; 32]>) -> Option { + pub fn into_timeline_kind(self) -> TimelineKind { match self { - ArgColumn::Generic(filters) => Some(Timeline::new( - TimelineKind::Generic, - FilterState::ready(filters), - TimelineTab::full_tabs(), - )), - ArgColumn::Timeline(tk) => tk.into_timeline(ndb, user), + ArgColumn::Generic(_filters) => { + // TODO: fix generic filters by referencing some filter map + TimelineKind::Generic(0) + } + ArgColumn::Timeline(tk) => tk, } } } diff --git a/crates/notedeck_columns/src/column.rs b/crates/notedeck_columns/src/column.rs index 2af24b6a..546f382a 100644 --- a/crates/notedeck_columns/src/column.rs +++ b/crates/notedeck_columns/src/column.rs @@ -1,8 +1,12 @@ -use crate::route::{Route, Router}; -use crate::timeline::{Timeline, TimelineId}; -use indexmap::IndexMap; +use crate::{ + actionbar::TimelineOpenResult, + route::{Route, Router}, + timeline::{Timeline, TimelineCache, TimelineKind}, +}; +use enostr::RelayPool; +use nostrdb::{Ndb, Transaction}; +use notedeck::NoteCache; use std::iter::Iterator; -use std::sync::atomic::{AtomicU32, Ordering}; use tracing::warn; #[derive(Clone)] @@ -28,36 +32,29 @@ impl Column { #[derive(Default)] pub struct Columns { /// Columns are simply routers into settings, timelines, etc - columns: IndexMap, - - /// Timeline state is not tied to routing logic separately, so that - /// different columns can navigate to and from settings to timelines, - /// etc. - pub timelines: IndexMap, + columns: Vec, /// The selected column for key navigation selected: i32, } -static UIDS: AtomicU32 = AtomicU32::new(0); impl Columns { pub fn new() -> Self { Columns::default() } - pub fn add_new_timeline_column(&mut self, timeline: Timeline) { - let id = Self::get_new_id(); - let routes = vec![Route::timeline(timeline.id)]; - self.timelines.insert(id, timeline); - self.columns.insert(id, Column::new(routes)); - } - - pub fn add_timeline_to_column(&mut self, col: usize, timeline: Timeline) { - let col_id = self.get_column_id_at_index(col); - self.column_mut(col) - .router_mut() - .route_to_replaced(Route::timeline(timeline.id)); - self.timelines.insert(col_id, timeline); + pub fn add_new_timeline_column( + &mut self, + timeline_cache: &mut TimelineCache, + txn: &Transaction, + ndb: &Ndb, + note_cache: &mut NoteCache, + pool: &mut RelayPool, + kind: &TimelineKind, + ) -> Option { + self.columns + .push(Column::new(vec![Route::timeline(kind.to_owned())])); + timeline_cache.open(ndb, note_cache, txn, pool, kind) } pub fn new_column_picker(&mut self) { @@ -66,38 +63,38 @@ impl Columns { )])); } - pub fn insert_intermediary_routes(&mut self, intermediary_routes: Vec) { - let id = Self::get_new_id(); - + pub fn insert_intermediary_routes( + &mut self, + timeline_cache: &mut TimelineCache, + intermediary_routes: Vec, + ) { let routes = intermediary_routes .into_iter() .map(|r| match r { IntermediaryRoute::Timeline(timeline) => { - let route = Route::timeline(timeline.id); - self.timelines.insert(id, timeline); + let route = Route::timeline(timeline.kind.clone()); + timeline_cache + .timelines + .insert(timeline.kind.clone(), timeline); route } IntermediaryRoute::Route(route) => route, }) .collect(); - self.columns.insert(id, Column::new(routes)); - } - - fn get_new_id() -> u32 { - UIDS.fetch_add(1, 
Ordering::Relaxed) + self.columns.push(Column::new(routes)); } pub fn add_column_at(&mut self, column: Column, index: u32) { - self.columns.insert(index, column); + self.columns.insert(index as usize, column); } pub fn add_column(&mut self, column: Column) { - self.columns.insert(Self::get_new_id(), column); + self.columns.push(column); } - pub fn columns_mut(&mut self) -> Vec<&mut Column> { - self.columns.values_mut().collect() + pub fn columns_mut(&mut self) -> &mut Vec { + &mut self.columns } pub fn num_columns(&self) -> usize { @@ -110,72 +107,23 @@ impl Columns { if self.columns.is_empty() { self.new_column_picker(); } - self.columns - .get_index_mut(0) - .expect("There should be at least one column") - .1 - .router_mut() - } - - pub fn timeline_mut(&mut self, timeline_ind: usize) -> &mut Timeline { - self.timelines - .get_index_mut(timeline_ind) - .expect("expected index to be in bounds") - .1 + self.columns[0].router_mut() } pub fn column(&self, ind: usize) -> &Column { - self.columns - .get_index(ind) - .expect("Expected index to be in bounds") - .1 + &self.columns[ind] } - pub fn columns(&self) -> Vec<&Column> { - self.columns.values().collect() - } - - pub fn get_column_id_at_index(&self, ind: usize) -> u32 { - *self - .columns - .get_index(ind) - .expect("expected index to be within bounds") - .0 + pub fn columns(&self) -> &[Column] { + &self.columns } pub fn selected(&mut self) -> &mut Column { - self.columns - .get_index_mut(self.selected as usize) - .expect("Expected selected index to be in bounds") - .1 - } - - pub fn timelines_mut(&mut self) -> Vec<&mut Timeline> { - self.timelines.values_mut().collect() - } - - pub fn timelines(&self) -> Vec<&Timeline> { - self.timelines.values().collect() - } - - pub fn find_timeline_mut(&mut self, id: TimelineId) -> Option<&mut Timeline> { - self.timelines_mut().into_iter().find(|tl| tl.id == id) - } - - pub fn find_timeline(&self, id: TimelineId) -> Option<&Timeline> { - self.timelines().into_iter().find(|tl| tl.id == id) + &mut self.columns[self.selected as usize] } pub fn column_mut(&mut self, ind: usize) -> &mut Column { - self.columns - .get_index_mut(ind) - .expect("Expected index to be in bounds") - .1 - } - - pub fn find_timeline_for_column_index(&self, ind: usize) -> Option<&Timeline> { - let col_id = self.get_column_id_at_index(ind); - self.timelines.get(&col_id) + &mut self.columns[ind] } pub fn select_down(&mut self) { @@ -200,16 +148,22 @@ impl Columns { self.selected += 1; } - pub fn delete_column(&mut self, index: usize) { - if let Some((key, _)) = self.columns.get_index_mut(index) { - self.timelines.shift_remove(key); + #[must_use = "you must call timeline_cache.pop() for each returned value"] + pub fn delete_column(&mut self, index: usize) -> Vec { + let mut kinds_to_pop: Vec = vec![]; + for route in self.columns[index].router().routes() { + if let Route::Timeline(kind) = route { + kinds_to_pop.push(kind.clone()); + } } - self.columns.shift_remove_index(index); + self.columns.remove(index); if self.columns.is_empty() { self.new_column_picker(); } + + kinds_to_pop } pub fn move_col(&mut self, from_index: usize, to_index: usize) { @@ -220,15 +174,7 @@ impl Columns { return; } - if from_index < to_index { - for i in from_index..to_index { - self.columns.swap_indices(i, i + 1); - } - } else { - for i in (to_index..from_index).rev() { - self.columns.swap_indices(i, i + 1); - } - } + self.columns.swap(from_index, to_index); } } diff --git a/crates/notedeck_columns/src/decks.rs b/crates/notedeck_columns/src/decks.rs index 
1d832bd9..ea7a7763 100644 --- a/crates/notedeck_columns/src/decks.rs +++ b/crates/notedeck_columns/src/decks.rs @@ -1,14 +1,15 @@ use std::collections::{hash_map::ValuesMut, HashMap}; use enostr::Pubkey; -use nostrdb::Ndb; +use nostrdb::Transaction; +use notedeck::AppContext; use tracing::{error, info}; use crate::{ accounts::AccountsRoute, column::{Column, Columns}, route::Route, - timeline::{self, Timeline, TimelineKind}, + timeline::{TimelineCache, TimelineKind}, ui::{add_column::AddColumnRoute, configure_deck::ConfigureDeckResponse}, }; @@ -44,10 +45,13 @@ impl DecksCache { } } - pub fn new_with_demo_config(ndb: &Ndb) -> Self { + pub fn new_with_demo_config(timeline_cache: &mut TimelineCache, ctx: &mut AppContext) -> Self { let mut account_to_decks: HashMap = Default::default(); let fallback_pubkey = FALLBACK_PUBKEY(); - account_to_decks.insert(fallback_pubkey, demo_decks(fallback_pubkey, ndb)); + account_to_decks.insert( + fallback_pubkey, + demo_decks(fallback_pubkey, timeline_cache, ctx), + ); DecksCache::new(account_to_decks) } @@ -298,7 +302,11 @@ impl Deck { } } -pub fn demo_decks(demo_pubkey: Pubkey, ndb: &Ndb) -> Decks { +pub fn demo_decks( + demo_pubkey: Pubkey, + timeline_cache: &mut TimelineCache, + ctx: &mut AppContext, +) -> Decks { let deck = { let mut columns = Columns::default(); columns.add_column(Column::new(vec![ @@ -306,14 +314,27 @@ pub fn demo_decks(demo_pubkey: Pubkey, ndb: &Ndb) -> Decks { Route::Accounts(AccountsRoute::Accounts), ])); - if let Some(timeline) = - TimelineKind::contact_list(timeline::PubkeySource::Explicit(demo_pubkey)) - .into_timeline(ndb, Some(demo_pubkey.bytes())) - { - columns.add_new_timeline_column(timeline); + let kind = TimelineKind::contact_list(demo_pubkey); + let txn = Transaction::new(ctx.ndb).unwrap(); + + if let Some(results) = columns.add_new_timeline_column( + timeline_cache, + &txn, + ctx.ndb, + ctx.note_cache, + ctx.pool, + &kind, + ) { + results.process( + ctx.ndb, + ctx.note_cache, + &txn, + timeline_cache, + ctx.unknown_ids, + ); } - columns.add_new_timeline_column(Timeline::hashtag("introductions".to_string())); + //columns.add_new_timeline_column(Timeline::hashtag("introductions".to_string())); Deck { icon: '🇩', diff --git a/crates/notedeck_columns/src/error.rs b/crates/notedeck_columns/src/error.rs index b00b28d8..abd6e7ec 100644 --- a/crates/notedeck_columns/src/error.rs +++ b/crates/notedeck_columns/src/error.rs @@ -5,6 +5,9 @@ pub enum Error { #[error("timeline not found")] TimelineNotFound, + #[error("timeline is missing a subscription")] + MissingSubscription, + #[error("load failed")] LoadFailed, diff --git a/crates/notedeck_columns/src/lib.rs b/crates/notedeck_columns/src/lib.rs index 923faa11..3ae0e640 100644 --- a/crates/notedeck_columns/src/lib.rs +++ b/crates/notedeck_columns/src/lib.rs @@ -29,7 +29,6 @@ mod route; mod subscriptions; mod support; mod test_data; -mod thread; mod timeline; pub mod ui; mod unknowns; diff --git a/crates/notedeck_columns/src/multi_subscriber.rs b/crates/notedeck_columns/src/multi_subscriber.rs index f1615e6c..d1ee34c3 100644 --- a/crates/notedeck_columns/src/multi_subscriber.rs +++ b/crates/notedeck_columns/src/multi_subscriber.rs @@ -1,107 +1,145 @@ use enostr::{Filter, RelayPool}; -use nostrdb::Ndb; +use nostrdb::{Ndb, Subscription}; use tracing::{error, info}; use uuid::Uuid; -use notedeck::UnifiedSubscription; - +#[derive(Debug)] pub struct MultiSubscriber { - filters: Vec, - pub sub: Option, - subscribers: u32, + pub filters: Vec, + pub local_subid: Option, + pub remote_subid: 
Option, + local_subscribers: u32, + remote_subscribers: u32, } impl MultiSubscriber { + /// Create a MultiSubscriber with an initial local subscription. + pub fn with_initial_local_sub(sub: Subscription, filters: Vec) -> Self { + let mut msub = MultiSubscriber::new(filters); + msub.local_subid = Some(sub); + msub.local_subscribers = 1; + msub + } + pub fn new(filters: Vec) -> Self { Self { filters, - sub: None, - subscribers: 0, + local_subid: None, + remote_subid: None, + local_subscribers: 0, + remote_subscribers: 0, } } - fn real_subscribe( - ndb: &Ndb, - pool: &mut RelayPool, - filters: Vec, - ) -> Option { - let subid = Uuid::new_v4().to_string(); - let sub = ndb.subscribe(&filters).ok()?; - - pool.subscribe(subid.clone(), filters); - - Some(UnifiedSubscription { - local: sub, - remote: subid, - }) - } - - pub fn unsubscribe(&mut self, ndb: &mut Ndb, pool: &mut RelayPool) { - if self.subscribers == 0 { - error!("No subscribers to unsubscribe from"); - return; - } - - self.subscribers -= 1; - if self.subscribers == 0 { - let sub = match self.sub { - Some(ref sub) => sub, - None => { - error!("No remote subscription to unsubscribe from"); - return; - } - }; - let local_sub = &sub.local; - if let Err(e) = ndb.unsubscribe(*local_sub) { - error!( - "failed to unsubscribe from object: {e}, subid:{}, {} active subscriptions", - local_sub.id(), - ndb.subscription_count() - ); - } else { - info!( - "Unsubscribed from object subid:{}. {} active subscriptions", - local_sub.id(), - ndb.subscription_count() - ); - } - - // unsub from remote - pool.unsubscribe(sub.remote.clone()); - self.sub = None; + fn unsubscribe_remote(&mut self, ndb: &Ndb, pool: &mut RelayPool) { + let remote_subid = if let Some(remote_subid) = &self.remote_subid { + remote_subid } else { - info!( - "Locally unsubscribing. {} active ndb subscriptions. {} active subscriptions for this object", - ndb.subscription_count(), - self.subscribers, - ); + self.err_log(ndb, "unsubscribe_remote: nothing to unsubscribe from?"); + return; + }; + + pool.unsubscribe(remote_subid.clone()); + + self.remote_subid = None; + } + + /// Locally unsubscribe if we have one + fn unsubscribe_local(&mut self, ndb: &mut Ndb) { + let local_sub = if let Some(local_sub) = self.local_subid { + local_sub + } else { + self.err_log(ndb, "unsubscribe_local: nothing to unsubscribe from?"); + return; + }; + + match ndb.unsubscribe(local_sub) { + Err(e) => { + self.err_log(ndb, &format!("Failed to unsubscribe: {e}")); + } + Ok(_) => { + self.local_subid = None; + } } } + pub fn unsubscribe(&mut self, ndb: &mut Ndb, pool: &mut RelayPool) -> bool { + if self.local_subscribers == 0 && self.remote_subscribers == 0 { + self.err_log( + ndb, + "Called multi_subscriber unsubscribe when both sub counts are 0", + ); + return false; + } + + self.local_subscribers = self.local_subscribers.saturating_sub(1); + self.remote_subscribers = self.remote_subscribers.saturating_sub(1); + + if self.local_subscribers == 0 && self.remote_subscribers == 0 { + self.info_log(ndb, "Locally unsubscribing"); + self.unsubscribe_local(ndb); + self.unsubscribe_remote(ndb, pool); + self.local_subscribers = 0; + self.remote_subscribers = 0; + true + } else { + false + } + } + + fn info_log(&self, ndb: &Ndb, msg: &str) { + info!( + "{msg}. {}/{}/{} active ndb/local/remote subscriptions.", + ndb.subscription_count(), + self.local_subscribers, + self.remote_subscribers, + ); + } + + fn err_log(&self, ndb: &Ndb, msg: &str) { + error!( + "{msg}. 
{}/{}/{} active ndb/local/remote subscriptions.", + ndb.subscription_count(), + self.local_subscribers, + self.remote_subscribers, + ); + } + pub fn subscribe(&mut self, ndb: &Ndb, pool: &mut RelayPool) { - self.subscribers += 1; - if self.subscribers == 1 { - if self.sub.is_some() { - error!("Object is first subscriber, but it already had remote subscription"); + self.local_subscribers += 1; + self.remote_subscribers += 1; + + if self.remote_subscribers == 1 { + if self.remote_subid.is_some() { + self.err_log( + ndb, + "Object is first subscriber, but it already had a subscription", + ); + return; + } else { + let subid = Uuid::new_v4().to_string(); + pool.subscribe(subid.clone(), self.filters.clone()); + self.info_log(ndb, "First remote subscription"); + self.remote_subid = Some(subid); + } + } + + if self.local_subscribers == 1 { + if self.local_subid.is_some() { + self.err_log(ndb, "Should not have a local subscription already"); return; } - self.sub = Self::real_subscribe(ndb, pool, self.filters.clone()); - info!( - "Remotely subscribing to object. {} total active subscriptions, {} on this object", - ndb.subscription_count(), - self.subscribers, - ); + match ndb.subscribe(&self.filters) { + Ok(sub) => { + self.info_log(ndb, "First local subscription"); + self.local_subid = Some(sub); + } - if self.sub.is_none() { - error!("Error subscribing remotely to object"); + Err(err) => { + error!("multi_subscriber: error subscribing locally: '{err}'") + } } - } else { - info!( - "Locally subscribing. {} total active subscriptions, {} for this object", - ndb.subscription_count(), - self.subscribers, - ) } } } diff --git a/crates/notedeck_columns/src/nav.rs b/crates/notedeck_columns/src/nav.rs index bcfedb5a..8b82ca08 100644 --- a/crates/notedeck_columns/src/nav.rs +++ b/crates/notedeck_columns/src/nav.rs @@ -1,7 +1,7 @@ use crate::{ accounts::render_accounts_route, actionbar::NoteAction, - app::{get_active_columns, get_active_columns_mut, get_decks_mut}, + app::{get_active_columns_mut, get_decks_mut}, column::ColumnsAction, deck_state::DeckState, decks::{Deck, DecksAction, DecksCache}, @@ -9,10 +9,7 @@ use crate::{ profile_state::ProfileState, relay_pool_manager::RelayPoolManager, route::Route, - timeline::{ - route::{render_timeline_route, TimelineRoute}, - Timeline, - }, + timeline::{route::render_timeline_route, TimelineCache}, ui::{ self, add_column::render_add_column_routes, @@ -27,11 +24,10 @@ use crate::{ Damus, }; -use notedeck::{AccountsAction, AppContext, RootIdError}; - use egui_nav::{Nav, NavAction, NavResponse, NavUiType}; -use nostrdb::{Ndb, Transaction}; -use tracing::{error, info}; +use nostrdb::Transaction; +use notedeck::{AccountsAction, AppContext}; +use tracing::error; #[allow(clippy::enum_variant_names)] pub enum RenderNavAction { @@ -51,7 +47,12 @@ pub enum SwitchingAction { impl SwitchingAction { /// process the action, and return whether switching occured - pub fn process(&self, decks_cache: &mut DecksCache, ctx: &mut AppContext<'_>) -> bool { + pub fn process( + &self, + timeline_cache: &mut TimelineCache, + decks_cache: &mut DecksCache, + ctx: &mut AppContext<'_>, + ) -> bool { match &self { SwitchingAction::Accounts(account_action) => match account_action { AccountsAction::Switch(switch_action) => { @@ -68,8 +69,15 @@ impl SwitchingAction { }, SwitchingAction::Columns(columns_action) => match *columns_action { ColumnsAction::Remove(index) => { - get_active_columns_mut(ctx.accounts, decks_cache).delete_column(index) + let kinds_to_pop = + 
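+                    // delete_column returns the timeline kinds the removed
+                    // column was routing to; each must be popped from the
+                    // timeline cache so unused subscriptions get dropped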
get_active_columns_mut(ctx.accounts, decks_cache).delete_column(index); + for kind in &kinds_to_pop { + if let Err(err) = timeline_cache.pop(kind, ctx.ndb, ctx.pool) { + error!("error popping timeline: {err}"); + } + } } + ColumnsAction::Switch(from, to) => { get_active_columns_mut(ctx.accounts, decks_cache).move_col(from, to); } @@ -133,14 +141,14 @@ impl RenderNavResponse { } RenderNavAction::RemoveColumn => { - let tl = app - .columns(ctx.accounts) - .find_timeline_for_column_index(col); - if let Some(timeline) = tl { - unsubscribe_timeline(ctx.ndb, timeline); + let kinds_to_pop = app.columns_mut(ctx.accounts).delete_column(col); + + for kind in &kinds_to_pop { + if let Err(err) = app.timeline_cache.pop(kind, ctx.ndb, ctx.pool) { + error!("error popping timeline: {err}"); + } } - app.columns_mut(ctx.accounts).delete_column(col); switching_occured = true; } @@ -169,7 +177,11 @@ impl RenderNavResponse { } RenderNavAction::SwitchingAction(switching_action) => { - switching_occured = switching_action.process(&mut app.decks_cache, ctx); + switching_occured = switching_action.process( + &mut app.timeline_cache, + &mut app.decks_cache, + ctx, + ); } RenderNavAction::ProfileAction(profile_action) => { profile_action.process( @@ -192,40 +204,12 @@ impl RenderNavResponse { .column_mut(col) .router_mut() .pop(); - let txn = Transaction::new(ctx.ndb).expect("txn"); - if let Some(Route::Timeline(TimelineRoute::Thread(id))) = r { - match notedeck::note::root_note_id_from_selected_id( - ctx.ndb, - ctx.note_cache, - &txn, - id.bytes(), - ) { - Ok(root_id) => { - if let Some(thread) = - app.timeline_cache.threads.get_mut(root_id.bytes()) - { - if let Some(sub) = &mut thread.subscription { - sub.unsubscribe(ctx.ndb, ctx.pool); - } - } - } - - Err(RootIdError::NoteNotFound) => { - error!("thread returned: note not found for unsub??: {}", id.hex()) - } - - Err(RootIdError::NoRootId) => { - error!("thread returned: note not found for unsub??: {}", id.hex()) - } + if let Some(Route::Timeline(kind)) = &r { + if let Err(err) = app.timeline_cache.pop(kind, ctx.ndb, ctx.pool) { + error!("popping timeline had an error: {err} for {:?}", kind); } - } else if let Some(Route::Timeline(TimelineRoute::Profile(pubkey))) = r { - if let Some(profile) = app.timeline_cache.profiles.get_mut(pubkey.bytes()) { - if let Some(sub) = &mut profile.subscription { - sub.unsubscribe(ctx.ndb, ctx.pool); - } - } - } + }; switching_occured = true; } @@ -255,21 +239,21 @@ fn render_nav_body( app: &mut Damus, ctx: &mut AppContext<'_>, top: &Route, + depth: usize, col: usize, ) -> Option { match top { - Route::Timeline(tlr) => render_timeline_route( + Route::Timeline(kind) => render_timeline_route( ctx.ndb, - get_active_columns_mut(ctx.accounts, &mut app.decks_cache), - &mut app.drafts, ctx.img_cache, ctx.unknown_ids, ctx.note_cache, &mut app.timeline_cache, ctx.accounts, - *tlr, + kind, col, app.textmode, + depth, ui, ), Route::Accounts(amr) => { @@ -294,6 +278,78 @@ fn render_nav_body( RelayView::new(ctx.accounts, manager, &mut app.view_state.id_string_map).ui(ui); None } + + Route::Reply(id) => { + let txn = if let Ok(txn) = Transaction::new(ctx.ndb) { + txn + } else { + ui.label("Reply to unknown note"); + return None; + }; + + let note = if let Ok(note) = ctx.ndb.get_note_by_id(&txn, id.bytes()) { + note + } else { + ui.label("Reply to unknown note"); + return None; + }; + + let id = egui::Id::new(("post", col, note.key().unwrap())); + let poster = ctx.accounts.selected_or_first_nsec()?; + + let action = { + let draft = 
app.drafts.reply_mut(note.id()); + + let response = egui::ScrollArea::vertical().show(ui, |ui| { + ui::PostReplyView::new( + ctx.ndb, + poster, + draft, + ctx.note_cache, + ctx.img_cache, + ¬e, + ) + .id_source(id) + .show(ui) + }); + + response.inner.action + }; + + action.map(Into::into) + } + + Route::Quote(id) => { + let txn = Transaction::new(ctx.ndb).expect("txn"); + + let note = if let Ok(note) = ctx.ndb.get_note_by_id(&txn, id.bytes()) { + note + } else { + ui.label("Quote of unknown note"); + return None; + }; + + let id = egui::Id::new(("post", col, note.key().unwrap())); + + let poster = ctx.accounts.selected_or_first_nsec()?; + let draft = app.drafts.quote_mut(note.id()); + + let response = egui::ScrollArea::vertical().show(ui, |ui| { + crate::ui::note::QuoteRepostView::new( + ctx.ndb, + poster, + ctx.note_cache, + ctx.img_cache, + draft, + ¬e, + ) + .id_source(id) + .show(ui) + }); + + response.inner.action.map(Into::into) + } + Route::ComposeNote => { let kp = ctx.accounts.get_selected_account()?.to_full()?; let draft = app.drafts.compose_mut(); @@ -421,9 +477,6 @@ pub fn render_nav( ctx: &mut AppContext<'_>, ui: &mut egui::Ui, ) -> RenderNavResponse { - let col_id = get_active_columns(ctx.accounts, &app.decks_cache).get_column_id_at_index(col); - // TODO(jb55): clean up this router_mut mess by using Router in egui-nav directly - let nav_response = Nav::new( &app.columns(ctx.accounts) .column(col) @@ -443,33 +496,24 @@ pub fn render_nav( .router_mut() .returning, ) - .id_source(egui::Id::new(col_id)) + .id_source(egui::Id::new(("nav", col))) .show_mut(ui, |ui, render_type, nav| match render_type { NavUiType::Title => NavTitle::new( ctx.ndb, ctx.img_cache, get_active_columns_mut(ctx.accounts, &mut app.decks_cache), - ctx.accounts.get_selected_account().map(|a| &a.pubkey), nav.routes(), col, ) .show(ui), - NavUiType::Body => render_nav_body(ui, app, ctx, nav.routes().last().expect("top"), col), + NavUiType::Body => { + if let Some(top) = nav.routes().last() { + render_nav_body(ui, app, ctx, top, nav.routes().len(), col) + } else { + None + } + } }); RenderNavResponse::new(col, nav_response) } - -fn unsubscribe_timeline(ndb: &mut Ndb, timeline: &Timeline) { - if let Some(sub_id) = timeline.subscription { - if let Err(e) = ndb.unsubscribe(sub_id) { - error!("unsubscribe error: {}", e); - } else { - info!( - "successfully unsubscribed from timeline {} with sub id {}", - timeline.id, - sub_id.id() - ); - } - } -} diff --git a/crates/notedeck_columns/src/profile.rs b/crates/notedeck_columns/src/profile.rs index 1a39be23..3a6f25fc 100644 --- a/crates/notedeck_columns/src/profile.rs +++ b/crates/notedeck_columns/src/profile.rs @@ -1,16 +1,13 @@ use std::collections::HashMap; -use enostr::{Filter, FullKeypair, Pubkey, PubkeyRef, RelayPool}; -use nostrdb::{FilterBuilder, Ndb, Note, NoteBuildOptions, NoteBuilder, ProfileRecord}; +use enostr::{FullKeypair, Pubkey, RelayPool}; +use nostrdb::{Ndb, Note, NoteBuildOptions, NoteBuilder, ProfileRecord}; -use notedeck::{filter::default_limit, FilterState}; use tracing::info; use crate::{ - multi_subscriber::MultiSubscriber, profile_state::ProfileState, route::{Route, Router}, - timeline::{PubkeySource, Timeline, TimelineKind, TimelineTab}, }; pub struct NostrName<'a> { @@ -75,33 +72,6 @@ pub fn get_display_name<'a>(record: Option<&ProfileRecord<'a>>) -> NostrName<'a> } } -pub struct Profile { - pub timeline: Timeline, - pub subscription: Option, -} - -impl Profile { - pub fn new(source: PubkeySource, filters: Vec) -> Self { - let timeline = 
Timeline::new( - TimelineKind::profile(source), - FilterState::ready(filters), - TimelineTab::full_tabs(), - ); - - Profile { - timeline, - subscription: None, - } - } - - pub fn filters_raw(pk: PubkeyRef<'_>) -> Vec { - vec![Filter::new() - .authors([pk.bytes()]) - .kinds([1]) - .limit(default_limit())] - } -} - pub struct SaveProfileChanges { pub kp: FullKeypair, pub state: ProfileState, diff --git a/crates/notedeck_columns/src/route.rs b/crates/notedeck_columns/src/route.rs index b8d8fe26..bcad8096 100644 --- a/crates/notedeck_columns/src/route.rs +++ b/crates/notedeck_columns/src/route.rs @@ -3,18 +3,22 @@ use std::fmt::{self}; use crate::{ accounts::AccountsRoute, - column::Columns, - timeline::{kind::ColumnTitle, TimelineId, TimelineRoute}, + timeline::{ + kind::{AlgoTimeline, ColumnTitle, ListKind}, + ThreadSelection, TimelineKind, + }, ui::add_column::{AddAlgoRoute, AddColumnRoute}, }; use tokenator::{ParseError, TokenParser, TokenSerializable, TokenWriter}; /// App routing. These describe different places you can go inside Notedeck. -#[derive(Clone, Copy, Eq, PartialEq, Debug)] +#[derive(Clone, Eq, PartialEq, Debug)] pub enum Route { - Timeline(TimelineRoute), + Timeline(TimelineKind), Accounts(AccountsRoute), + Reply(NoteId), + Quote(NoteId), Relays, ComposeNote, AddColumn(AddColumnRoute), @@ -24,12 +28,60 @@ pub enum Route { EditDeck(usize), } -impl TokenSerializable for Route { - fn serialize_tokens(&self, writer: &mut TokenWriter) { +impl Route { + pub fn timeline(timeline_kind: TimelineKind) -> Self { + Route::Timeline(timeline_kind) + } + + pub fn timeline_id(&self) -> Option<&TimelineKind> { + if let Route::Timeline(tid) = self { + Some(tid) + } else { + None + } + } + + pub fn relays() -> Self { + Route::Relays + } + + pub fn thread(thread_selection: ThreadSelection) -> Self { + Route::Timeline(TimelineKind::Thread(thread_selection)) + } + + pub fn profile(pubkey: Pubkey) -> Self { + Route::Timeline(TimelineKind::profile(pubkey)) + } + + pub fn reply(replying_to: NoteId) -> Self { + Route::Reply(replying_to) + } + + pub fn quote(quoting: NoteId) -> Self { + Route::Quote(quoting) + } + + pub fn accounts() -> Self { + Route::Accounts(AccountsRoute::Accounts) + } + + pub fn add_account() -> Self { + Route::Accounts(AccountsRoute::AddAccount) + } + + pub fn serialize_tokens(&self, writer: &mut TokenWriter) { match self { - Route::Timeline(routes) => routes.serialize_tokens(writer), + Route::Timeline(timeline_kind) => timeline_kind.serialize_tokens(writer), Route::Accounts(routes) => routes.serialize_tokens(writer), Route::AddColumn(routes) => routes.serialize_tokens(writer), + Route::Reply(note_id) => { + writer.write_token("reply"); + writer.write_token(¬e_id.hex()); + } + Route::Quote(note_id) => { + writer.write_token("quote"); + writer.write_token(¬e_id.hex()); + } Route::EditDeck(ind) => { writer.write_token("deck"); writer.write_token("edit"); @@ -56,11 +108,20 @@ impl TokenSerializable for Route { } } - fn parse_from_tokens<'a>(parser: &mut TokenParser<'a>) -> Result> { + pub fn parse<'a>( + parser: &mut TokenParser<'a>, + deck_author: &Pubkey, + ) -> Result> { + let tlkind = + parser.try_parse(|p| Ok(Route::Timeline(TimelineKind::parse(p, deck_author)?))); + + if tlkind.is_ok() { + return tlkind; + } + TokenParser::alt( parser, &[ - |p| Ok(Route::Timeline(TimelineRoute::parse_from_tokens(p)?)), |p| Ok(Route::Accounts(AccountsRoute::parse_from_tokens(p)?)), |p| Ok(Route::AddColumn(AddColumnRoute::parse_from_tokens(p)?)), |p| { @@ -89,6 +150,18 @@ impl TokenSerializable 
for Route { Ok(Route::Relays) }) }, + |p| { + p.parse_all(|p| { + p.parse_token("quote")?; + Ok(Route::Quote(NoteId::new(tokenator::parse_hex_id(p)?))) + }) + }, + |p| { + p.parse_all(|p| { + p.parse_token("reply")?; + Ok(Route::Reply(NoteId::new(tokenator::parse_hex_id(p)?))) + }) + }, |p| { p.parse_all(|p| { p.parse_token("compose")?; @@ -111,64 +184,13 @@ impl TokenSerializable for Route { ], ) } -} -impl Route { - pub fn timeline(timeline_id: TimelineId) -> Self { - Route::Timeline(TimelineRoute::Timeline(timeline_id)) - } - - pub fn timeline_id(&self) -> Option<&TimelineId> { - if let Route::Timeline(TimelineRoute::Timeline(tid)) = self { - Some(tid) - } else { - None - } - } - - pub fn relays() -> Self { - Route::Relays - } - - pub fn thread(thread_root: NoteId) -> Self { - Route::Timeline(TimelineRoute::Thread(thread_root)) - } - - pub fn profile(pubkey: Pubkey) -> Self { - Route::Timeline(TimelineRoute::Profile(pubkey)) - } - - pub fn reply(replying_to: NoteId) -> Self { - Route::Timeline(TimelineRoute::Reply(replying_to)) - } - - pub fn quote(quoting: NoteId) -> Self { - Route::Timeline(TimelineRoute::Quote(quoting)) - } - - pub fn accounts() -> Self { - Route::Accounts(AccountsRoute::Accounts) - } - - pub fn add_account() -> Self { - Route::Accounts(AccountsRoute::AddAccount) - } - - pub fn title<'a>(&self, columns: &'a Columns) -> ColumnTitle<'a> { + pub fn title(&self) -> ColumnTitle<'_> { match self { - Route::Timeline(tlr) => match tlr { - TimelineRoute::Timeline(id) => { - if let Some(timeline) = columns.find_timeline(*id) { - timeline.kind.to_title() - } else { - ColumnTitle::simple("Unknown") - } - } - TimelineRoute::Thread(_id) => ColumnTitle::simple("Thread"), - TimelineRoute::Reply(_id) => ColumnTitle::simple("Reply"), - TimelineRoute::Quote(_id) => ColumnTitle::simple("Quote"), - TimelineRoute::Profile(_pubkey) => ColumnTitle::simple("Profile"), - }, + Route::Timeline(kind) => kind.to_title(), + + Route::Reply(_id) => ColumnTitle::simple("Reply"), + Route::Quote(_id) => ColumnTitle::simple("Quote"), Route::Relays => ColumnTitle::simple("Relays"), @@ -292,14 +314,22 @@ impl Router { impl fmt::Display for Route { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { - Route::Timeline(tlr) => match tlr { - TimelineRoute::Timeline(name) => write!(f, "{}", name), - TimelineRoute::Thread(_id) => write!(f, "Thread"), - TimelineRoute::Profile(_id) => write!(f, "Profile"), - TimelineRoute::Reply(_id) => write!(f, "Reply"), - TimelineRoute::Quote(_id) => write!(f, "Quote"), + Route::Timeline(kind) => match kind { + TimelineKind::List(ListKind::Contact(_pk)) => write!(f, "Contacts"), + TimelineKind::Algo(AlgoTimeline::LastPerPubkey(ListKind::Contact(_))) => { + write!(f, "Last Per Pubkey (Contact)") + } + TimelineKind::Notifications(_) => write!(f, "Notifications"), + TimelineKind::Universe => write!(f, "Universe"), + TimelineKind::Generic(_) => write!(f, "Custom"), + TimelineKind::Hashtag(ht) => write!(f, "Hashtag ({})", ht), + TimelineKind::Thread(_id) => write!(f, "Thread"), + TimelineKind::Profile(_id) => write!(f, "Profile"), }, + Route::Reply(_id) => write!(f, "Reply"), + Route::Quote(_id) => write!(f, "Quote"), + Route::Relays => write!(f, "Relays"), Route::Accounts(amr) => match amr { diff --git a/crates/notedeck_columns/src/storage/decks.rs b/crates/notedeck_columns/src/storage/decks.rs index 8033814c..e6cb948d 100644 --- a/crates/notedeck_columns/src/storage/decks.rs +++ b/crates/notedeck_columns/src/storage/decks.rs @@ -1,7 +1,7 @@ use 
std::{collections::HashMap, fmt, str::FromStr}; use enostr::Pubkey; -use nostrdb::Ndb; +use nostrdb::{Ndb, Transaction}; use serde::{Deserialize, Serialize}; use tracing::{error, info}; @@ -9,16 +9,20 @@ use crate::{ column::{Columns, IntermediaryRoute}, decks::{Deck, Decks, DecksCache}, route::Route, - timeline::TimelineKind, + timeline::{TimelineCache, TimelineKind}, Error, }; use notedeck::{storage, DataPath, DataPathType, Directory}; -use tokenator::{ParseError, TokenParser, TokenSerializable, TokenWriter}; +use tokenator::{ParseError, TokenParser, TokenWriter}; pub static DECKS_CACHE_FILE: &str = "decks_cache.json"; -pub fn load_decks_cache(path: &DataPath, ndb: &Ndb) -> Option { +pub fn load_decks_cache( + path: &DataPath, + ndb: &Ndb, + timeline_cache: &mut TimelineCache, +) -> Option { let data_path = path.path(DataPathType::Setting); let decks_cache_str = match Directory::new(data_path).get_file(DECKS_CACHE_FILE.to_owned()) { @@ -35,7 +39,9 @@ pub fn load_decks_cache(path: &DataPath, ndb: &Ndb) -> Option { let serializable_decks_cache = serde_json::from_str::(&decks_cache_str).ok()?; - serializable_decks_cache.decks_cache(ndb).ok() + serializable_decks_cache + .decks_cache(ndb, timeline_cache) + .ok() } pub fn save_decks_cache(path: &DataPath, decks_cache: &DecksCache) { @@ -81,14 +87,17 @@ impl SerializableDecksCache { } } - pub fn decks_cache(self, ndb: &Ndb) -> Result { + pub fn decks_cache( + self, + ndb: &Ndb, + timeline_cache: &mut TimelineCache, + ) -> Result { let account_to_decks = self .decks_cache .into_iter() .map(|(pubkey, serializable_decks)| { - let deck_key = pubkey.bytes(); serializable_decks - .decks(ndb, deck_key) + .decks(ndb, timeline_cache, &pubkey) .map(|decks| (pubkey, decks)) }) .collect::, Error>>()?; @@ -142,12 +151,17 @@ impl SerializableDecks { } } - fn decks(self, ndb: &Ndb, deck_key: &[u8; 32]) -> Result { + fn decks( + self, + ndb: &Ndb, + timeline_cache: &mut TimelineCache, + deck_key: &Pubkey, + ) -> Result { Ok(Decks::from_decks( self.active_deck, self.decks .into_iter() - .map(|d| d.deck(ndb, deck_key)) + .map(|d| d.deck(ndb, timeline_cache, deck_key)) .collect::>()?, )) } @@ -252,8 +266,13 @@ impl SerializableDeck { SerializableDeck { metadata, columns } } - pub fn deck(self, ndb: &Ndb, deck_user: &[u8; 32]) -> Result { - let columns = deserialize_columns(ndb, deck_user, self.columns); + pub fn deck( + self, + ndb: &Ndb, + timeline_cache: &mut TimelineCache, + deck_user: &Pubkey, + ) -> Result { + let columns = deserialize_columns(ndb, timeline_cache, deck_user, self.columns); let deserialized_metadata = deserialize_metadata(self.metadata) .ok_or(Error::Generic("Could not deserialize metadata".to_owned()))?; @@ -292,7 +311,12 @@ fn serialize_columns(columns: &Columns) -> Vec> { cols_serialized } -fn deserialize_columns(ndb: &Ndb, deck_user: &[u8; 32], columns: Vec>) -> Columns { +fn deserialize_columns( + ndb: &Ndb, + timeline_cache: &mut TimelineCache, + deck_user: &Pubkey, + columns: Vec>, +) -> Columns { let mut cols = Columns::new(); for column in columns { let mut cur_routes = Vec::new(); @@ -301,11 +325,9 @@ fn deserialize_columns(ndb: &Ndb, deck_user: &[u8; 32], columns: Vec let tokens: Vec<&str> = route.split(":").collect(); let mut parser = TokenParser::new(&tokens); - match CleanIntermediaryRoute::parse_from_tokens(&mut parser) { + match CleanIntermediaryRoute::parse(&mut parser, deck_user) { Ok(route_intermediary) => { - if let Some(ir) = - route_intermediary.into_intermediary_route(ndb, Some(deck_user)) - { + if let Some(ir) = 
route_intermediary.into_intermediary_route(ndb) { cur_routes.push(ir); } } @@ -316,7 +338,7 @@ fn deserialize_columns(ndb: &Ndb, deck_user: &[u8; 32], columns: Vec } if !cur_routes.is_empty() { - cols.insert_intermediary_routes(cur_routes); + cols.insert_intermediary_routes(timeline_cache, cur_routes); } } @@ -329,48 +351,38 @@ enum CleanIntermediaryRoute { } impl CleanIntermediaryRoute { - fn into_intermediary_route( - self, - ndb: &Ndb, - user: Option<&[u8; 32]>, - ) -> Option { + fn into_intermediary_route(self, ndb: &Ndb) -> Option { match self { - CleanIntermediaryRoute::ToTimeline(timeline_kind) => Some(IntermediaryRoute::Timeline( - timeline_kind.into_timeline(ndb, user)?, - )), + CleanIntermediaryRoute::ToTimeline(timeline_kind) => { + let txn = Transaction::new(ndb).unwrap(); + Some(IntermediaryRoute::Timeline( + timeline_kind.into_timeline(&txn, ndb)?, + )) + } CleanIntermediaryRoute::ToRoute(route) => Some(IntermediaryRoute::Route(route)), } } -} -impl TokenSerializable for CleanIntermediaryRoute { - fn serialize_tokens(&self, writer: &mut TokenWriter) { - match self { - CleanIntermediaryRoute::ToTimeline(tlk) => { - tlk.serialize_tokens(writer); - } - CleanIntermediaryRoute::ToRoute(route) => { - route.serialize_tokens(writer); - } + fn parse<'a>( + parser: &mut TokenParser<'a>, + deck_author: &Pubkey, + ) -> Result> { + let timeline = parser.try_parse(|p| { + Ok(CleanIntermediaryRoute::ToTimeline(TimelineKind::parse( + p, + deck_author, + )?)) + }); + if timeline.is_ok() { + return timeline; } - } - fn parse_from_tokens<'a>(parser: &mut TokenParser<'a>) -> Result> { - TokenParser::alt( - parser, - &[ - |p| { - Ok(CleanIntermediaryRoute::ToTimeline( - TimelineKind::parse_from_tokens(p)?, - )) - }, - |p| { - Ok(CleanIntermediaryRoute::ToRoute(Route::parse_from_tokens( - p, - )?)) - }, - ], - ) + parser.try_parse(|p| { + Ok(CleanIntermediaryRoute::ToRoute(Route::parse( + p, + deck_author, + )?)) + }) } } diff --git a/crates/notedeck_columns/src/storage/migration.rs b/crates/notedeck_columns/src/storage/migration.rs deleted file mode 100644 index 65e41e50..00000000 --- a/crates/notedeck_columns/src/storage/migration.rs +++ /dev/null @@ -1,697 +0,0 @@ -use enostr::{NoteId, Pubkey}; -use nostrdb::Ndb; -use serde::{Deserialize, Deserializer}; -use tracing::error; - -use crate::{ - accounts::AccountsRoute, - column::{Columns, IntermediaryRoute}, - route::Route, - timeline::{kind::ListKind, PubkeySource, Timeline, TimelineId, TimelineKind, TimelineRoute}, - ui::add_column::AddColumnRoute, - Result, -}; - -use notedeck::{DataPath, DataPathType, Directory}; - -pub static COLUMNS_FILE: &str = "columns.json"; - -fn columns_json(path: &DataPath) -> Option { - let data_path = path.path(DataPathType::Setting); - Directory::new(data_path) - .get_file(COLUMNS_FILE.to_string()) - .ok() -} - -#[derive(Deserialize, Debug, PartialEq)] -enum MigrationTimelineRoute { - Timeline(u32), - Thread(String), - Profile(String), - Reply(String), - Quote(String), -} - -impl MigrationTimelineRoute { - fn timeline_route(self) -> Option { - match self { - MigrationTimelineRoute::Timeline(id) => { - Some(TimelineRoute::Timeline(TimelineId::new(id))) - } - MigrationTimelineRoute::Thread(note_id_hex) => { - Some(TimelineRoute::Thread(NoteId::from_hex(¬e_id_hex).ok()?)) - } - MigrationTimelineRoute::Profile(pubkey_hex) => { - Some(TimelineRoute::Profile(Pubkey::from_hex(&pubkey_hex).ok()?)) - } - MigrationTimelineRoute::Reply(note_id_hex) => { - Some(TimelineRoute::Reply(NoteId::from_hex(¬e_id_hex).ok()?)) - } - 
MigrationTimelineRoute::Quote(note_id_hex) => { - Some(TimelineRoute::Quote(NoteId::from_hex(¬e_id_hex).ok()?)) - } - } - } -} - -#[derive(Deserialize, Debug, PartialEq)] -enum MigrationRoute { - Timeline(MigrationTimelineRoute), - Accounts(MigrationAccountsRoute), - Relays, - ComposeNote, - AddColumn(MigrationAddColumnRoute), - Support, -} - -impl MigrationRoute { - fn route(self) -> Option { - match self { - MigrationRoute::Timeline(migration_timeline_route) => { - Some(Route::Timeline(migration_timeline_route.timeline_route()?)) - } - MigrationRoute::Accounts(migration_accounts_route) => { - Some(Route::Accounts(migration_accounts_route.accounts_route())) - } - MigrationRoute::Relays => Some(Route::Relays), - MigrationRoute::ComposeNote => Some(Route::ComposeNote), - MigrationRoute::AddColumn(migration_add_column_route) => Some(Route::AddColumn( - migration_add_column_route.add_column_route(), - )), - MigrationRoute::Support => Some(Route::Support), - } - } -} - -#[derive(Deserialize, Debug, PartialEq)] -enum MigrationAccountsRoute { - Accounts, - AddAccount, -} - -impl MigrationAccountsRoute { - fn accounts_route(self) -> AccountsRoute { - match self { - MigrationAccountsRoute::Accounts => AccountsRoute::Accounts, - MigrationAccountsRoute::AddAccount => AccountsRoute::AddAccount, - } - } -} - -#[derive(Deserialize, Debug, PartialEq)] -enum MigrationAddColumnRoute { - Base, - UndecidedNotification, - ExternalNotification, - Hashtag, -} - -impl MigrationAddColumnRoute { - fn add_column_route(self) -> AddColumnRoute { - match self { - MigrationAddColumnRoute::Base => AddColumnRoute::Base, - MigrationAddColumnRoute::UndecidedNotification => AddColumnRoute::UndecidedNotification, - MigrationAddColumnRoute::ExternalNotification => AddColumnRoute::ExternalNotification, - MigrationAddColumnRoute::Hashtag => AddColumnRoute::Hashtag, - } - } -} - -#[derive(Debug, PartialEq)] -struct MigrationColumn { - routes: Vec, -} - -impl<'de> Deserialize<'de> for MigrationColumn { - fn deserialize(deserializer: D) -> std::result::Result - where - D: Deserializer<'de>, - { - let routes = Vec::::deserialize(deserializer)?; - - Ok(MigrationColumn { routes }) - } -} - -#[derive(Deserialize, Debug)] -struct MigrationColumns { - columns: Vec, - timelines: Vec, -} - -#[derive(Deserialize, Debug, Clone, PartialEq)] -struct MigrationTimeline { - id: u32, - kind: MigrationTimelineKind, -} - -impl MigrationTimeline { - fn into_timeline(self, ndb: &Ndb, deck_user_pubkey: Option<&[u8; 32]>) -> Option { - self.kind - .into_timeline_kind()? 
- .into_timeline(ndb, deck_user_pubkey) - } -} - -#[derive(Deserialize, Clone, Debug, PartialEq)] -enum MigrationListKind { - Contact(MigrationPubkeySource), -} - -impl MigrationListKind { - fn list_kind(self) -> Option { - match self { - MigrationListKind::Contact(migration_pubkey_source) => { - Some(ListKind::Contact(migration_pubkey_source.pubkey_source()?)) - } - } - } -} - -#[derive(Deserialize, Clone, Debug, PartialEq)] -enum MigrationPubkeySource { - Explicit(String), - DeckAuthor, -} - -impl MigrationPubkeySource { - fn pubkey_source(self) -> Option { - match self { - MigrationPubkeySource::Explicit(hex) => { - Some(PubkeySource::Explicit(Pubkey::from_hex(hex.as_str()).ok()?)) - } - MigrationPubkeySource::DeckAuthor => Some(PubkeySource::DeckAuthor), - } - } -} - -#[derive(Deserialize, Clone, Debug, PartialEq)] -enum MigrationTimelineKind { - List(MigrationListKind), - Notifications(MigrationPubkeySource), - Profile(MigrationPubkeySource), - Universe, - Generic, - Hashtag(String), -} - -impl MigrationTimelineKind { - fn into_timeline_kind(self) -> Option { - match self { - MigrationTimelineKind::List(migration_list_kind) => { - Some(TimelineKind::List(migration_list_kind.list_kind()?)) - } - MigrationTimelineKind::Notifications(migration_pubkey_source) => Some( - TimelineKind::Notifications(migration_pubkey_source.pubkey_source()?), - ), - MigrationTimelineKind::Profile(migration_pubkey_source) => Some(TimelineKind::Profile( - migration_pubkey_source.pubkey_source()?, - )), - MigrationTimelineKind::Universe => Some(TimelineKind::Universe), - MigrationTimelineKind::Generic => Some(TimelineKind::Generic), - MigrationTimelineKind::Hashtag(hashtag) => Some(TimelineKind::Hashtag(hashtag)), - } - } -} - -impl MigrationColumns { - fn into_columns(self, ndb: &Ndb, deck_pubkey: Option<&[u8; 32]>) -> Columns { - let mut columns = Columns::default(); - - for column in self.columns { - let mut cur_routes = Vec::new(); - for route in column.routes { - match route { - MigrationRoute::Timeline(MigrationTimelineRoute::Timeline(timeline_id)) => { - if let Some(migration_tl) = - self.timelines.iter().find(|tl| tl.id == timeline_id) - { - let tl = migration_tl.clone().into_timeline(ndb, deck_pubkey); - if let Some(tl) = tl { - cur_routes.push(IntermediaryRoute::Timeline(tl)); - } else { - error!("Problem deserializing timeline {:?}", migration_tl); - } - } - } - MigrationRoute::Timeline(MigrationTimelineRoute::Thread(_thread)) => {} - _ => { - if let Some(route) = route.route() { - cur_routes.push(IntermediaryRoute::Route(route)); - } - } - } - } - if !cur_routes.is_empty() { - columns.insert_intermediary_routes(cur_routes); - } - } - columns - } -} - -fn string_to_columns( - serialized_columns: String, - ndb: &Ndb, - user: Option<&[u8; 32]>, -) -> Option { - Some( - deserialize_columns_string(serialized_columns) - .ok()? 
- .into_columns(ndb, user), - ) -} - -pub fn deserialize_columns(path: &DataPath, ndb: &Ndb, user: Option<&[u8; 32]>) -> Option { - string_to_columns(columns_json(path)?, ndb, user) -} - -fn deserialize_columns_string(serialized_columns: String) -> Result { - Ok( - serde_json::from_str::(&serialized_columns) - .map_err(notedeck::Error::Json)?, - ) -} - -#[cfg(test)] -mod tests { - use crate::storage::migration::{ - MigrationColumn, MigrationListKind, MigrationPubkeySource, MigrationRoute, - MigrationTimeline, MigrationTimelineKind, MigrationTimelineRoute, - }; - - impl MigrationColumn { - fn from_route(route: MigrationRoute) -> Self { - Self { - routes: vec![route], - } - } - - fn from_routes(routes: Vec) -> Self { - Self { routes } - } - } - - impl MigrationTimeline { - fn new(id: u32, kind: MigrationTimelineKind) -> Self { - Self { id, kind } - } - } - - use super::*; - - #[test] - fn multi_column() { - let route = r#"{"columns":[[{"Timeline":{"Timeline":2}}],[{"Timeline":{"Timeline":0}}],[{"Timeline":{"Timeline":1}}]],"timelines":[{"id":0,"kind":{"List":{"Contact":{"Explicit":"aa733081e4f0f79dd43023d8983265593f2b41a988671cfcef3f489b91ad93fe"}}}},{"id":1,"kind":{"Hashtag":"introductions"}},{"id":2,"kind":"Universe"}]}"#; // Multi-column - - let deserialized_columns = deserialize_columns_string(route.to_string()); - assert!(deserialized_columns.is_ok()); - - let migration_cols = deserialized_columns.unwrap(); - - assert_eq!(migration_cols.columns.len(), 3); - assert_eq!( - *migration_cols.columns.first().unwrap(), - MigrationColumn::from_route(MigrationRoute::Timeline( - MigrationTimelineRoute::Timeline(2) - )) - ); - - assert_eq!( - *migration_cols.columns.get(1).unwrap(), - MigrationColumn::from_route(MigrationRoute::Timeline( - MigrationTimelineRoute::Timeline(0) - )) - ); - - assert_eq!( - *migration_cols.columns.get(2).unwrap(), - MigrationColumn::from_route(MigrationRoute::Timeline( - MigrationTimelineRoute::Timeline(1) - )) - ); - - assert_eq!(migration_cols.timelines.len(), 3); - assert_eq!( - *migration_cols.timelines.first().unwrap(), - MigrationTimeline::new( - 0, - MigrationTimelineKind::List(MigrationListKind::Contact( - MigrationPubkeySource::Explicit( - "aa733081e4f0f79dd43023d8983265593f2b41a988671cfcef3f489b91ad93fe" - .to_owned() - ) - )) - ) - ); - assert_eq!( - *migration_cols.timelines.get(1).unwrap(), - MigrationTimeline::new( - 1, - MigrationTimelineKind::Hashtag("introductions".to_owned()) - ) - ); - - assert_eq!( - *migration_cols.timelines.get(2).unwrap(), - MigrationTimeline::new(2, MigrationTimelineKind::Universe) - ) - } - - #[test] - fn base() { - let route = r#"{"columns":[[{"AddColumn":"Base"}]],"timelines":[]}"#; - - let deserialized_columns = deserialize_columns_string(route.to_string()); - assert!(deserialized_columns.is_ok()); - - let migration_cols = deserialized_columns.unwrap(); - assert_eq!(migration_cols.columns.len(), 1); - assert_eq!( - *migration_cols.columns.first().unwrap(), - MigrationColumn::from_route(MigrationRoute::AddColumn(MigrationAddColumnRoute::Base)) - ); - - assert!(migration_cols.timelines.is_empty()); - } - - #[test] - fn universe() { - let route = r#"{"columns":[[{"Timeline":{"Timeline":0}}]],"timelines":[{"id":0,"kind":"Universe"}]}"#; - let deserialized_columns = deserialize_columns_string(route.to_string()); - assert!(deserialized_columns.is_ok()); - - let migration_cols = deserialized_columns.unwrap(); - assert_eq!(migration_cols.columns.len(), 1); - assert_eq!( - *migration_cols.columns.first().unwrap(), - 
MigrationColumn::from_route(MigrationRoute::Timeline( - MigrationTimelineRoute::Timeline(0) - )) - ); - - assert_eq!(migration_cols.timelines.len(), 1); - assert_eq!( - *migration_cols.timelines.first().unwrap(), - MigrationTimeline::new(0, MigrationTimelineKind::Universe) - ) - } - - #[test] - fn home() { - let route = r#"{"columns":[[{"Timeline":{"Timeline":2}}]],"timelines":[{"id":2,"kind":{"List":{"Contact":{"Explicit":"aa733081e4f0f79dd43023d8983265593f2b41a988671cfcef3f489b91ad93fe"}}}}]}"#; - - let deserialized_columns = deserialize_columns_string(route.to_string()); - assert!(deserialized_columns.is_ok()); - - let migration_cols = deserialized_columns.unwrap(); - assert_eq!(migration_cols.columns.len(), 1); - assert_eq!( - *migration_cols.columns.first().unwrap(), - MigrationColumn::from_route(MigrationRoute::Timeline( - MigrationTimelineRoute::Timeline(2) - )) - ); - - assert_eq!(migration_cols.timelines.len(), 1); - assert_eq!( - *migration_cols.timelines.first().unwrap(), - MigrationTimeline::new( - 2, - MigrationTimelineKind::List(MigrationListKind::Contact( - MigrationPubkeySource::Explicit( - "aa733081e4f0f79dd43023d8983265593f2b41a988671cfcef3f489b91ad93fe" - .to_owned() - ) - )) - ) - ) - } - - #[test] - fn thread() { - let route = r#"{"columns":[[{"Timeline":{"Timeline":7}},{"Timeline":{"Thread":"fb9b0c62bc91bbe28ca428fc85e310ae38795b94fb910e0f4e12962ced971f25"}}]],"timelines":[{"id":7,"kind":{"List":{"Contact":{"Explicit":"4a0510f26880d40e432f4865cb5714d9d3c200ca6ebb16b418ae6c555f574967"}}}}]}"#; - - let deserialized_columns = deserialize_columns_string(route.to_string()); - assert!(deserialized_columns.is_ok()); - - let migration_cols = deserialized_columns.unwrap(); - assert_eq!(migration_cols.columns.len(), 1); - assert_eq!( - *migration_cols.columns.first().unwrap(), - MigrationColumn::from_routes(vec![ - MigrationRoute::Timeline(MigrationTimelineRoute::Timeline(7),), - MigrationRoute::Timeline(MigrationTimelineRoute::Thread( - "fb9b0c62bc91bbe28ca428fc85e310ae38795b94fb910e0f4e12962ced971f25".to_owned() - )), - ]) - ); - - assert_eq!(migration_cols.timelines.len(), 1); - assert_eq!( - *migration_cols.timelines.first().unwrap(), - MigrationTimeline::new( - 7, - MigrationTimelineKind::List(MigrationListKind::Contact( - MigrationPubkeySource::Explicit( - "4a0510f26880d40e432f4865cb5714d9d3c200ca6ebb16b418ae6c555f574967" - .to_owned() - ) - )) - ) - ) - } - - #[test] - fn profile() { - let route = r#"{"columns":[[{"Timeline":{"Timeline":7}},{"Timeline":{"Profile":"32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245"}}]],"timelines":[{"id":7,"kind":{"List":{"Contact":{"Explicit":"4a0510f26880d40e432f4865cb5714d9d3c200ca6ebb16b418ae6c555f574967"}}}}]}"#; - - let deserialized_columns = deserialize_columns_string(route.to_string()); - assert!(deserialized_columns.is_ok()); - - let migration_cols = deserialized_columns.unwrap(); - assert_eq!(migration_cols.columns.len(), 1); - assert_eq!( - *migration_cols.columns.first().unwrap(), - MigrationColumn::from_routes(vec![ - MigrationRoute::Timeline(MigrationTimelineRoute::Timeline(7),), - MigrationRoute::Timeline(MigrationTimelineRoute::Profile( - "32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245".to_owned() - )), - ]) - ); - - assert_eq!(migration_cols.timelines.len(), 1); - assert_eq!( - *migration_cols.timelines.first().unwrap(), - MigrationTimeline::new( - 7, - MigrationTimelineKind::List(MigrationListKind::Contact( - MigrationPubkeySource::Explicit( - 
"4a0510f26880d40e432f4865cb5714d9d3c200ca6ebb16b418ae6c555f574967" - .to_owned() - ) - )) - ) - ) - } - - #[test] - fn your_notifs() { - let route = r#"{"columns":[[{"Timeline":{"Timeline":5}}]],"timelines":[{"id":5,"kind":{"Notifications":"DeckAuthor"}}]}"#; - - let deserialized_columns = deserialize_columns_string(route.to_string()); - assert!(deserialized_columns.is_ok()); - - let migration_cols = deserialized_columns.unwrap(); - assert_eq!(migration_cols.columns.len(), 1); - assert_eq!( - *migration_cols.columns.first().unwrap(), - MigrationColumn::from_route(MigrationRoute::Timeline( - MigrationTimelineRoute::Timeline(5) - )) - ); - - assert_eq!(migration_cols.timelines.len(), 1); - assert_eq!( - *migration_cols.timelines.first().unwrap(), - MigrationTimeline::new( - 5, - MigrationTimelineKind::Notifications(MigrationPubkeySource::DeckAuthor) - ) - ) - } - - #[test] - fn undecided_notifs() { - let route = r#"{"columns":[[{"AddColumn":"Base"},{"AddColumn":"UndecidedNotification"}]],"timelines":[]}"#; - - let deserialized_columns = deserialize_columns_string(route.to_string()); - assert!(deserialized_columns.is_ok()); - - let migration_cols = deserialized_columns.unwrap(); - assert_eq!(migration_cols.columns.len(), 1); - assert_eq!( - *migration_cols.columns.first().unwrap(), - MigrationColumn::from_routes(vec![ - MigrationRoute::AddColumn(MigrationAddColumnRoute::Base), - MigrationRoute::AddColumn(MigrationAddColumnRoute::UndecidedNotification), - ]) - ); - - assert!(migration_cols.timelines.is_empty()); - } - - #[test] - fn extern_notifs() { - let route = r#"{"columns":[[{"Timeline":{"Timeline":4}}]],"timelines":[{"id":4,"kind":{"Notifications":{"Explicit":"32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245"}}}]}"#; - - let deserialized_columns = deserialize_columns_string(route.to_string()); - assert!(deserialized_columns.is_ok()); - - let migration_cols = deserialized_columns.unwrap(); - assert_eq!(migration_cols.columns.len(), 1); - assert_eq!( - *migration_cols.columns.first().unwrap(), - MigrationColumn::from_route(MigrationRoute::Timeline( - MigrationTimelineRoute::Timeline(4) - )) - ); - - assert_eq!(migration_cols.timelines.len(), 1); - assert_eq!( - *migration_cols.timelines.first().unwrap(), - MigrationTimeline::new( - 4, - MigrationTimelineKind::Notifications(MigrationPubkeySource::Explicit( - "32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245".to_owned() - )) - ) - ) - } - - #[test] - fn hashtag() { - let route = r#"{"columns":[[{"Timeline":{"Timeline":6}}]],"timelines":[{"id":6,"kind":{"Hashtag":"notedeck"}}]}"#; - - let deserialized_columns = deserialize_columns_string(route.to_string()); - assert!(deserialized_columns.is_ok()); - - let migration_cols = deserialized_columns.unwrap(); - assert_eq!(migration_cols.columns.len(), 1); - assert_eq!( - *migration_cols.columns.first().unwrap(), - MigrationColumn::from_route(MigrationRoute::Timeline( - MigrationTimelineRoute::Timeline(6) - )) - ); - - assert_eq!(migration_cols.timelines.len(), 1); - assert_eq!( - *migration_cols.timelines.first().unwrap(), - MigrationTimeline::new(6, MigrationTimelineKind::Hashtag("notedeck".to_owned())) - ) - } - - #[test] - fn support() { - let route = r#"{"columns":[[{"AddColumn":"Base"},"Support"]],"timelines":[]}"#; - - let deserialized_columns = deserialize_columns_string(route.to_string()); - assert!(deserialized_columns.is_ok()); - - let migration_cols = deserialized_columns.unwrap(); - assert_eq!(migration_cols.columns.len(), 1); - assert_eq!( - 
*migration_cols.columns.first().unwrap(), - MigrationColumn::from_routes(vec![ - MigrationRoute::AddColumn(MigrationAddColumnRoute::Base), - MigrationRoute::Support - ]) - ); - - assert!(migration_cols.timelines.is_empty()); - } - - #[test] - fn post() { - let route = r#"{"columns":[[{"AddColumn":"Base"},"ComposeNote"]],"timelines":[]}"#; - - let deserialized_columns = deserialize_columns_string(route.to_string()); - assert!(deserialized_columns.is_ok()); - - let migration_cols = deserialized_columns.unwrap(); - assert_eq!(migration_cols.columns.len(), 1); - assert_eq!( - *migration_cols.columns.first().unwrap(), - MigrationColumn::from_routes(vec![ - MigrationRoute::AddColumn(MigrationAddColumnRoute::Base), - MigrationRoute::ComposeNote - ]) - ); - - assert!(migration_cols.timelines.is_empty()); - } - - #[test] - fn relay() { - let route = r#"{"columns":[[{"AddColumn":"Base"},"Relays"]],"timelines":[]}"#; - - let deserialized_columns = deserialize_columns_string(route.to_string()); - assert!(deserialized_columns.is_ok()); - - let migration_cols = deserialized_columns.unwrap(); - assert_eq!(migration_cols.columns.len(), 1); - assert_eq!( - *migration_cols.columns.first().unwrap(), - MigrationColumn::from_routes(vec![ - MigrationRoute::AddColumn(MigrationAddColumnRoute::Base), - MigrationRoute::Relays - ]) - ); - - assert!(migration_cols.timelines.is_empty()); - } - - #[test] - fn accounts() { - let route = - r#"{"columns":[[{"AddColumn":"Base"},{"Accounts":"Accounts"}]],"timelines":[]}"#; - - let deserialized_columns = deserialize_columns_string(route.to_string()); - assert!(deserialized_columns.is_ok()); - - let migration_cols = deserialized_columns.unwrap(); - assert_eq!(migration_cols.columns.len(), 1); - assert_eq!( - *migration_cols.columns.first().unwrap(), - MigrationColumn::from_routes(vec![ - MigrationRoute::AddColumn(MigrationAddColumnRoute::Base), - MigrationRoute::Accounts(MigrationAccountsRoute::Accounts), - ]) - ); - - assert!(migration_cols.timelines.is_empty()); - } - - #[test] - fn login() { - let route = r#"{"columns":[[{"AddColumn":"Base"},{"Accounts":"Accounts"},{"Accounts":"AddAccount"}]],"timelines":[]}"#; - - let deserialized_columns = deserialize_columns_string(route.to_string()); - assert!(deserialized_columns.is_ok()); - - let migration_cols = deserialized_columns.unwrap(); - assert_eq!(migration_cols.columns.len(), 1); - assert_eq!( - *migration_cols.columns.first().unwrap(), - MigrationColumn::from_routes(vec![ - MigrationRoute::AddColumn(MigrationAddColumnRoute::Base), - MigrationRoute::Accounts(MigrationAccountsRoute::Accounts), - MigrationRoute::Accounts(MigrationAccountsRoute::AddAccount), - ]) - ); - - assert!(migration_cols.timelines.is_empty()); - } -} diff --git a/crates/notedeck_columns/src/storage/mod.rs b/crates/notedeck_columns/src/storage/mod.rs index cda44eeb..58cc2e7f 100644 --- a/crates/notedeck_columns/src/storage/mod.rs +++ b/crates/notedeck_columns/src/storage/mod.rs @@ -1,5 +1,3 @@ mod decks; -mod migration; pub use decks::{load_decks_cache, save_decks_cache, DECKS_CACHE_FILE}; -pub use migration::{deserialize_columns, COLUMNS_FILE}; diff --git a/crates/notedeck_columns/src/subscriptions.rs b/crates/notedeck_columns/src/subscriptions.rs index 5396c17f..5abbb7fb 100644 --- a/crates/notedeck_columns/src/subscriptions.rs +++ b/crates/notedeck_columns/src/subscriptions.rs @@ -1,4 +1,4 @@ -use crate::timeline::{TimelineId, TimelineKind}; +use crate::timeline::TimelineKind; use std::collections::HashMap; use uuid::Uuid; @@ -16,7 +16,7 @@ pub enum 
SubKind { /// We are fetching a contact list so that we can use it for our follows /// Filter. // TODO: generalize this to any list? - FetchingContactList(TimelineId), + FetchingContactList(TimelineKind), } /// Subscriptions that need to be tracked at various stages. Sometimes we diff --git a/crates/notedeck_columns/src/thread.rs b/crates/notedeck_columns/src/thread.rs deleted file mode 100644 index 868314f5..00000000 --- a/crates/notedeck_columns/src/thread.rs +++ /dev/null @@ -1,27 +0,0 @@ -use crate::{multi_subscriber::MultiSubscriber, timeline::Timeline}; - -use nostrdb::FilterBuilder; -use notedeck::{RootNoteId, RootNoteIdBuf}; - -pub struct Thread { - pub timeline: Timeline, - pub subscription: Option, -} - -impl Thread { - pub fn new(root_id: RootNoteIdBuf) -> Self { - let timeline = Timeline::thread(root_id); - - Thread { - timeline, - subscription: None, - } - } - - pub fn filters_raw(root_id: RootNoteId<'_>) -> Vec { - vec![ - nostrdb::Filter::new().kinds([1]).event(root_id.bytes()), - nostrdb::Filter::new().ids([root_id.bytes()]).limit(1), - ] - } -} diff --git a/crates/notedeck_columns/src/timeline/cache.rs b/crates/notedeck_columns/src/timeline/cache.rs index e58d6f84..ca77e147 100644 --- a/crates/notedeck_columns/src/timeline/cache.rs +++ b/crates/notedeck_columns/src/timeline/cache.rs @@ -1,23 +1,21 @@ use crate::{ actionbar::TimelineOpenResult, + error::Error, multi_subscriber::MultiSubscriber, - profile::Profile, - thread::Thread, //subscriptions::SubRefs, - timeline::{PubkeySource, Timeline}, + timeline::{Timeline, TimelineKind}, }; -use notedeck::{NoteCache, NoteRef, RootNoteId, RootNoteIdBuf}; +use notedeck::{filter, FilterState, NoteCache, NoteRef}; -use enostr::{Pubkey, PubkeyRef, RelayPool}; -use nostrdb::{Filter, FilterBuilder, Ndb, Transaction}; +use enostr::RelayPool; +use nostrdb::{Filter, Ndb, Transaction}; use std::collections::HashMap; -use tracing::{debug, info, warn}; +use tracing::{debug, error, info, warn}; #[derive(Default)] pub struct TimelineCache { - pub threads: HashMap, - pub profiles: HashMap, + pub timelines: HashMap, } pub enum Vitality<'a, M> { @@ -41,102 +39,64 @@ impl<'a, M> Vitality<'a, M> { } } -#[derive(Hash, Debug, Copy, Clone)] -pub enum TimelineCacheKey<'a> { - Profile(PubkeyRef<'a>), - Thread(RootNoteId<'a>), -} - -impl<'a> TimelineCacheKey<'a> { - pub fn profile(pubkey: PubkeyRef<'a>) -> Self { - Self::Profile(pubkey) - } - - pub fn thread(root_id: RootNoteId<'a>) -> Self { - Self::Thread(root_id) - } - - pub fn bytes(&self) -> &[u8; 32] { - match self { - Self::Profile(pk) => pk.bytes(), - Self::Thread(root_id) => root_id.bytes(), - } - } - - /// The filters used to update our timeline cache - pub fn filters_raw(&self) -> Vec { - match self { - TimelineCacheKey::Thread(root_id) => Thread::filters_raw(*root_id), - - TimelineCacheKey::Profile(pubkey) => vec![Filter::new() - .authors([pubkey.bytes()]) - .kinds([1]) - .limit(notedeck::filter::default_limit())], - } - } - - pub fn filters_since(&self, since: u64) -> Vec { - self.filters_raw() - .into_iter() - .map(|fb| fb.since(since).build()) - .collect() - } - - pub fn filters(&self) -> Vec { - self.filters_raw() - .into_iter() - .map(|mut fb| fb.build()) - .collect() - } -} - impl TimelineCache { - fn contains_key(&self, key: TimelineCacheKey<'_>) -> bool { - match key { - TimelineCacheKey::Profile(pubkey) => self.profiles.contains_key(pubkey.bytes()), - TimelineCacheKey::Thread(root_id) => self.threads.contains_key(root_id.bytes()), + /// Pop a timeline from the timeline cache. 
This only removes the timeline
+    /// if it has reached 0 subscribers, meaning the last subscriber has
+    /// been removed
+    pub fn pop(
+        &mut self,
+        id: &TimelineKind,
+        ndb: &mut Ndb,
+        pool: &mut RelayPool,
+    ) -> Result<(), Error> {
+        let timeline = if let Some(timeline) = self.timelines.get_mut(id) {
+            timeline
+        } else {
+            return Err(Error::TimelineNotFound);
+        };
+
+        if let Some(sub) = &mut timeline.subscription {
+            // if this is the last subscriber, remove the timeline from cache
+            if sub.unsubscribe(ndb, pool) {
+                debug!(
+                    "popped last timeline {:?}, removing from timeline cache",
+                    id
+                );
+                self.timelines.remove(id);
+            }
+
+            Ok(())
+        } else {
+            Err(Error::MissingSubscription)
         }
     }
 
-    fn get_expected_mut(&mut self, key: TimelineCacheKey<'_>) -> &mut Timeline {
-        match key {
-            TimelineCacheKey::Profile(pubkey) => self
-                .profiles
-                .get_mut(pubkey.bytes())
-                .map(|p| &mut p.timeline),
-            TimelineCacheKey::Thread(root_id) => self
-                .threads
-                .get_mut(root_id.bytes())
-                .map(|t| &mut t.timeline),
-        }
-        .expect("expected notes in timline cache")
+    fn get_expected_mut(&mut self, key: &TimelineKind) -> &mut Timeline {
+        self.timelines
+            .get_mut(key)
+            .expect("expected notes in timeline cache")
     }
 
-    /// Insert a new profile or thread into the cache, based on the TimelineCacheKey
+    /// Insert a new timeline into the cache, based on the TimelineKind
     #[allow(clippy::too_many_arguments)]
     fn insert_new(
         &mut self,
-        id: TimelineCacheKey<'_>,
+        id: TimelineKind,
         txn: &Transaction,
         ndb: &Ndb,
         notes: &[NoteRef],
         note_cache: &mut NoteCache,
-        filters: Vec,
     ) {
-        match id {
-            TimelineCacheKey::Profile(pubkey) => {
-                let mut profile = Profile::new(PubkeySource::Explicit(pubkey.to_owned()), filters);
-                // insert initial notes into timeline
-                profile.timeline.insert_new(txn, ndb, note_cache, notes);
-                self.profiles.insert(pubkey.to_owned(), profile);
-            }
+        let mut timeline = if let Some(timeline) = id.clone().into_timeline(txn, ndb) {
+            timeline
+        } else {
+            error!("Error creating timeline from {:?}", &id);
+            return;
+        };
 
-            TimelineCacheKey::Thread(root_id) => {
-                let mut thread = Thread::new(root_id.to_owned());
-                thread.timeline.insert_new(txn, ndb, note_cache, notes);
-                self.threads.insert(root_id.to_owned(), thread);
-            }
-        }
+        // insert initial notes into timeline
+        timeline.insert_new(txn, ndb, note_cache, notes);
+        self.timelines.insert(id, timeline);
     }
 
     /// Get and/or update the notes associated with this timeline
@@ -145,24 +105,28 @@ impl TimelineCache {
         ndb: &Ndb,
         note_cache: &mut NoteCache,
         txn: &Transaction,
-        id: TimelineCacheKey<'a>,
+        id: &TimelineKind,
     ) -> Vitality<'a, Timeline> {
         // we can't use the naive hashmap entry API here because lookups
         // require a copy; wait until we have a raw entry API. We could
         // also use hashbrown? 
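        // A hedged sketch of that idea (hypothetical, not part of this patch):
        // hashbrown's raw-entry API would let us probe by reference and only
        // clone the TimelineKind key when actually inserting, e.g.
        //   self.timelines.raw_entry_mut().from_key(id)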
- if self.contains_key(id) { + if self.timelines.contains_key(id) { return Vitality::Stale(self.get_expected_mut(id)); } - let filters = id.filters(); - let notes = if let Ok(results) = ndb.query(txn, &filters, 1000) { - results - .into_iter() - .map(NoteRef::from_query_result) - .collect() + let notes = if let FilterState::Ready(filters) = id.filters(txn, ndb) { + if let Ok(results) = ndb.query(txn, &filters, 1000) { + results + .into_iter() + .map(NoteRef::from_query_result) + .collect() + } else { + debug!("got no results from TimelineCache lookup for {:?}", id); + vec![] + } } else { - debug!("got no results from TimelineCache lookup for {:?}", id); + // filter is not ready yet vec![] }; @@ -172,44 +136,37 @@ impl TimelineCache { info!("found NotesHolder with {} notes", notes.len()); } - self.insert_new(id, txn, ndb, ¬es, note_cache, filters); + self.insert_new(id.to_owned(), txn, ndb, ¬es, note_cache); Vitality::Fresh(self.get_expected_mut(id)) } - pub fn subscription( - &mut self, - id: TimelineCacheKey<'_>, - ) -> Option<&mut Option> { - match id { - TimelineCacheKey::Profile(pubkey) => self - .profiles - .get_mut(pubkey.bytes()) - .map(|p| &mut p.subscription), - TimelineCacheKey::Thread(root_id) => self - .threads - .get_mut(root_id.bytes()) - .map(|t| &mut t.subscription), - } - } - - pub fn open<'a>( + /// Open a timeline, this is another way of saying insert a timeline + /// into the timeline cache. If there exists a timeline already, we + /// bump its subscription reference count. If it's new we start a new + /// subscription + pub fn open( &mut self, ndb: &Ndb, note_cache: &mut NoteCache, txn: &Transaction, pool: &mut RelayPool, - id: TimelineCacheKey<'a>, - ) -> Option> { - let result = match self.notes(ndb, note_cache, txn, id) { + id: &TimelineKind, + ) -> Option { + let (open_result, timeline) = match self.notes(ndb, note_cache, txn, id) { Vitality::Stale(timeline) => { // The timeline cache is stale, let's update it - let notes = find_new_notes(timeline.all_or_any_notes(), id, txn, ndb); - let cached_timeline_result = if notes.is_empty() { + let notes = find_new_notes( + timeline.all_or_any_notes(), + timeline.subscription.as_ref().map(|s| &s.filters)?, + txn, + ndb, + ); + let open_result = if notes.is_empty() { None } else { let new_notes = notes.iter().map(|n| n.key).collect(); - Some(TimelineOpenResult::new_notes(new_notes, id)) + Some(TimelineOpenResult::new_notes(new_notes, id.clone())) }; // we can't insert and update the VirtualList now, because we @@ -217,42 +174,36 @@ impl TimelineCache { // result instead // // holder.get_view().insert(¬es); <-- no - cached_timeline_result + (open_result, timeline) } - Vitality::Fresh(_timeline) => None, + Vitality::Fresh(timeline) => (None, timeline), }; - let sub_id = if let Some(sub) = self.subscription(id) { - if let Some(multi_subscriber) = sub { - multi_subscriber.subscribe(ndb, pool); - multi_subscriber.sub.as_ref().map(|s| s.local) - } else { - let mut multi_sub = MultiSubscriber::new(id.filters()); - multi_sub.subscribe(ndb, pool); - let sub_id = multi_sub.sub.as_ref().map(|s| s.local); - *sub = Some(multi_sub); - sub_id - } + if let Some(multi_sub) = &mut timeline.subscription { + debug!("got open with *old* subscription for {:?}", &timeline.kind); + multi_sub.subscribe(ndb, pool); + } else if let Some(filter) = timeline.filter.get_any_ready() { + debug!("got open with *new* subscription for {:?}", &timeline.kind); + let mut multi_sub = MultiSubscriber::new(filter.clone()); + multi_sub.subscribe(ndb, pool); + 
timeline.subscription = Some(multi_sub);
+    } else {
+        // This should never happen; self.notes would have
+        // failed above if the filter wasn't ready
+        error!(
+            "open: filter not ready, so could not set up subscription. This should never happen"
+        );
     };
 
-    let timeline = self.get_expected_mut(id);
-    if let Some(sub_id) = sub_id {
-        timeline.subscription = Some(sub_id);
-    }
-
-    // TODO: We have subscription ids tracked in different places. Fix this
-
-    result
+    open_result
 }
 }
 
 /// Look for new thread notes since our last fetch
 fn find_new_notes(
     notes: &[NoteRef],
-    id: TimelineCacheKey<'_>,
+    filters: &[Filter],
     txn: &Transaction,
     ndb: &Ndb,
 ) -> Vec {
@@ -261,7 +212,7 @@ fn find_new_notes(
     }
 
     let last_note = notes[0];
-    let filters = id.filters_since(last_note.created_at + 1);
+    let filters = filter::make_filters_since(filters, last_note.created_at + 1);
 
     if let Ok(results) = ndb.query(txn, &filters, 1000) {
         debug!("got {} results from NotesHolder update", results.len());
diff --git a/crates/notedeck_columns/src/timeline/kind.rs b/crates/notedeck_columns/src/timeline/kind.rs
index eacc1d68..189d1134 100644
--- a/crates/notedeck_columns/src/timeline/kind.rs
+++ b/crates/notedeck_columns/src/timeline/kind.rs
@@ -1,23 +1,35 @@
 use crate::error::Error;
 use crate::timeline::{Timeline, TimelineTab};
-use enostr::{Filter, Pubkey};
+use enostr::{Filter, NoteId, Pubkey};
 use nostrdb::{Ndb, Transaction};
-use notedeck::{filter::default_limit, FilterError, FilterState, RootNoteIdBuf};
+use notedeck::{
+    filter::{self, default_limit},
+    FilterError, FilterState, NoteCache, RootIdError, RootNoteIdBuf,
+};
 use serde::{Deserialize, Serialize};
+use std::hash::{Hash, Hasher};
 use std::{borrow::Cow, fmt::Display};
 use tokenator::{ParseError, TokenParser, TokenSerializable, TokenWriter};
 use tracing::{error, warn};
 
-#[derive(Clone, Default, Debug, PartialEq, Eq, Serialize, Deserialize)]
+#[derive(Clone, Hash, Copy, Default, Debug, PartialEq, Eq, Serialize, Deserialize)]
 pub enum PubkeySource {
     Explicit(Pubkey),
     #[default]
     DeckAuthor,
 }
 
-#[derive(Debug, Clone, PartialEq, Eq)]
+#[derive(Debug, Clone, Copy, PartialEq, Hash, Eq)]
 pub enum ListKind {
-    Contact(PubkeySource),
+    Contact(Pubkey),
+}
+
+impl ListKind {
+    pub fn pubkey(&self) -> Option<&Pubkey> {
+        match self {
+            Self::Contact(pk) => Some(pk),
+        }
+    }
 }
 
 impl PubkeySource {
@@ -31,13 +43,6 @@ impl PubkeySource {
             PubkeySource::DeckAuthor => deck_author,
         }
     }
-
-    pub fn to_pubkey_bytes<'a>(&'a self, deck_author: &'a [u8; 32]) -> &'a [u8; 32] {
-        match self {
-            PubkeySource::Explicit(pk) => pk.bytes(),
-            PubkeySource::DeckAuthor => deck_author,
-        }
-    }
 }
 
 impl TokenSerializable for PubkeySource {
@@ -77,32 +82,18 @@ impl TokenSerializable for PubkeySource {
 }
 
 impl ListKind {
-    pub fn contact_list(pk_src: PubkeySource) -> Self {
-        ListKind::Contact(pk_src)
+    pub fn contact_list(pk: Pubkey) -> Self {
+        ListKind::Contact(pk)
     }
 
-    pub fn pubkey_source(&self) -> Option<&PubkeySource> {
-        match self {
-            ListKind::Contact(pk_src) => Some(pk_src),
-        }
-    }
-}
-
-impl TokenSerializable for ListKind {
-    fn serialize_tokens(&self, writer: &mut TokenWriter) {
-        match self {
-            ListKind::Contact(pk_src) => {
-                writer.write_token("contact");
-                pk_src.serialize_tokens(writer);
-            }
-        }
-    }
-
-    fn parse_from_tokens<'a>(parser: &mut TokenParser<'a>) -> Result> {
+    pub fn parse<'a>(
+        parser: &mut TokenParser<'a>,
+        deck_author: &Pubkey,
+    ) -> Result> {
         parser.parse_all(|p| {
             p.parse_token("contact")?;
             let pk_src = PubkeySource::parse_from_tokens(p)?;
-            Ok(ListKind::Contact(pk_src))
+            Ok(ListKind::Contact(*pk_src.to_pubkey(deck_author)))
         })
 
         /* here for u when you need more things to parse
@@ -120,8 +111,80 @@ impl TokenSerializable for ListKind {
         )
         */
     }
+
+    pub fn serialize_tokens(&self, writer: &mut TokenWriter) {
+        match self {
+            ListKind::Contact(pk) => {
+                writer.write_token("contact");
+                PubkeySource::pubkey(*pk).serialize_tokens(writer);
+            }
+        }
+    }
 }
 
+/// Thread selection hashing is done in a specific way. For TimelineCache
+/// lookups, we want to only let the root_id influence thread selection.
+/// This way Thread TimelineKinds always map to the same cached timeline
+/// for now (we will likely have to rework this since threads aren't
+/// *really* timelines)
+#[derive(Debug, Clone)]
+pub struct ThreadSelection {
+    pub root_id: RootNoteIdBuf,
+
+    /// The selected note, if different than the root_id. None here
+    /// means the root is selected
+    pub selected_note: Option,
+}
+
+impl ThreadSelection {
+    pub fn selected_or_root(&self) -> &[u8; 32] {
+        self.selected_note
+            .as_ref()
+            .map(|sn| sn.bytes())
+            .unwrap_or(self.root_id.bytes())
+    }
+
+    pub fn from_root_id(root_id: RootNoteIdBuf) -> Self {
+        Self {
+            root_id,
+            selected_note: None,
+        }
+    }
+
+    pub fn from_note_id(
+        ndb: &Ndb,
+        note_cache: &mut NoteCache,
+        txn: &Transaction,
+        note_id: NoteId,
+    ) -> Result {
+        let root_id = RootNoteIdBuf::new(ndb, note_cache, txn, note_id.bytes())?;
+        Ok(if root_id.bytes() == note_id.bytes() {
+            Self::from_root_id(root_id)
+        } else {
+            Self {
+                root_id,
+                selected_note: Some(note_id),
+            }
+        })
+    }
+}
+
+impl Hash for ThreadSelection {
+    fn hash(&self, state: &mut H) {
+        // only hash the root id for thread selection
+        self.root_id.hash(state)
+    }
+}
+
+// need this to only match root_id or else hash lookups will fail
+impl PartialEq for ThreadSelection {
+    fn eq(&self, other: &Self) -> bool {
+        self.root_id == other.root_id
+    }
+}
+
+impl Eq for ThreadSelection {}
 
 ///
 /// What kind of timeline is it?
 ///   - Follow List
@@ -130,24 +193,23 @@ impl TokenSerializable for ListKind {
 ///   - filter
 ///   - ... etc
 ///
-#[derive(Debug, Clone, PartialEq, Eq)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub enum TimelineKind {
     List(ListKind),
 
     /// The last note per pubkey
     Algo(AlgoTimeline),
 
-    Notifications(PubkeySource),
+    Notifications(Pubkey),
 
-    Profile(PubkeySource),
+    Profile(Pubkey),
 
-    /// This could be any note id, doesn't need to be the root id
-    Thread(RootNoteIdBuf),
+    Thread(ThreadSelection),
 
     Universe,
 
-    /// Generic filter
-    Generic,
+    /// Generic filter, references a hash of a filter
+    Generic(u64),
 
     Hashtag(String),
 }
@@ -155,86 +217,8 @@ pub enum TimelineKind {
 const NOTIFS_TOKEN_DEPRECATED: &str = "notifs";
 const NOTIFS_TOKEN: &str = "notifications";
 
-fn parse_hex_id<'a>(parser: &mut TokenParser<'a>) -> Result<[u8; 32], ParseError<'a>> {
-    let hex = parser.pull_token()?;
-    hex::decode(hex)
-        .map_err(|_| ParseError::HexDecodeFailed)?
- .as_slice() - .try_into() - .map_err(|_| ParseError::HexDecodeFailed) -} - -impl TokenSerializable for TimelineKind { - fn serialize_tokens(&self, writer: &mut TokenWriter) { - match self { - TimelineKind::List(list_kind) => list_kind.serialize_tokens(writer), - TimelineKind::Algo(algo_timeline) => algo_timeline.serialize_tokens(writer), - TimelineKind::Notifications(pk_src) => { - writer.write_token(NOTIFS_TOKEN); - pk_src.serialize_tokens(writer); - } - TimelineKind::Profile(pk_src) => { - writer.write_token("profile"); - pk_src.serialize_tokens(writer); - } - TimelineKind::Thread(root_note_id) => { - writer.write_token("thread"); - writer.write_token(&root_note_id.hex()); - } - TimelineKind::Universe => { - writer.write_token("universe"); - } - TimelineKind::Generic => { - writer.write_token("generic"); - } - TimelineKind::Hashtag(ht) => { - writer.write_token("hashtag"); - writer.write_token(ht); - } - } - } - - fn parse_from_tokens<'a>(parser: &mut TokenParser<'a>) -> Result> { - TokenParser::alt( - parser, - &[ - |p| Ok(TimelineKind::List(ListKind::parse_from_tokens(p)?)), - |p| Ok(TimelineKind::Algo(AlgoTimeline::parse_from_tokens(p)?)), - |p| { - // still handle deprecated form (notifs) - p.parse_any_token(&[NOTIFS_TOKEN, NOTIFS_TOKEN_DEPRECATED])?; - Ok(TimelineKind::Notifications( - PubkeySource::parse_from_tokens(p)?, - )) - }, - |p| { - p.parse_token("profile")?; - Ok(TimelineKind::Profile(PubkeySource::parse_from_tokens(p)?)) - }, - |p| { - p.parse_token("thread")?; - let note_id = RootNoteIdBuf::new_unsafe(parse_hex_id(p)?); - Ok(TimelineKind::Thread(note_id)) - }, - |p| { - p.parse_token("universe")?; - Ok(TimelineKind::Universe) - }, - |p| { - p.parse_token("generic")?; - Ok(TimelineKind::Generic) - }, - |p| { - p.parse_token("hashtag")?; - Ok(TimelineKind::Hashtag(p.pull_token()?.to_string())) - }, - ], - ) - } -} - /// Hardcoded algo timelines -#[derive(Debug, Clone, PartialEq, Eq)] +#[derive(Debug, Hash, Clone, Copy, PartialEq, Eq)] pub enum AlgoTimeline { /// LastPerPubkey: a special nostr query that fetches the last N /// notes for each pubkey on the list @@ -244,8 +228,8 @@ pub enum AlgoTimeline { /// The identifier for our last per pubkey algo const LAST_PER_PUBKEY_TOKEN: &str = "last_per_pubkey"; -impl TokenSerializable for AlgoTimeline { - fn serialize_tokens(&self, writer: &mut TokenWriter) { +impl AlgoTimeline { + pub fn serialize_tokens(&self, writer: &mut TokenWriter) { match self { AlgoTimeline::LastPerPubkey(list_kind) => { writer.write_token(LAST_PER_PUBKEY_TOKEN); @@ -254,16 +238,17 @@ impl TokenSerializable for AlgoTimeline { } } - fn parse_from_tokens<'a>(parser: &mut TokenParser<'a>) -> Result> { - TokenParser::alt( - parser, - &[|p| { - p.parse_all(|p| { - p.parse_token(LAST_PER_PUBKEY_TOKEN)?; - Ok(AlgoTimeline::LastPerPubkey(ListKind::parse_from_tokens(p)?)) - }) - }], - ) + pub fn parse<'a>( + parser: &mut TokenParser<'a>, + deck_author: &Pubkey, + ) -> Result> { + parser.parse_all(|p| { + p.parse_token(LAST_PER_PUBKEY_TOKEN)?; + Ok(AlgoTimeline::LastPerPubkey(ListKind::parse( + p, + deck_author, + )?)) + }) } } @@ -272,7 +257,7 @@ impl Display for TimelineKind { match self { TimelineKind::List(ListKind::Contact(_src)) => f.write_str("Contacts"), TimelineKind::Algo(AlgoTimeline::LastPerPubkey(_lk)) => f.write_str("Last Notes"), - TimelineKind::Generic => f.write_str("Timeline"), + TimelineKind::Generic(_) => f.write_str("Timeline"), TimelineKind::Notifications(_) => f.write_str("Notifications"), TimelineKind::Profile(_) => f.write_str("Profile"), 
TimelineKind::Universe => f.write_str("Universe"), @@ -283,14 +268,14 @@ impl Display for TimelineKind { } impl TimelineKind { - pub fn pubkey_source(&self) -> Option<&PubkeySource> { + pub fn pubkey(&self) -> Option<&Pubkey> { match self { - TimelineKind::List(list_kind) => list_kind.pubkey_source(), - TimelineKind::Algo(AlgoTimeline::LastPerPubkey(list_kind)) => list_kind.pubkey_source(), - TimelineKind::Notifications(pk_src) => Some(pk_src), - TimelineKind::Profile(pk_src) => Some(pk_src), + TimelineKind::List(list_kind) => list_kind.pubkey(), + TimelineKind::Algo(AlgoTimeline::LastPerPubkey(list_kind)) => list_kind.pubkey(), + TimelineKind::Notifications(pk) => Some(pk), + TimelineKind::Profile(pk) => Some(pk), TimelineKind::Universe => None, - TimelineKind::Generic => None, + TimelineKind::Generic(_) => None, TimelineKind::Hashtag(_ht) => None, TimelineKind::Thread(_ht) => None, } @@ -305,17 +290,108 @@ impl TimelineKind { TimelineKind::Notifications(_pk_src) => true, TimelineKind::Profile(_pk_src) => true, TimelineKind::Universe => true, - TimelineKind::Generic => true, + TimelineKind::Generic(_) => true, TimelineKind::Hashtag(_ht) => true, TimelineKind::Thread(_ht) => true, } } + pub fn serialize_tokens(&self, writer: &mut TokenWriter) { + match self { + TimelineKind::List(list_kind) => list_kind.serialize_tokens(writer), + TimelineKind::Algo(algo_timeline) => algo_timeline.serialize_tokens(writer), + TimelineKind::Notifications(pk) => { + writer.write_token(NOTIFS_TOKEN); + PubkeySource::pubkey(*pk).serialize_tokens(writer); + } + TimelineKind::Profile(pk) => { + writer.write_token("profile"); + PubkeySource::pubkey(*pk).serialize_tokens(writer); + } + TimelineKind::Thread(root_note_id) => { + writer.write_token("thread"); + writer.write_token(&root_note_id.root_id.hex()); + } + TimelineKind::Universe => { + writer.write_token("universe"); + } + TimelineKind::Generic(_usize) => { + // TODO: lookup filter and then serialize + writer.write_token("generic"); + } + TimelineKind::Hashtag(ht) => { + writer.write_token("hashtag"); + writer.write_token(ht); + } + } + } + + pub fn parse<'a>( + parser: &mut TokenParser<'a>, + deck_author: &Pubkey, + ) -> Result> { + let profile = parser.try_parse(|p| { + p.parse_token("profile")?; + let pk_src = PubkeySource::parse_from_tokens(p)?; + Ok(TimelineKind::Profile(*pk_src.to_pubkey(deck_author))) + }); + if profile.is_ok() { + return profile; + } + + let notifications = parser.try_parse(|p| { + // still handle deprecated form (notifs) + p.parse_any_token(&[NOTIFS_TOKEN, NOTIFS_TOKEN_DEPRECATED])?; + let pk_src = PubkeySource::parse_from_tokens(p)?; + Ok(TimelineKind::Notifications(*pk_src.to_pubkey(deck_author))) + }); + if notifications.is_ok() { + return notifications; + } + + let list_tl = + parser.try_parse(|p| Ok(TimelineKind::List(ListKind::parse(p, deck_author)?))); + if list_tl.is_ok() { + return list_tl; + } + + let algo_tl = + parser.try_parse(|p| Ok(TimelineKind::Algo(AlgoTimeline::parse(p, deck_author)?))); + if algo_tl.is_ok() { + return algo_tl; + } + + TokenParser::alt( + parser, + &[ + |p| { + p.parse_token("thread")?; + Ok(TimelineKind::Thread(ThreadSelection::from_root_id( + RootNoteIdBuf::new_unsafe(tokenator::parse_hex_id(p)?), + ))) + }, + |p| { + p.parse_token("universe")?; + Ok(TimelineKind::Universe) + }, + |p| { + p.parse_token("generic")?; + // TODO: generic filter serialization + Ok(TimelineKind::Generic(0)) + }, + |p| { + p.parse_token("hashtag")?; + Ok(TimelineKind::Hashtag(p.pull_token()?.to_string())) + }, + ], + ) + 
} + pub fn last_per_pubkey(list_kind: ListKind) -> Self { TimelineKind::Algo(AlgoTimeline::LastPerPubkey(list_kind)) } - pub fn contact_list(pk: PubkeySource) -> Self { + pub fn contact_list(pk: Pubkey) -> Self { TimelineKind::List(ListKind::contact_list(pk)) } @@ -323,51 +399,98 @@ impl TimelineKind { matches!(self, TimelineKind::List(ListKind::Contact(_))) } - pub fn profile(pk: PubkeySource) -> Self { + pub fn profile(pk: Pubkey) -> Self { TimelineKind::Profile(pk) } - pub fn thread(root_id: RootNoteIdBuf) -> Self { - TimelineKind::Thread(root_id) + pub fn thread(selected_note: ThreadSelection) -> Self { + TimelineKind::Thread(selected_note) } pub fn is_notifications(&self) -> bool { matches!(self, TimelineKind::Notifications(_)) } - pub fn notifications(pk: PubkeySource) -> Self { + pub fn notifications(pk: Pubkey) -> Self { TimelineKind::Notifications(pk) } - pub fn into_timeline(self, ndb: &Ndb, default_user: Option<&[u8; 32]>) -> Option { + // TODO: probably should set default limit here + pub fn filters(&self, txn: &Transaction, ndb: &Ndb) -> FilterState { + match self { + TimelineKind::Universe => FilterState::ready(universe_filter()), + + TimelineKind::List(list_k) => match list_k { + ListKind::Contact(pubkey) => contact_filter_state(txn, ndb, pubkey), + }, + + // TODO: still need to update this to fetch likes, zaps, etc + TimelineKind::Notifications(pubkey) => FilterState::ready(vec![Filter::new() + .pubkeys([pubkey.bytes()]) + .kinds([1]) + .limit(default_limit()) + .build()]), + + TimelineKind::Hashtag(hashtag) => FilterState::ready(vec![Filter::new() + .kinds([1]) + .limit(filter::default_limit()) + .tags([hashtag.clone()], 't') + .build()]), + + TimelineKind::Algo(algo_timeline) => match algo_timeline { + AlgoTimeline::LastPerPubkey(list_k) => match list_k { + ListKind::Contact(pubkey) => last_per_pubkey_filter_state(ndb, pubkey), + }, + }, + + TimelineKind::Generic(_) => { + todo!("implement generic filter lookups") + } + + TimelineKind::Thread(selection) => FilterState::ready(vec![ + nostrdb::Filter::new() + .kinds([1]) + .event(selection.root_id.bytes()) + .build(), + nostrdb::Filter::new() + .ids([selection.root_id.bytes()]) + .limit(1) + .build(), + ]), + + TimelineKind::Profile(pk) => FilterState::ready(vec![Filter::new() + .authors([pk.bytes()]) + .kinds([1]) + .limit(default_limit()) + .build()]), + } + } + + pub fn into_timeline(self, txn: &Transaction, ndb: &Ndb) -> Option { match self { TimelineKind::Universe => Some(Timeline::new( TimelineKind::Universe, - FilterState::ready(vec![Filter::new() - .kinds([1]) - .limit(default_limit()) - .build()]), + FilterState::ready(universe_filter()), TimelineTab::no_replies(), )), TimelineKind::Thread(root_id) => Some(Timeline::thread(root_id)), - TimelineKind::Generic => { + TimelineKind::Generic(_filter_id) => { warn!("you can't convert a TimelineKind::Generic to a Timeline"); + // TODO: you actually can! 
just need to look up the filter id None } - TimelineKind::Algo(AlgoTimeline::LastPerPubkey(ListKind::Contact(pk_src))) => { - let pk = match &pk_src { - PubkeySource::DeckAuthor => default_user?, - PubkeySource::Explicit(pk) => pk.bytes(), - }; + TimelineKind::Algo(AlgoTimeline::LastPerPubkey(ListKind::Contact(pk))) => { + let contact_filter = Filter::new() + .authors([pk.bytes()]) + .kinds([3]) + .limit(1) + .build(); - let contact_filter = Filter::new().authors([pk]).kinds([3]).limit(1).build(); - - let txn = Transaction::new(ndb).expect("txn"); let results = ndb - .query(&txn, &[contact_filter.clone()], 1) + .query(txn, &[contact_filter.clone()], 1) .expect("contact query failed?"); let kind_fn = TimelineKind::last_per_pubkey; @@ -375,13 +498,13 @@ impl TimelineKind { if results.is_empty() { return Some(Timeline::new( - kind_fn(ListKind::contact_list(pk_src)), + kind_fn(ListKind::contact_list(pk)), FilterState::needs_remote(vec![contact_filter.clone()]), tabs, )); } - let list_kind = ListKind::contact_list(pk_src); + let list_kind = ListKind::contact_list(pk); match Timeline::last_per_pubkey(&results[0].note, &list_kind) { Err(Error::App(notedeck::Error::Filter(FilterError::EmptyContactList))) => { @@ -399,39 +522,29 @@ impl TimelineKind { } } - TimelineKind::Profile(pk_src) => { - let pk = match &pk_src { - PubkeySource::DeckAuthor => default_user?, - PubkeySource::Explicit(pk) => pk.bytes(), - }; - + TimelineKind::Profile(pk) => { let filter = Filter::new() - .authors([pk]) + .authors([pk.bytes()]) .kinds([1]) .limit(default_limit()) .build(); Some(Timeline::new( - TimelineKind::profile(pk_src), + TimelineKind::profile(pk), FilterState::ready(vec![filter]), TimelineTab::full_tabs(), )) } - TimelineKind::Notifications(pk_src) => { - let pk = match &pk_src { - PubkeySource::DeckAuthor => default_user?, - PubkeySource::Explicit(pk) => pk.bytes(), - }; - + TimelineKind::Notifications(pk) => { let notifications_filter = Filter::new() - .pubkeys([pk]) + .pubkeys([pk.bytes()]) .kinds([1]) .limit(default_limit()) .build(); Some(Timeline::new( - TimelineKind::notifications(pk_src), + TimelineKind::notifications(pk), FilterState::ready(vec![notifications_filter]), TimelineTab::only_notes_and_replies(), )) @@ -439,42 +552,11 @@ impl TimelineKind { TimelineKind::Hashtag(hashtag) => Some(Timeline::hashtag(hashtag)), - TimelineKind::List(ListKind::Contact(pk_src)) => { - let pk = match &pk_src { - PubkeySource::DeckAuthor => default_user?, - PubkeySource::Explicit(pk) => pk.bytes(), - }; - - let contact_filter = Filter::new().authors([pk]).kinds([3]).limit(1).build(); - - let txn = Transaction::new(ndb).expect("txn"); - let results = ndb - .query(&txn, &[contact_filter.clone()], 1) - .expect("contact query failed?"); - - if results.is_empty() { - return Some(Timeline::new( - TimelineKind::contact_list(pk_src), - FilterState::needs_remote(vec![contact_filter.clone()]), - TimelineTab::full_tabs(), - )); - } - - match Timeline::contact_list(&results[0].note, pk_src.clone(), default_user) { - Err(Error::App(notedeck::Error::Filter(FilterError::EmptyContactList))) => { - Some(Timeline::new( - TimelineKind::contact_list(pk_src), - FilterState::needs_remote(vec![contact_filter]), - TimelineTab::full_tabs(), - )) - } - Err(e) => { - error!("Unexpected error: {e}"); - None - } - Ok(tl) => Some(tl), - } - } + TimelineKind::List(ListKind::Contact(pk)) => Some(Timeline::new( + TimelineKind::contact_list(pk), + contact_filter_state(txn, ndb, &pk), + TimelineTab::full_tabs(), + )), } } @@ -490,7 +572,7 @@ impl 
TimelineKind { TimelineKind::Profile(_pubkey_source) => ColumnTitle::needs_db(self), TimelineKind::Thread(_root_id) => ColumnTitle::simple("Thread"), TimelineKind::Universe => ColumnTitle::simple("Universe"), - TimelineKind::Generic => ColumnTitle::simple("Custom"), + TimelineKind::Generic(_) => ColumnTitle::simple("Custom"), TimelineKind::Hashtag(hashtag) => ColumnTitle::formatted(hashtag.to_string()), } } @@ -506,26 +588,15 @@ impl<'a> TitleNeedsDb<'a> { TitleNeedsDb { kind } } - pub fn title<'txn>( - &self, - txn: &'txn Transaction, - ndb: &Ndb, - deck_author: Option<&Pubkey>, - ) -> &'txn str { - if let TimelineKind::Profile(pubkey_source) = self.kind { - if let Some(deck_author) = deck_author { - let pubkey = pubkey_source.to_pubkey(deck_author); - let profile = ndb.get_profile_by_pubkey(txn, pubkey); - let m_name = profile - .as_ref() - .ok() - .map(|p| crate::profile::get_display_name(Some(p)).name()); + pub fn title<'txn>(&self, txn: &'txn Transaction, ndb: &Ndb) -> &'txn str { + if let TimelineKind::Profile(pubkey) = self.kind { + let profile = ndb.get_profile_by_pubkey(txn, pubkey); + let m_name = profile + .as_ref() + .ok() + .map(|p| crate::profile::get_display_name(Some(p)).name()); - m_name.unwrap_or("Profile") - } else { - // why would be there be no deck author? weird - "nostrich" - } + m_name.unwrap_or("Profile") } else { "Unknown" } @@ -553,3 +624,65 @@ impl<'a> ColumnTitle<'a> { Self::NeedsDb(TitleNeedsDb::new(kind)) } } + +fn contact_filter_state(txn: &Transaction, ndb: &Ndb, pk: &Pubkey) -> FilterState { + let contact_filter = Filter::new() + .authors([pk.bytes()]) + .kinds([3]) + .limit(1) + .build(); + + let results = ndb + .query(txn, &[contact_filter.clone()], 1) + .expect("contact query failed?"); + + if results.is_empty() { + FilterState::needs_remote(vec![contact_filter.clone()]) + } else { + let with_hashtags = false; + match filter::filter_from_tags(&results[0].note, Some(pk.bytes()), with_hashtags) { + Err(notedeck::Error::Filter(FilterError::EmptyContactList)) => { + FilterState::needs_remote(vec![contact_filter]) + } + Err(err) => { + error!("Error getting contact filter state: {err}"); + FilterState::Broken(FilterError::EmptyContactList) + } + Ok(filter) => FilterState::ready(filter.into_follow_filter()), + } + } +} + +fn last_per_pubkey_filter_state(ndb: &Ndb, pk: &Pubkey) -> FilterState { + let contact_filter = Filter::new() + .authors([pk.bytes()]) + .kinds([3]) + .limit(1) + .build(); + + let txn = Transaction::new(ndb).expect("txn"); + let results = ndb + .query(&txn, &[contact_filter.clone()], 1) + .expect("contact query failed?"); + + if results.is_empty() { + FilterState::needs_remote(vec![contact_filter]) + } else { + let kind = 1; + let notes_per_pk = 1; + match filter::last_n_per_pubkey_from_tags(&results[0].note, kind, notes_per_pk) { + Err(notedeck::Error::Filter(FilterError::EmptyContactList)) => { + FilterState::needs_remote(vec![contact_filter]) + } + Err(err) => { + error!("Error getting contact filter state: {err}"); + FilterState::Broken(FilterError::EmptyContactList) + } + Ok(filter) => FilterState::ready(filter), + } + } +} + +fn universe_filter() -> Vec { + vec![Filter::new().kinds([1]).limit(default_limit()).build()] +} diff --git a/crates/notedeck_columns/src/timeline/mod.rs b/crates/notedeck_columns/src/timeline/mod.rs index 85a0c446..886bfea6 100644 --- a/crates/notedeck_columns/src/timeline/mod.rs +++ b/crates/notedeck_columns/src/timeline/mod.rs @@ -1,26 +1,19 @@ use crate::{ - column::Columns, - decks::DecksCache, error::Error, 
+ multi_subscriber::MultiSubscriber, subscriptions::{self, SubKind, Subscriptions}, - thread::Thread, timeline::kind::ListKind, Result, }; use notedeck::{ - filter, CachedNote, FilterError, FilterState, FilterStates, NoteCache, NoteRef, RootNoteIdBuf, - UnknownIds, + filter, CachedNote, FilterError, FilterState, FilterStates, NoteCache, NoteRef, UnknownIds, }; -use std::fmt; -use std::sync::atomic::{AtomicU32, Ordering}; - use egui_virtual_list::VirtualList; use enostr::{PoolRelay, Pubkey, RelayPool}; -use nostrdb::{Filter, Ndb, Note, NoteKey, Subscription, Transaction}; +use nostrdb::{Filter, Ndb, Note, NoteKey, Transaction}; use std::cell::RefCell; -use std::hash::Hash; use std::rc::Rc; use tracing::{debug, error, info, warn}; @@ -29,17 +22,26 @@ pub mod cache; pub mod kind; pub mod route; -pub use cache::{TimelineCache, TimelineCacheKey}; -pub use kind::{ColumnTitle, PubkeySource, TimelineKind}; -pub use route::TimelineRoute; +pub use cache::TimelineCache; +pub use kind::{ColumnTitle, PubkeySource, ThreadSelection, TimelineKind}; -#[derive(Debug, Hash, Copy, Clone, Eq, PartialEq)] -pub struct TimelineId(u32); +//#[derive(Debug, Hash, Clone, Eq, PartialEq)] +//pub type TimelineId = TimelineKind; + +/* impl TimelineId { - pub fn new(id: u32) -> Self { + pub fn kind(&self) -> &TimelineKind { + &self.kind + } + + pub fn new(id: TimelineKind) -> Self { TimelineId(id) } + + pub fn profile(pubkey: Pubkey) -> Self { + TimelineId::new(TimelineKind::Profile(PubkeySource::pubkey(pubkey))) + } } impl fmt::Display for TimelineId { @@ -47,6 +49,7 @@ impl fmt::Display for TimelineId { write!(f, "TimelineId({})", self.0) } } +*/ #[derive(Copy, Clone, Eq, PartialEq, Debug, Default)] pub enum ViewFilter { @@ -186,7 +189,6 @@ impl TimelineTab { /// A column in a deck. Holds navigation state, loaded notes, column kind, etc. #[derive(Debug)] pub struct Timeline { - pub id: TimelineId, pub kind: TimelineKind, // We may not have the filter loaded yet, so let's make it an option so // that codepaths have to explicitly handle it @@ -194,35 +196,36 @@ pub struct Timeline { pub views: Vec, pub selected_view: usize, - pub subscription: Option, + pub subscription: Option, } impl Timeline { /// Create a timeline from a contact list - pub fn contact_list( - contact_list: &Note, - pk_src: PubkeySource, - deck_author: Option<&[u8; 32]>, - ) -> Result { - let our_pubkey = deck_author.map(|da| pk_src.to_pubkey_bytes(da)); + pub fn contact_list(contact_list: &Note, pubkey: &[u8; 32]) -> Result { let with_hashtags = false; - let filter = - filter::filter_from_tags(contact_list, our_pubkey, with_hashtags)?.into_follow_filter(); + let filter = filter::filter_from_tags(contact_list, Some(pubkey), with_hashtags)? 
+ .into_follow_filter(); Ok(Timeline::new( - TimelineKind::contact_list(pk_src), + TimelineKind::contact_list(Pubkey::new(*pubkey)), FilterState::ready(filter), TimelineTab::full_tabs(), )) } - pub fn thread(note_id: RootNoteIdBuf) -> Self { - let filter = Thread::filters_raw(note_id.borrow()) - .iter_mut() - .map(|fb| fb.build()) - .collect(); + pub fn thread(selection: ThreadSelection) -> Self { + let filter = vec![ + nostrdb::Filter::new() + .kinds([1]) + .event(selection.root_id.bytes()) + .build(), + nostrdb::Filter::new() + .ids([selection.root_id.bytes()]) + .limit(1) + .build(), + ]; Timeline::new( - TimelineKind::Thread(note_id), + TimelineKind::Thread(selection), FilterState::ready(filter), TimelineTab::only_notes_and_replies(), ) @@ -234,7 +237,7 @@ impl Timeline { let filter = filter::last_n_per_pubkey_from_tags(list, kind, notes_per_pk)?; Ok(Timeline::new( - TimelineKind::last_per_pubkey(list_kind.clone()), + TimelineKind::last_per_pubkey(*list_kind), FilterState::ready(filter), TimelineTab::only_notes_and_replies(), )) @@ -254,25 +257,20 @@ impl Timeline { ) } - pub fn make_view_id(id: TimelineId, selected_view: usize) -> egui::Id { + pub fn make_view_id(id: &TimelineKind, selected_view: usize) -> egui::Id { egui::Id::new((id, selected_view)) } pub fn view_id(&self) -> egui::Id { - Timeline::make_view_id(self.id, self.selected_view) + Timeline::make_view_id(&self.kind, self.selected_view) } pub fn new(kind: TimelineKind, filter_state: FilterState, views: Vec) -> Self { - // global unique id for all new timelines - static UIDS: AtomicU32 = AtomicU32::new(0); - let filter = FilterStates::new(filter_state); - let subscription: Option = None; + let subscription: Option = None; let selected_view = 0; - let id = TimelineId::new(UIDS.fetch_add(1, Ordering::Relaxed)); Timeline { - id, kind, filter, views, @@ -417,6 +415,8 @@ impl Timeline { let sub = self .subscription + .as_ref() + .and_then(|s| s.local_subid) .ok_or(Error::App(notedeck::Error::no_active_sub()))?; let new_note_ids = ndb.poll_for_notes(sub, 500); @@ -484,10 +484,9 @@ pub fn setup_new_timeline( pool: &mut RelayPool, note_cache: &mut NoteCache, since_optimize: bool, - our_pk: Option<&Pubkey>, ) { // if we're ready, setup local subs - if is_timeline_ready(ndb, pool, note_cache, timeline, our_pk) { + if is_timeline_ready(ndb, pool, note_cache, timeline) { if let Err(err) = setup_timeline_nostrdb_sub(ndb, note_cache, timeline) { error!("setup_new_timeline: {err}"); } @@ -505,7 +504,7 @@ pub fn setup_new_timeline( pub fn send_initial_timeline_filters( ndb: &Ndb, since_optimize: bool, - columns: &mut Columns, + timeline_cache: &mut TimelineCache, subs: &mut Subscriptions, pool: &mut RelayPool, relay_id: &str, @@ -513,7 +512,7 @@ pub fn send_initial_timeline_filters( info!("Sending initial filters to {}", relay_id); let relay = &mut pool.relays.iter_mut().find(|r| r.url() == relay_id)?; - for timeline in columns.timelines_mut() { + for (_kind, timeline) in timeline_cache.timelines.iter_mut() { send_initial_timeline_filter(ndb, since_optimize, subs, relay, timeline); } @@ -527,7 +526,7 @@ pub fn send_initial_timeline_filter( relay: &mut PoolRelay, timeline: &mut Timeline, ) { - let filter_state = timeline.filter.get(relay.url()); + let filter_state = timeline.filter.get_mut(relay.url()); match filter_state { FilterState::Broken(err) => { @@ -567,7 +566,7 @@ pub fn send_initial_timeline_filter( if can_since_optimize && filter::should_since_optimize(lim, notes.len()) { filter = filter::since_optimize_filter(filter, notes); } 
else {
-        warn!("Skipping since optimization for {:?}: number of local notes is less than limit, attempting to backfill.", filter);
+        warn!("Skipping since optimization for {:?}: number of local notes is less than limit, attempting to backfill.", &timeline.kind);
     }
 
     filter
@@ -596,7 +595,7 @@ fn fetch_contact_list(
     relay: &mut PoolRelay,
     timeline: &mut Timeline,
 ) {
-    let sub_kind = SubKind::FetchingContactList(timeline.id);
+    let sub_kind = SubKind::FetchingContactList(timeline.kind.clone());
     let sub_id = subscriptions::new_sub_id();
     let local_sub = ndb.subscribe(&filter).expect("sub");
 
@@ -621,9 +620,21 @@ fn setup_initial_timeline(
 ) -> Result<()> {
     // some timelines are one-shot and are refreshed, like the last_per_pubkey algo feed
     if timeline.kind.should_subscribe_locally() {
-        timeline.subscription = Some(ndb.subscribe(filters)?);
+        let local_sub = ndb.subscribe(filters)?;
+        match &mut timeline.subscription {
+            None => {
+                timeline.subscription = Some(MultiSubscriber::with_initial_local_sub(
+                    local_sub,
+                    filters.to_vec(),
+                ));
+            }
+
+            Some(msub) => {
+                msub.local_subid = Some(local_sub);
+            }
+        };
     }
-    let txn = Transaction::new(ndb)?;
+
     debug!(
         "querying nostrdb sub {:?} {:?}",
         timeline.subscription, timeline.filter
@@ -634,6 +645,7 @@
         lim += filter.limit().unwrap_or(1) as i32;
     }
 
+    let txn = Transaction::new(ndb)?;
     let notes: Vec<NoteRef> = ndb
         .query(&txn, filters, lim)?
         .into_iter()
@@ -648,15 +660,11 @@
 pub fn setup_initial_nostrdb_subs(
     ndb: &Ndb,
     note_cache: &mut NoteCache,
-    decks_cache: &mut DecksCache,
+    timeline_cache: &mut TimelineCache,
 ) -> Result<()> {
-    for decks in decks_cache.get_all_decks_mut() {
-        for deck in decks.decks_mut() {
-            for timeline in deck.columns_mut().timelines_mut() {
-                if let Err(err) = setup_timeline_nostrdb_sub(ndb, note_cache, timeline) {
-                    error!("setup_initial_nostrdb_subs: {err}");
-                }
-            }
+    for (_kind, timeline) in timeline_cache.timelines.iter_mut() {
+        if let Err(err) = setup_timeline_nostrdb_sub(ndb, note_cache, timeline) {
+            error!("setup_initial_nostrdb_subs: {err}");
         }
     }
 
@@ -688,7 +696,6 @@ pub fn is_timeline_ready(
     pool: &mut RelayPool,
     note_cache: &mut NoteCache,
     timeline: &mut Timeline,
-    our_pk: Option<&Pubkey>,
 ) -> bool {
     // TODO: we should debounce the filter states a bit to make sure we have
     // seen all of the different contact lists from each relay
     let filter = {
         let txn = Transaction::new(ndb).expect("txn");
         let note = ndb.get_note_by_key(&txn, note_key).expect("note");
-        let add_pk = timeline
-            .kind
-            .pubkey_source()
-            .as_ref()
-            .and_then(|pk_src| our_pk.map(|pk| pk_src.to_pubkey_bytes(pk)));
+        let add_pk = timeline.kind.pubkey().map(|pk| pk.bytes());
 
         filter::filter_from_tags(&note, add_pk, with_hashtags).map(|f| f.into_follow_filter())
     };
diff --git a/crates/notedeck_columns/src/timeline/route.rs b/crates/notedeck_columns/src/timeline/route.rs
index 89ff5ee8..ecbb595b 100644
--- a/crates/notedeck_columns/src/timeline/route.rs
+++ b/crates/notedeck_columns/src/timeline/route.rs
@@ -1,124 +1,44 @@
 use crate::{
-    column::Columns,
-    draft::Drafts,
     nav::RenderNavAction,
     profile::ProfileAction,
-    timeline::{TimelineCache, TimelineId, TimelineKind},
-    ui::{
-        self,
-        note::{NoteOptions, QuoteRepostView},
-        profile::ProfileView,
-    },
+    timeline::{TimelineCache, TimelineKind},
+    ui::{self, note::NoteOptions, profile::ProfileView},
 };
 
-use tokenator::{ParseError, TokenParser, TokenSerializable, TokenWriter};
-
-use enostr::{NoteId, Pubkey};
-use
nostrdb::{Ndb, Transaction}; +use enostr::Pubkey; +use nostrdb::Ndb; use notedeck::{Accounts, ImageCache, MuteFun, NoteCache, UnknownIds}; -#[derive(Debug, Eq, PartialEq, Clone, Copy)] -pub enum TimelineRoute { - Timeline(TimelineId), - Thread(NoteId), - Profile(Pubkey), - Reply(NoteId), - Quote(NoteId), -} - -fn parse_pubkey<'a>(parser: &mut TokenParser<'a>) -> Result> { - let hex = parser.pull_token()?; - Pubkey::from_hex(hex).map_err(|_| ParseError::HexDecodeFailed) -} - -fn parse_note_id<'a>(parser: &mut TokenParser<'a>) -> Result> { - let hex = parser.pull_token()?; - NoteId::from_hex(hex).map_err(|_| ParseError::HexDecodeFailed) -} - -impl TokenSerializable for TimelineRoute { - fn serialize_tokens(&self, writer: &mut TokenWriter) { - match self { - TimelineRoute::Profile(pk) => { - writer.write_token("profile"); - writer.write_token(&pk.hex()); - } - TimelineRoute::Thread(note_id) => { - writer.write_token("thread"); - writer.write_token(¬e_id.hex()); - } - TimelineRoute::Reply(note_id) => { - writer.write_token("reply"); - writer.write_token(¬e_id.hex()); - } - TimelineRoute::Quote(note_id) => { - writer.write_token("quote"); - writer.write_token(¬e_id.hex()); - } - TimelineRoute::Timeline(_tlid) => { - todo!("tlid") - } - } - } - - fn parse_from_tokens<'a>(parser: &mut TokenParser<'a>) -> Result> { - TokenParser::alt( - parser, - &[ - |p| { - p.parse_token("profile")?; - Ok(TimelineRoute::Profile(parse_pubkey(p)?)) - }, - |p| { - p.parse_token("thread")?; - Ok(TimelineRoute::Thread(parse_note_id(p)?)) - }, - |p| { - p.parse_token("reply")?; - Ok(TimelineRoute::Reply(parse_note_id(p)?)) - }, - |p| { - p.parse_token("quote")?; - Ok(TimelineRoute::Quote(parse_note_id(p)?)) - }, - |_p| todo!("handle timeline parsing"), - ], - ) - } -} - #[allow(clippy::too_many_arguments)] pub fn render_timeline_route( ndb: &Ndb, - columns: &mut Columns, - drafts: &mut Drafts, img_cache: &mut ImageCache, unknown_ids: &mut UnknownIds, note_cache: &mut NoteCache, timeline_cache: &mut TimelineCache, accounts: &mut Accounts, - route: TimelineRoute, + kind: &TimelineKind, col: usize, textmode: bool, + depth: usize, ui: &mut egui::Ui, ) -> Option { - match route { - TimelineRoute::Timeline(timeline_id) => { - let note_options = { - let is_universe = if let Some(timeline) = columns.find_timeline(timeline_id) { - timeline.kind == TimelineKind::Universe - } else { - false - }; - - let mut options = NoteOptions::new(is_universe); - options.set_textmode(textmode); - options - }; + let note_options = { + let mut options = NoteOptions::new(kind == &TimelineKind::Universe); + options.set_textmode(textmode); + options + }; + match kind { + TimelineKind::List(_) + | TimelineKind::Algo(_) + | TimelineKind::Notifications(_) + | TimelineKind::Universe + | TimelineKind::Hashtag(_) + | TimelineKind::Generic(_) => { let note_action = ui::TimelineView::new( - timeline_id, - columns, + kind, + timeline_cache, ndb, note_cache, img_cache, @@ -130,89 +50,50 @@ pub fn render_timeline_route( note_action.map(RenderNavAction::NoteAction) } - TimelineRoute::Thread(id) => ui::ThreadView::new( + TimelineKind::Profile(pubkey) => { + if depth > 1 { + render_profile_route( + pubkey, + accounts, + ndb, + timeline_cache, + img_cache, + note_cache, + unknown_ids, + col, + ui, + &accounts.mutefun(), + ) + } else { + // we render profiles like timelines if they are at the root + let note_action = ui::TimelineView::new( + kind, + timeline_cache, + ndb, + note_cache, + img_cache, + note_options, + &accounts.mutefun(), + ) + .ui(ui); + + 
note_action.map(RenderNavAction::NoteAction) + } + } + + TimelineKind::Thread(id) => ui::ThreadView::new( timeline_cache, ndb, note_cache, unknown_ids, img_cache, - id.bytes(), + id.selected_or_root(), textmode, &accounts.mutefun(), ) .id_source(egui::Id::new(("threadscroll", col))) .ui(ui) .map(Into::into), - - TimelineRoute::Reply(id) => { - let txn = if let Ok(txn) = Transaction::new(ndb) { - txn - } else { - ui.label("Reply to unknown note"); - return None; - }; - - let note = if let Ok(note) = ndb.get_note_by_id(&txn, id.bytes()) { - note - } else { - ui.label("Reply to unknown note"); - return None; - }; - - let id = egui::Id::new(("post", col, note.key().unwrap())); - let poster = accounts.selected_or_first_nsec()?; - - let action = { - let draft = drafts.reply_mut(note.id()); - - let response = egui::ScrollArea::vertical().show(ui, |ui| { - ui::PostReplyView::new(ndb, poster, draft, note_cache, img_cache, ¬e) - .id_source(id) - .show(ui) - }); - - response.inner.action - }; - - action.map(Into::into) - } - - TimelineRoute::Profile(pubkey) => render_profile_route( - &pubkey, - accounts, - ndb, - timeline_cache, - img_cache, - note_cache, - unknown_ids, - col, - ui, - &accounts.mutefun(), - ), - - TimelineRoute::Quote(id) => { - let txn = Transaction::new(ndb).expect("txn"); - - let note = if let Ok(note) = ndb.get_note_by_id(&txn, id.bytes()) { - note - } else { - ui.label("Quote of unknown note"); - return None; - }; - - let id = egui::Id::new(("post", col, note.key().unwrap())); - - let poster = accounts.selected_or_first_nsec()?; - let draft = drafts.quote_mut(note.id()); - - let response = egui::ScrollArea::vertical().show(ui, |ui| { - QuoteRepostView::new(ndb, poster, note_cache, img_cache, draft, ¬e) - .id_source(id) - .show(ui) - }); - - response.inner.action.map(Into::into) - } } } @@ -262,22 +143,26 @@ mod tests { use enostr::NoteId; use tokenator::{TokenParser, TokenSerializable, TokenWriter}; + use crate::timeline::{ThreadSelection, TimelineKind}; + use enostr::Pubkey; + use notedeck::RootNoteIdBuf; + #[test] fn test_timeline_route_serialize() { - use super::TimelineRoute; + use super::TimelineKind; - { - let note_id_hex = "1c54e5b0c386425f7e017d9e068ddef8962eb2ce1bb08ed27e24b93411c12e60"; - let note_id = NoteId::from_hex(note_id_hex).unwrap(); - let data_str = format!("thread:{}", note_id_hex); - let data = &data_str.split(":").collect::>(); - let mut token_writer = TokenWriter::default(); - let mut parser = TokenParser::new(&data); - let parsed = TimelineRoute::parse_from_tokens(&mut parser).unwrap(); - let expected = TimelineRoute::Thread(note_id); - parsed.serialize_tokens(&mut token_writer); - assert_eq!(expected, parsed); - assert_eq!(token_writer.str(), data_str); - } + let note_id_hex = "1c54e5b0c386425f7e017d9e068ddef8962eb2ce1bb08ed27e24b93411c12e60"; + let note_id = NoteId::from_hex(note_id_hex).unwrap(); + let data_str = format!("thread:{}", note_id_hex); + let data = &data_str.split(":").collect::>(); + let mut token_writer = TokenWriter::default(); + let mut parser = TokenParser::new(&data); + let parsed = TimelineKind::parse(&mut parser, &Pubkey::new(*note_id.bytes())).unwrap(); + let expected = TimelineKind::Thread(ThreadSelection::from_root_id( + RootNoteIdBuf::new_unsafe(*note_id.bytes()), + )); + parsed.serialize_tokens(&mut token_writer); + assert_eq!(expected, parsed); + assert_eq!(token_writer.str(), data_str); } } diff --git a/crates/notedeck_columns/src/ui/add_column.rs b/crates/notedeck_columns/src/ui/add_column.rs index aeed68fb..0306b678 100644 
--- a/crates/notedeck_columns/src/ui/add_column.rs +++ b/crates/notedeck_columns/src/ui/add_column.rs @@ -143,32 +143,39 @@ impl AddColumnOption { ndb: &Ndb, cur_account: Option<&UserAccount>, ) -> Option { + let txn = Transaction::new(ndb).unwrap(); match self { AddColumnOption::Algo(algo_option) => Some(AddColumnResponse::Algo(algo_option)), AddColumnOption::Universe => TimelineKind::Universe - .into_timeline(ndb, None) - .map(AddColumnResponse::Timeline), - AddColumnOption::Notification(pubkey) => TimelineKind::Notifications(pubkey) - .into_timeline(ndb, cur_account.map(|a| a.pubkey.bytes())) + .into_timeline(&txn, ndb) .map(AddColumnResponse::Timeline), + AddColumnOption::Notification(pubkey) => { + TimelineKind::Notifications(*pubkey.to_pubkey(&cur_account.map(|kp| kp.pubkey)?)) + .into_timeline(&txn, ndb) + .map(AddColumnResponse::Timeline) + } AddColumnOption::UndecidedNotification => { Some(AddColumnResponse::UndecidedNotification) } - AddColumnOption::Contacts(pubkey) => { - let tlk = TimelineKind::contact_list(pubkey); - tlk.into_timeline(ndb, cur_account.map(|a| a.pubkey.bytes())) + AddColumnOption::Contacts(pk_src) => { + let tlk = TimelineKind::contact_list( + *pk_src.to_pubkey(&cur_account.map(|kp| kp.pubkey)?), + ); + tlk.into_timeline(&txn, ndb) .map(AddColumnResponse::Timeline) } AddColumnOption::ExternalNotification => Some(AddColumnResponse::ExternalNotification), AddColumnOption::UndecidedHashtag => Some(AddColumnResponse::Hashtag), AddColumnOption::Hashtag(hashtag) => TimelineKind::Hashtag(hashtag) - .into_timeline(ndb, None) + .into_timeline(&txn, ndb) .map(AddColumnResponse::Timeline), AddColumnOption::UndecidedIndividual => Some(AddColumnResponse::UndecidedIndividual), AddColumnOption::ExternalIndividual => Some(AddColumnResponse::ExternalIndividual), AddColumnOption::Individual(pubkey_source) => { - let tlk = TimelineKind::profile(pubkey_source); - tlk.into_timeline(ndb, cur_account.map(|a| a.pubkey.bytes())) + let tlk = TimelineKind::profile( + *pubkey_source.to_pubkey(&cur_account.map(|kp| kp.pubkey)?), + ); + tlk.into_timeline(&txn, ndb) .map(AddColumnResponse::Timeline) } } @@ -232,13 +239,17 @@ impl<'a> AddColumnView<'a> { }) } - fn algo_last_per_pk_ui(&mut self, ui: &mut Ui) -> Option { + fn algo_last_per_pk_ui( + &mut self, + ui: &mut Ui, + deck_author: Pubkey, + ) -> Option { let algo_option = ColumnOptionData { title: "Contact List", description: "Source the last note for each user in your contact list", icon: egui::include_image!("../../../../assets/icons/home_icon_dark_4x.png"), option: AddColumnOption::Algo(AlgoOption::LastPerPubkey(Decision::Decided( - ListKind::contact_list(PubkeySource::DeckAuthor), + ListKind::contact_list(deck_author), ))), }; @@ -319,18 +330,22 @@ impl<'a> AddColumnView<'a> { } let resp = if let Some(keypair) = key_state.get_login_keypair() { - let txn = Transaction::new(self.ndb).expect("txn"); - if let Ok(profile) = self.ndb.get_profile_by_pubkey(&txn, keypair.pubkey.bytes()) { - egui::Frame::window(ui.style()) - .outer_margin(Margin { - left: 4.0, - right: 4.0, - top: 12.0, - bottom: 32.0, - }) - .show(ui, |ui| { - ProfilePreview::new(&profile, self.img_cache).ui(ui); - }); + { + let txn = Transaction::new(self.ndb).expect("txn"); + if let Ok(profile) = + self.ndb.get_profile_by_pubkey(&txn, keypair.pubkey.bytes()) + { + egui::Frame::window(ui.style()) + .outer_margin(Margin { + left: 4.0, + right: 4.0, + top: 12.0, + bottom: 32.0, + }) + .show(ui, |ui| { + ProfilePreview::new(&profile, self.img_cache).ui(ui); + }); + } } if 
ui.add(add_column_button()).clicked() { @@ -470,7 +485,7 @@ impl<'a> AddColumnView<'a> { title: "Contacts", description: "See notes from your contacts", icon: egui::include_image!("../../../../assets/icons/home_icon_dark_4x.png"), - option: AddColumnOption::Contacts(source.clone()), + option: AddColumnOption::Contacts(source), }); } vec.push(ColumnOptionData { @@ -609,7 +624,13 @@ pub fn render_add_column_routes( AddColumnRoute::Base => add_column_view.ui(ui), AddColumnRoute::Algo(r) => match r { AddAlgoRoute::Base => add_column_view.algo_ui(ui), - AddAlgoRoute::LastPerPubkey => add_column_view.algo_last_per_pk_ui(ui), + AddAlgoRoute::LastPerPubkey => { + if let Some(deck_author) = ctx.accounts.get_selected_account() { + add_column_view.algo_last_per_pk_ui(ui, deck_author.pubkey) + } else { + None + } + } }, AddColumnRoute::UndecidedNotification => add_column_view.notifications_ui(ui), AddColumnRoute::ExternalNotification => add_column_view.external_notification_ui(ui), @@ -628,13 +649,16 @@ pub fn render_add_column_routes( ctx.pool, ctx.note_cache, app.since_optimize, - ctx.accounts - .get_selected_account() - .as_ref() - .map(|sa| &sa.pubkey), ); + app.columns_mut(ctx.accounts) - .add_timeline_to_column(col, timeline); + .column_mut(col) + .router_mut() + .route_to_replaced(Route::timeline(timeline.kind.clone())); + + app.timeline_cache + .timelines + .insert(timeline.kind.clone(), timeline); } AddColumnResponse::Algo(algo_option) => match algo_option { @@ -654,14 +678,8 @@ pub fn render_add_column_routes( // add it to our list of timelines AlgoOption::LastPerPubkey(Decision::Decided(list_kind)) => { let maybe_timeline = { - let default_user = ctx - .accounts - .get_selected_account() - .as_ref() - .map(|sa| sa.pubkey.bytes()); - - TimelineKind::last_per_pubkey(list_kind.clone()) - .into_timeline(ctx.ndb, default_user) + let txn = Transaction::new(ctx.ndb).unwrap(); + TimelineKind::last_per_pubkey(list_kind).into_timeline(&txn, ctx.ndb) }; if let Some(mut timeline) = maybe_timeline { @@ -672,14 +690,16 @@ pub fn render_add_column_routes( ctx.pool, ctx.note_cache, app.since_optimize, - ctx.accounts - .get_selected_account() - .as_ref() - .map(|sa| &sa.pubkey), ); app.columns_mut(ctx.accounts) - .add_timeline_to_column(col, timeline); + .column_mut(col) + .router_mut() + .route_to_replaced(Route::timeline(timeline.kind.clone())); + + app.timeline_cache + .timelines + .insert(timeline.kind.clone(), timeline); } else { // we couldn't fetch the timeline yet... let's let // the user know ? 
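The two cache insertions above are the crux of the new model: a column only stores a route to a TimelineKind, while the Timeline itself lives in app.timeline_cache.timelines, keyed by that kind. Because TimelineKind now derives Hash and Eq, it can act as the map key directly, so two columns opening the same kind share one cached timeline. A small self-contained sketch of the idea (type and method names here are illustrative, not the exact notedeck API):

    use std::collections::HashMap;
    use std::hash::Hash;

    // Sketch: hand back the existing timeline for a kind, or build one on
    // first open. A second open of the same kind reuses the cached entry.
    struct KindCache<K, T> {
        entries: HashMap<K, T>,
    }

    impl<K: Eq + Hash, T> KindCache<K, T> {
        fn open_with(&mut self, kind: K, make: impl FnOnce() -> T) -> &mut T {
            self.entries.entry(kind).or_insert_with(make)
        }
    }

    fn main() {
        let mut cache = KindCache { entries: HashMap::new() };
        cache.open_with("hashtag:nostr", Vec::new).push(1u32);
        cache.open_with("hashtag:nostr", Vec::new).push(2u32);
        // One kind, one cached timeline, two notes.
        assert_eq!(cache.entries.len(), 1);
    }
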
diff --git a/crates/notedeck_columns/src/ui/column/header.rs b/crates/notedeck_columns/src/ui/column/header.rs index 727d1f9e..88043909 100644 --- a/crates/notedeck_columns/src/ui/column/header.rs +++ b/crates/notedeck_columns/src/ui/column/header.rs @@ -5,7 +5,7 @@ use crate::nav::SwitchingAction; use crate::{ column::Columns, route::Route, - timeline::{ColumnTitle, TimelineId, TimelineKind, TimelineRoute}, + timeline::{ColumnTitle, TimelineKind}, ui::{ self, anim::{AnimationHelper, ICON_EXPANSION_MULTIPLE}, @@ -22,7 +22,6 @@ pub struct NavTitle<'a> { ndb: &'a Ndb, img_cache: &'a mut ImageCache, columns: &'a Columns, - deck_author: Option<&'a Pubkey>, routes: &'a [Route], col_id: usize, } @@ -32,7 +31,6 @@ impl<'a> NavTitle<'a> { ndb: &'a Ndb, img_cache: &'a mut ImageCache, columns: &'a Columns, - deck_author: Option<&'a Pubkey>, routes: &'a [Route], col_id: usize, ) -> Self { @@ -40,7 +38,6 @@ impl<'a> NavTitle<'a> { ndb, img_cache, columns, - deck_author, routes, col_id, } @@ -123,14 +120,14 @@ impl<'a> NavTitle<'a> { // not it looks cool self.title_pfp(ui, prev, 32.0); - let column_title = prev.title(self.columns); + let column_title = prev.title(); let back_resp = match &column_title { ColumnTitle::Simple(title) => ui.add(Self::back_label(title, color)), ColumnTitle::NeedsDb(need_db) => { let txn = Transaction::new(self.ndb).unwrap(); - let title = need_db.title(&txn, self.ndb, self.deck_author); + let title = need_db.title(&txn, self.ndb); ui.add(Self::back_label(title, color)) } }; @@ -402,14 +399,11 @@ impl<'a> NavTitle<'a> { }) } - fn timeline_pfp(&mut self, ui: &mut egui::Ui, id: TimelineId, pfp_size: f32) { + fn timeline_pfp(&mut self, ui: &mut egui::Ui, id: &TimelineKind, pfp_size: f32) { let txn = Transaction::new(self.ndb).unwrap(); - if let Some(pfp) = self - .columns - .find_timeline(id) - .and_then(|tl| tl.kind.pubkey_source()) - .and_then(|pksrc| self.deck_author.map(|da| pksrc.to_pubkey(da))) + if let Some(pfp) = id + .pubkey() .and_then(|pk| self.pubkey_pfp(&txn, pk.bytes(), pfp_size)) { ui.add(pfp); @@ -422,34 +416,35 @@ impl<'a> NavTitle<'a> { fn title_pfp(&mut self, ui: &mut egui::Ui, top: &Route, pfp_size: f32) { match top { - Route::Timeline(tlr) => match tlr { - TimelineRoute::Timeline(tlid) => { - let is_hashtag = self - .columns - .find_timeline(*tlid) - .is_some_and(|tl| matches!(tl.kind, TimelineKind::Hashtag(_))); - - if is_hashtag { - ui.add( - egui::Image::new(egui::include_image!( - "../../../../../assets/icons/hashtag_icon_4x.png" - )) - .fit_to_exact_size(egui::vec2(pfp_size, pfp_size)), - ); - } else { - self.timeline_pfp(ui, *tlid, pfp_size); - } + Route::Timeline(kind) => match kind { + TimelineKind::Hashtag(_ht) => { + ui.add( + egui::Image::new(egui::include_image!( + "../../../../../assets/icons/hashtag_icon_4x.png" + )) + .fit_to_exact_size(egui::vec2(pfp_size, pfp_size)), + ); } - TimelineRoute::Thread(_note_id) => {} - TimelineRoute::Reply(_note_id) => {} - TimelineRoute::Quote(_note_id) => {} - - TimelineRoute::Profile(pubkey) => { + TimelineKind::Profile(pubkey) => { self.show_profile(ui, pubkey, pfp_size); } + + TimelineKind::Thread(_) => { + // no pfp for threads + } + + TimelineKind::Universe + | TimelineKind::Algo(_) + | TimelineKind::Notifications(_) + | TimelineKind::Generic(_) + | TimelineKind::List(_) => { + self.timeline_pfp(ui, kind, pfp_size); + } }, + Route::Reply(_) => {} + Route::Quote(_) => {} Route::Accounts(_as) => {} Route::ComposeNote => {} Route::AddColumn(_add_col_route) => {} @@ -480,7 +475,7 @@ impl<'a> NavTitle<'a> { } 
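All of the deck_author plumbing removed in this file works out because PubkeySource::DeckAuthor is now resolved to a concrete Pubkey once, at token-parse time, rather than at every render call site. A hedged standalone sketch of that resolution step (the type alias stands in for enostr's Pubkey, for illustration only):

    // Stand-in for enostr's Pubkey type.
    type Pubkey = [u8; 32];

    #[derive(Clone, Copy, Debug)]
    enum PubkeySource {
        Explicit(Pubkey),
        DeckAuthor,
    }

    impl PubkeySource {
        // Collapse the source into a concrete pubkey as soon as the deck
        // author is known; downstream code then stores only Pubkey.
        fn to_pubkey(self, deck_author: &Pubkey) -> Pubkey {
            match self {
                PubkeySource::Explicit(pk) => pk,
                PubkeySource::DeckAuthor => *deck_author,
            }
        }
    }

    fn main() {
        let deck_author: Pubkey = [7u8; 32];
        assert_eq!(PubkeySource::DeckAuthor.to_pubkey(&deck_author), deck_author);
        assert_eq!(
            PubkeySource::Explicit([1u8; 32]).to_pubkey(&deck_author),
            [1u8; 32]
        );
    }
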
fn title_label(&self, ui: &mut egui::Ui, top: &Route) { - let column_title = top.title(self.columns); + let column_title = top.title(); match &column_title { ColumnTitle::Simple(title) => { @@ -489,7 +484,7 @@ impl<'a> NavTitle<'a> { ColumnTitle::NeedsDb(need_db) => { let txn = Transaction::new(self.ndb).unwrap(); - let title = need_db.title(&txn, self.ndb, self.deck_author); + let title = need_db.title(&txn, self.ndb); ui.add(Self::title_label_value(title)); } }; diff --git a/crates/notedeck_columns/src/ui/profile/mod.rs b/crates/notedeck_columns/src/ui/profile/mod.rs index ff3474bf..c466895d 100644 --- a/crates/notedeck_columns/src/ui/profile/mod.rs +++ b/crates/notedeck_columns/src/ui/profile/mod.rs @@ -5,7 +5,7 @@ pub mod preview; pub use edit::EditProfileView; use egui::load::TexturePoll; use egui::{vec2, Color32, Label, Layout, Rect, RichText, Rounding, ScrollArea, Sense, Stroke}; -use enostr::{Pubkey, PubkeyRef}; +use enostr::Pubkey; use nostrdb::{Ndb, ProfileRecord, Transaction}; pub use picture::ProfilePic; pub use preview::ProfilePreview; @@ -15,7 +15,7 @@ use crate::{ actionbar::NoteAction, colors, images, profile::get_display_name, - timeline::{TimelineCache, TimelineCacheKey}, + timeline::{TimelineCache, TimelineKind}, ui::{ note::NoteOptions, timeline::{tabs_ui, TimelineTabView}, @@ -90,7 +90,7 @@ impl<'a> ProfileView<'a> { self.ndb, self.note_cache, &txn, - TimelineCacheKey::Profile(PubkeyRef::new(self.pubkey.bytes())), + &TimelineKind::Profile(*self.pubkey), ) .get_ptr(); diff --git a/crates/notedeck_columns/src/ui/side_panel.rs b/crates/notedeck_columns/src/ui/side_panel.rs index b66e4721..b864206a 100644 --- a/crates/notedeck_columns/src/ui/side_panel.rs +++ b/crates/notedeck_columns/src/ui/side_panel.rs @@ -288,7 +288,7 @@ impl<'a> DesktopSidePanel<'a> { if router .routes() .iter() - .any(|&r| r == Route::Accounts(AccountsRoute::Accounts)) + .any(|r| r == &Route::Accounts(AccountsRoute::Accounts)) { // return if we are already routing to accounts router.go_back(); @@ -297,7 +297,7 @@ impl<'a> DesktopSidePanel<'a> { } } SidePanelAction::Settings => { - if router.routes().iter().any(|&r| r == Route::Relays) { + if router.routes().iter().any(|r| r == &Route::Relays) { // return if we are already routing to accounts router.go_back(); } else { @@ -308,7 +308,7 @@ impl<'a> DesktopSidePanel<'a> { if router .routes() .iter() - .any(|&r| matches!(r, Route::AddColumn(_))) + .any(|r| matches!(r, Route::AddColumn(_))) { router.go_back(); } else { @@ -316,7 +316,7 @@ impl<'a> DesktopSidePanel<'a> { } } SidePanelAction::ComposeNote => { - if router.routes().iter().any(|&r| r == Route::ComposeNote) { + if router.routes().iter().any(|r| r == &Route::ComposeNote) { router.go_back(); } else { router.route_to(Route::ComposeNote); @@ -331,7 +331,7 @@ impl<'a> DesktopSidePanel<'a> { info!("Clicked expand side panel button"); } SidePanelAction::Support => { - if router.routes().iter().any(|&r| r == Route::Support) { + if router.routes().iter().any(|r| r == &Route::Support) { router.go_back(); } else { support.refresh(); @@ -339,7 +339,7 @@ impl<'a> DesktopSidePanel<'a> { } } SidePanelAction::NewDeck => { - if router.routes().iter().any(|&r| r == Route::NewDeck) { + if router.routes().iter().any(|r| r == &Route::NewDeck) { router.go_back(); } else { router.route_to(Route::NewDeck); @@ -351,7 +351,7 @@ impl<'a> DesktopSidePanel<'a> { ))) } SidePanelAction::EditDeck(index) => { - if router.routes().iter().any(|&r| r == Route::EditDeck(index)) { + if router.routes().iter().any(|r| r == 
&Route::EditDeck(index)) { router.go_back(); } else { switching_response = Some(crate::nav::SwitchingAction::Decks( diff --git a/crates/notedeck_columns/src/ui/thread.rs b/crates/notedeck_columns/src/ui/thread.rs index 735c03ba..6a5c2627 100644 --- a/crates/notedeck_columns/src/ui/thread.rs +++ b/crates/notedeck_columns/src/ui/thread.rs @@ -1,6 +1,6 @@ use crate::{ actionbar::NoteAction, - timeline::{TimelineCache, TimelineCacheKey}, + timeline::{ThreadSelection, TimelineCache, TimelineKind}, ui::note::NoteOptions, }; @@ -83,7 +83,7 @@ impl<'a> ThreadView<'a> { self.ndb, self.note_cache, &txn, - TimelineCacheKey::Thread(root_id), + &TimelineKind::Thread(ThreadSelection::from_root_id(root_id.to_owned())), ) .get_ptr(); diff --git a/crates/notedeck_columns/src/ui/timeline.rs b/crates/notedeck_columns/src/ui/timeline.rs index 83506b08..a0db8bae 100644 --- a/crates/notedeck_columns/src/ui/timeline.rs +++ b/crates/notedeck_columns/src/ui/timeline.rs @@ -3,8 +3,7 @@ use std::f32::consts::PI; use crate::actionbar::NoteAction; use crate::timeline::TimelineTab; use crate::{ - column::Columns, - timeline::{TimelineId, ViewFilter}, + timeline::{TimelineCache, TimelineKind, ViewFilter}, ui, ui::note::NoteOptions, }; @@ -19,8 +18,8 @@ use tracing::{error, warn}; use super::anim::{AnimationHelper, ICON_EXPANSION_MULTIPLE}; pub struct TimelineView<'a> { - timeline_id: TimelineId, - columns: &'a mut Columns, + timeline_id: &'a TimelineKind, + timeline_cache: &'a mut TimelineCache, ndb: &'a Ndb, note_cache: &'a mut NoteCache, img_cache: &'a mut ImageCache, @@ -31,8 +30,8 @@ pub struct TimelineView<'a> { impl<'a> TimelineView<'a> { pub fn new( - timeline_id: TimelineId, - columns: &'a mut Columns, + timeline_id: &'a TimelineKind, + timeline_cache: &'a mut TimelineCache, ndb: &'a Ndb, note_cache: &'a mut NoteCache, img_cache: &'a mut ImageCache, @@ -43,7 +42,7 @@ impl<'a> TimelineView<'a> { TimelineView { ndb, timeline_id, - columns, + timeline_cache, note_cache, img_cache, reverse, @@ -57,7 +56,7 @@ impl<'a> TimelineView<'a> { ui, self.ndb, self.timeline_id, - self.columns, + self.timeline_cache, self.note_cache, self.img_cache, self.reverse, @@ -76,8 +75,8 @@ impl<'a> TimelineView<'a> { fn timeline_ui( ui: &mut egui::Ui, ndb: &Ndb, - timeline_id: TimelineId, - columns: &mut Columns, + timeline_id: &TimelineKind, + timeline_cache: &mut TimelineCache, note_cache: &mut NoteCache, img_cache: &mut ImageCache, reversed: bool, @@ -92,7 +91,7 @@ fn timeline_ui( */ let scroll_id = { - let timeline = if let Some(timeline) = columns.find_timeline_mut(timeline_id) { + let timeline = if let Some(timeline) = timeline_cache.timelines.get_mut(timeline_id) { timeline } else { error!("tried to render timeline in column, but timeline was missing"); @@ -142,7 +141,7 @@ fn timeline_ui( } let scroll_output = scroll_area.show(ui, |ui| { - let timeline = if let Some(timeline) = columns.find_timeline_mut(timeline_id) { + let timeline = if let Some(timeline) = timeline_cache.timelines.get(timeline_id) { timeline } else { error!("tried to render timeline in column, but timeline was missing"); diff --git a/crates/notedeck_columns/src/unknowns.rs b/crates/notedeck_columns/src/unknowns.rs index 4dbfc1bd..211ce92e 100644 --- a/crates/notedeck_columns/src/unknowns.rs +++ b/crates/notedeck_columns/src/unknowns.rs @@ -1,4 +1,4 @@ -use crate::{column::Columns, Result}; +use crate::{timeline::TimelineCache, Result}; use nostrdb::{Ndb, NoteKey, Transaction}; use notedeck::{CachedNote, NoteCache, UnknownIds}; use tracing::error; @@ -6,12 +6,12 
@@ use tracing::error;
 pub fn update_from_columns(
     txn: &Transaction,
     unknown_ids: &mut UnknownIds,
-    columns: &Columns,
+    timeline_cache: &TimelineCache,
     ndb: &Ndb,
     note_cache: &mut NoteCache,
 ) -> bool {
     let before = unknown_ids.ids().len();
-    if let Err(e) = get_unknown_ids(txn, unknown_ids, columns, ndb, note_cache) {
+    if let Err(e) = get_unknown_ids(txn, unknown_ids, timeline_cache, ndb, note_cache) {
         error!("UnknownIds::update {e}");
     }
     let after = unknown_ids.ids().len();
@@ -27,7 +27,7 @@ pub fn update_from_columns(
 pub fn get_unknown_ids(
     txn: &Transaction,
     unknown_ids: &mut UnknownIds,
-    columns: &Columns,
+    timeline_cache: &TimelineCache,
     ndb: &Ndb,
     note_cache: &mut NoteCache,
 ) -> Result<()> {
@@ -36,7 +36,7 @@ pub fn get_unknown_ids(
 
     let mut new_cached_notes: Vec<(NoteKey, CachedNote)> = vec![];
 
-    for timeline in columns.timelines() {
+    for (_kind, timeline) in timeline_cache.timelines.iter() {
         for noteref in timeline.all_or_any_notes() {
             let note = ndb.get_note_by_key(txn, noteref.key)?;
             let note_key = note.key().unwrap();
diff --git a/crates/tokenator/Cargo.toml b/crates/tokenator/Cargo.toml
index 38a4d16f..a1ca14af 100644
--- a/crates/tokenator/Cargo.toml
+++ b/crates/tokenator/Cargo.toml
@@ -5,3 +5,4 @@ edition = "2021"
 description = "A simple library for parsing and serializing string tokens"
 
 [dependencies]
+hex = { workspace = true }
diff --git a/crates/tokenator/src/lib.rs b/crates/tokenator/src/lib.rs
index 0206b69a..103314aa 100644
--- a/crates/tokenator/src/lib.rs
+++ b/crates/tokenator/src/lib.rs
@@ -218,3 +218,15 @@ pub trait TokenSerializable: Sized {
     fn parse_from_tokens<'a>(parser: &mut TokenParser<'a>) -> Result<Self, ParseError<'a>>;
     fn serialize_tokens(&self, writer: &mut TokenWriter);
 }
+
+/// Parse a 32 byte hex string
+pub fn parse_hex_id<'a>(parser: &mut TokenParser<'a>) -> Result<[u8; 32], ParseError<'a>> {
+    use hex;
+
+    let hexid = parser.pull_token()?;
+    hex::decode(hexid)
+        .map_err(|_| ParseError::HexDecodeFailed)?
+ .as_slice() + .try_into() + .map_err(|_| ParseError::HexDecodeFailed) +} diff --git a/shell.nix b/shell.nix index ca9e81b3..735838b0 100644 --- a/shell.nix +++ b/shell.nix @@ -14,7 +14,6 @@ mkShell ({ #cargo-edit #cargo-watch rustup - rustfmt libiconv pkg-config #cmake From ac10c7e5b22599048465c3a19ec9b7c0c56b8431 Mon Sep 17 00:00:00 2001 From: William Casarin Date: Wed, 5 Feb 2025 18:43:09 -0800 Subject: [PATCH 18/18] hashtags: click hashtags to open them Fixes: https://github.com/damus-io/notedeck/issues/695 Fixes: https://github.com/damus-io/notedeck/issues/713 Changelog-Added: Add ability to click hashtags Signed-off-by: William Casarin --- crates/notedeck_columns/src/actionbar.rs | 75 +++---------------- crates/notedeck_columns/src/timeline/kind.rs | 2 +- crates/notedeck_columns/src/timeline/mod.rs | 2 +- crates/notedeck_columns/src/ui/mention.rs | 6 +- .../notedeck_columns/src/ui/note/contents.rs | 13 +++- crates/notedeck_columns/src/ui/note/mod.rs | 25 +++++-- 6 files changed, 44 insertions(+), 79 deletions(-) diff --git a/crates/notedeck_columns/src/actionbar.rs b/crates/notedeck_columns/src/actionbar.rs index 04f23336..db676512 100644 --- a/crates/notedeck_columns/src/actionbar.rs +++ b/crates/notedeck_columns/src/actionbar.rs @@ -1,20 +1,19 @@ use crate::{ column::Columns, route::{Route, Router}, - timeline::{ThreadSelection, TimelineCache, TimelineKind}, + timeline::{TimelineCache, TimelineKind}, }; -use enostr::{NoteId, Pubkey, RelayPool}; +use enostr::{NoteId, RelayPool}; use nostrdb::{Ndb, NoteKey, Transaction}; -use notedeck::{note::root_note_id_from_selected_id, NoteCache, RootIdError, UnknownIds}; +use notedeck::{NoteCache, UnknownIds}; use tracing::error; -#[derive(Debug, Eq, PartialEq, Copy, Clone)] +#[derive(Debug, Eq, PartialEq, Clone)] pub enum NoteAction { Reply(NoteId), Quote(NoteId), - OpenThread(NoteId), - OpenProfile(Pubkey), + OpenTimeline(TimelineKind), } pub struct NewNotes { @@ -26,52 +25,6 @@ pub enum TimelineOpenResult { NewNotes(NewNotes), } -/// open_thread is called when a note is selected and we need to navigate -/// to a thread It is responsible for managing the subscription and -/// making sure the thread is up to date. In a sense, it's a model for -/// the thread view. We don't have a concept of model/view/controller etc -/// in egui, but this is the closest thing to that. 
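The removal below is the point of this commit: OpenThread and OpenProfile collapse into a single OpenTimeline action that carries a TimelineKind, so routing and subscription setup happen in one place. A simplified standalone sketch of how click handlers now build actions (the real code also resolves a clicked note to its thread root via ThreadSelection, and lowercases hashtags when building filters):

    type Pubkey = [u8; 32];

    #[derive(Debug, PartialEq)]
    enum TimelineKind {
        Profile(Pubkey),
        Hashtag(String),
    }

    #[derive(Debug, PartialEq)]
    enum NoteAction {
        OpenTimeline(TimelineKind),
    }

    // Profile clicks and hashtag clicks produce the same action shape;
    // the router and timeline cache handle everything downstream.
    fn profile_clicked(pk: Pubkey) -> NoteAction {
        NoteAction::OpenTimeline(TimelineKind::Profile(pk))
    }

    fn hashtag_clicked(tag: &str) -> NoteAction {
        NoteAction::OpenTimeline(TimelineKind::Hashtag(tag.to_string()))
    }

    fn main() {
        assert_eq!(
            hashtag_clicked("nostr"),
            NoteAction::OpenTimeline(TimelineKind::Hashtag("nostr".into()))
        );
        let _ = profile_clicked([0u8; 32]);
    }
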
-#[allow(clippy::too_many_arguments)] -fn open_thread( - ndb: &Ndb, - txn: &Transaction, - router: &mut Router, - note_cache: &mut NoteCache, - pool: &mut RelayPool, - timeline_cache: &mut TimelineCache, - selected_note: &[u8; 32], -) -> Option { - router.route_to(Route::thread( - ThreadSelection::from_note_id(ndb, note_cache, txn, NoteId::new(*selected_note)).ok()?, - )); - - match root_note_id_from_selected_id(ndb, note_cache, txn, selected_note) { - Ok(root_id) => timeline_cache.open( - ndb, - note_cache, - txn, - pool, - &TimelineKind::Thread(ThreadSelection::from_root_id(root_id.to_owned())), - ), - - Err(RootIdError::NoteNotFound) => { - error!( - "open_thread: note not found: {}", - hex::encode(selected_note) - ); - None - } - - Err(RootIdError::NoRootId) => { - error!( - "open_thread: note has no root id: {}", - hex::encode(selected_note) - ); - None - } - } -} - impl NoteAction { #[allow(clippy::too_many_arguments)] pub fn execute( @@ -89,19 +42,9 @@ impl NoteAction { None } - NoteAction::OpenThread(note_id) => open_thread( - ndb, - txn, - router, - note_cache, - pool, - timeline_cache, - note_id.bytes(), - ), - - NoteAction::OpenProfile(pubkey) => { - router.route_to(Route::profile(*pubkey)); - timeline_cache.open(ndb, note_cache, txn, pool, &TimelineKind::Profile(*pubkey)) + NoteAction::OpenTimeline(kind) => { + router.route_to(Route::Timeline(kind.to_owned())); + timeline_cache.open(ndb, note_cache, txn, pool, kind) } NoteAction::Quote(note_id) => { @@ -114,7 +57,7 @@ impl NoteAction { /// Execute the NoteAction and process the TimelineOpenResult #[allow(clippy::too_many_arguments)] pub fn execute_and_process_result( - self, + &self, ndb: &Ndb, columns: &mut Columns, col: usize, diff --git a/crates/notedeck_columns/src/timeline/kind.rs b/crates/notedeck_columns/src/timeline/kind.rs index 189d1134..7281461e 100644 --- a/crates/notedeck_columns/src/timeline/kind.rs +++ b/crates/notedeck_columns/src/timeline/kind.rs @@ -434,7 +434,7 @@ impl TimelineKind { TimelineKind::Hashtag(hashtag) => FilterState::ready(vec![Filter::new() .kinds([1]) .limit(filter::default_limit()) - .tags([hashtag.clone()], 't') + .tags([hashtag.to_lowercase()], 't') .build()]), TimelineKind::Algo(algo_timeline) => match algo_timeline { diff --git a/crates/notedeck_columns/src/timeline/mod.rs b/crates/notedeck_columns/src/timeline/mod.rs index 886bfea6..94777e67 100644 --- a/crates/notedeck_columns/src/timeline/mod.rs +++ b/crates/notedeck_columns/src/timeline/mod.rs @@ -247,7 +247,7 @@ impl Timeline { let filter = Filter::new() .kinds([1]) .limit(filter::default_limit()) - .tags([hashtag.clone()], 't') + .tags([hashtag.to_lowercase()], 't') .build(); Timeline::new( diff --git a/crates/notedeck_columns/src/ui/mention.rs b/crates/notedeck_columns/src/ui/mention.rs index 407c70c7..83e53ac5 100644 --- a/crates/notedeck_columns/src/ui/mention.rs +++ b/crates/notedeck_columns/src/ui/mention.rs @@ -1,5 +1,5 @@ use crate::ui; -use crate::{actionbar::NoteAction, profile::get_display_name}; +use crate::{actionbar::NoteAction, profile::get_display_name, timeline::TimelineKind}; use egui::Sense; use enostr::Pubkey; use nostrdb::{Ndb, Transaction}; @@ -89,7 +89,9 @@ fn mention_ui( let note_action = if resp.clicked() { ui::show_pointer(ui); - Some(NoteAction::OpenProfile(Pubkey::new(*pk))) + Some(NoteAction::OpenTimeline(TimelineKind::profile( + Pubkey::new(*pk), + ))) } else if resp.hovered() { ui::show_pointer(ui); None diff --git a/crates/notedeck_columns/src/ui/note/contents.rs 
b/crates/notedeck_columns/src/ui/note/contents.rs index 8d698866..a6c6bd30 100644 --- a/crates/notedeck_columns/src/ui/note/contents.rs +++ b/crates/notedeck_columns/src/ui/note/contents.rs @@ -1,10 +1,9 @@ -use crate::actionbar::NoteAction; -use crate::images::ImageType; use crate::ui::{ self, note::{NoteOptions, NoteResponse}, ProfilePic, }; +use crate::{actionbar::NoteAction, images::ImageType, timeline::TimelineKind}; use egui::{Color32, Hyperlink, Image, RichText}; use nostrdb::{BlockType, Mention, Ndb, Note, NoteKey, Transaction}; use tracing::warn; @@ -198,7 +197,15 @@ fn render_note_contents( BlockType::Hashtag => { #[cfg(feature = "profiling")] puffin::profile_scope!("hashtag contents"); - ui.colored_label(link_color, format!("#{}", block.as_str())); + let resp = ui.colored_label(link_color, format!("#{}", block.as_str())); + + if resp.clicked() { + note_action = Some(NoteAction::OpenTimeline(TimelineKind::Hashtag( + block.as_str().to_string(), + ))); + } else if resp.hovered() { + ui::show_pointer(ui); + } } BlockType::Url => { diff --git a/crates/notedeck_columns/src/ui/note/mod.rs b/crates/notedeck_columns/src/ui/note/mod.rs index dda30143..a37d3443 100644 --- a/crates/notedeck_columns/src/ui/note/mod.rs +++ b/crates/notedeck_columns/src/ui/note/mod.rs @@ -17,6 +17,7 @@ pub use reply_description::reply_desc; use crate::{ actionbar::NoteAction, profile::get_display_name, + timeline::{ThreadSelection, TimelineKind}, ui::{self, View}, }; @@ -354,8 +355,9 @@ impl<'a> NoteView<'a> { ui.vertical(|ui| { ui.horizontal(|ui| { if self.pfp(note_key, &profile, ui).clicked() { - note_action = - Some(NoteAction::OpenProfile(Pubkey::new(*self.note.pubkey()))); + note_action = Some(NoteAction::OpenTimeline(TimelineKind::profile( + Pubkey::new(*self.note.pubkey()), + ))); }; let size = ui.available_size(); @@ -415,7 +417,7 @@ impl<'a> NoteView<'a> { ui.add(&mut contents); if let Some(action) = contents.action() { - note_action = Some(*action); + note_action = Some(action.clone()); } if self.options().has_actionbar() { @@ -430,7 +432,9 @@ impl<'a> NoteView<'a> { // main design ui.with_layout(egui::Layout::left_to_right(egui::Align::TOP), |ui| { if self.pfp(note_key, &profile, ui).clicked() { - note_action = Some(NoteAction::OpenProfile(Pubkey::new(*self.note.pubkey()))); + note_action = Some(NoteAction::OpenTimeline(TimelineKind::Profile( + Pubkey::new(*self.note.pubkey()), + ))); }; ui.with_layout(egui::Layout::top_down(egui::Align::LEFT), |ui| { @@ -480,7 +484,7 @@ impl<'a> NoteView<'a> { ui.add(&mut contents); if let Some(action) = contents.action() { - note_action = Some(*action); + note_action = Some(action.clone()); } if self.options().has_actionbar() { @@ -496,7 +500,16 @@ impl<'a> NoteView<'a> { }; let note_action = if note_hitbox_clicked(ui, hitbox_id, &response.rect, maybe_hitbox) { - Some(NoteAction::OpenThread(NoteId::new(*self.note.id()))) + if let Ok(selection) = ThreadSelection::from_note_id( + self.ndb, + self.note_cache, + self.note.txn().unwrap(), + NoteId::new(*self.note.id()), + ) { + Some(NoteAction::OpenTimeline(TimelineKind::Thread(selection))) + } else { + None + } } else { note_action };
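One subtlety in the ThreadSelection::from_note_id call above: clicking any reply in a thread lands on the same cached thread timeline, because ThreadSelection's Hash and PartialEq deliberately ignore selected_note (see the kind.rs hunk in the earlier patch). A standalone illustration of that equivalence:

    use std::collections::HashSet;
    use std::hash::{Hash, Hasher};

    type NoteId = [u8; 32];

    struct ThreadSelection {
        root_id: NoteId,
        selected_note: Option<NoteId>,
    }

    // Hash and Eq intentionally look only at root_id, so every note in a
    // thread maps to one cache entry.
    impl Hash for ThreadSelection {
        fn hash<H: Hasher>(&self, state: &mut H) {
            self.root_id.hash(state);
        }
    }

    impl PartialEq for ThreadSelection {
        fn eq(&self, other: &Self) -> bool {
            self.root_id == other.root_id
        }
    }

    impl Eq for ThreadSelection {}

    fn main() {
        let root = [1u8; 32];
        let from_root = ThreadSelection { root_id: root, selected_note: None };
        let from_reply = ThreadSelection { root_id: root, selected_note: Some([2u8; 32]) };

        let mut cache_keys = HashSet::new();
        assert!(cache_keys.insert(from_root));
        // Same root, different selected note: still one entry.
        assert!(!cache_keys.insert(from_reply));
        assert_eq!(cache_keys.len(), 1);
    }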