timeline: add nip50 search timelines

Fixes: https://github.com/damus-io/notedeck/issues/456
Signed-off-by: William Casarin <jb55@jb55.com>
Author: William Casarin
Date: 2025-02-22 19:36:19 -08:00
parent 62a1571dea
commit e09df3e7c3

9 changed files with 294 additions and 4 deletions
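
NIP-50 specifies full-text search via a `search` field on subscription filters, so a "search timeline" is just an ordinary timeline whose filter carries a query string (optionally scoped to an author). As a minimal sketch of the kind of filter this commit ends up building, mirroring the `search_filter` helper added below (the limit of 500 is illustrative only; the real code uses `default_limit()`):

    use nostrdb::Filter;

    // NIP-50 style search filter: kind-1 notes matching "nostrdb".
    // The limit here is illustrative; the commit uses default_limit().
    fn example_search_filter() -> Vec<Filter> {
        vec![Filter::new()
            .search("nostrdb")
            .kinds([1])
            .limit(500)
            .build()]
    }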

Cargo.lock (generated)

@@ -2726,7 +2726,7 @@ dependencies = [
 [[package]]
 name = "nostrdb"
 version = "0.5.1"
-source = "git+https://github.com/damus-io/nostrdb-rs?rev=ad3b345416d17ec75362fbfe82309c8196f5ad4b#ad3b345416d17ec75362fbfe82309c8196f5ad4b"
+source = "git+https://github.com/damus-io/nostrdb-rs?rev=e6b7ba82a2278ec92c6ecd507082c03c0a20842c#e6b7ba82a2278ec92c6ecd507082c03c0a20842c"
 dependencies = [
  "bindgen",
  "cc",
@@ -2800,6 +2800,7 @@ name = "notedeck_columns"
 version = "0.3.1"
 dependencies = [
  "base64 0.22.1",
+ "bech32",
  "bitflags 2.6.0",
  "dirs",
  "eframe",
@@ -2821,6 +2822,7 @@ dependencies = [
  "puffin 0.19.1 (git+https://github.com/jb55/puffin?rev=70ff86d5503815219b01a009afd3669b7903a057)",
  "puffin_egui",
  "rfd",
+ "rmpv",
  "security-framework",
  "serde",
  "serde_derive",
@@ -3865,6 +3867,27 @@ dependencies = [
  "windows-sys 0.52.0",
 ]

+[[package]]
+name = "rmp"
+version = "0.8.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "228ed7c16fa39782c3b3468e974aec2795e9089153cd08ee2e9aefb3613334c4"
+dependencies = [
+ "byteorder",
+ "num-traits",
+ "paste",
+]
+
+[[package]]
+name = "rmpv"
+version = "1.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "58450723cd9ee93273ce44a20b6ec4efe17f8ed2e3631474387bfdecf18bb2a9"
+dependencies = [
+ "num-traits",
+ "rmp",
+]
+
 [[package]]
 name = "roxmltree"
 version = "0.19.0"


@@ -12,6 +12,7 @@ members = [
 [workspace.dependencies]
 base32 = "0.4.0"
 base64 = "0.22.1"
+rmpv = "1.3.0"
 bech32 = { version = "0.11", default-features = false }
 bitflags = "2.5.0"
 dirs = "5.0.1"
@@ -30,7 +31,7 @@ indexmap = "2.6.0"
 log = "0.4.17"
 nostr = { version = "0.37.0", default-features = false, features = ["std", "nip49"] }
 mio = { version = "1.0.3", features = ["os-poll", "net"] }
-nostrdb = { git = "https://github.com/damus-io/nostrdb-rs", rev = "ad3b345416d17ec75362fbfe82309c8196f5ad4b" }
+nostrdb = { git = "https://github.com/damus-io/nostrdb-rs", rev = "e6b7ba82a2278ec92c6ecd507082c03c0a20842c" }
 #nostrdb = "0.5.2"
 notedeck = { path = "crates/notedeck" }
 notedeck_chrome = { path = "crates/notedeck_chrome" }


@@ -11,6 +11,8 @@ description = "A tweetdeck-style notedeck app"
 crate-type = ["lib", "cdylib"]

 [dependencies]
+rmpv = { workspace = true }
+bech32 = { workspace = true }
 notedeck = { workspace = true }
 tokenator = { workspace = true }
 bitflags = { workspace = true }


@@ -27,6 +27,7 @@ mod profile;
 mod profile_state;
 pub mod relay_pool_manager;
 mod route;
+mod search;
 mod subscriptions;
 mod support;
 mod test_data;


@@ -322,6 +322,7 @@ impl fmt::Display for Route {
             TimelineKind::Notifications(_) => write!(f, "Notifications"),
             TimelineKind::Universe => write!(f, "Universe"),
             TimelineKind::Generic(_) => write!(f, "Custom"),
+            TimelineKind::Search(_) => write!(f, "Search"),
             TimelineKind::Hashtag(ht) => write!(f, "Hashtag ({})", ht),
             TimelineKind::Thread(_id) => write!(f, "Thread"),
             TimelineKind::Profile(_id) => write!(f, "Profile"),


@@ -0,0 +1,233 @@
+use enostr::Pubkey;
+use nostrdb::{Filter, FilterBuilder};
+use rmpv::Value;
+use tokenator::{ParseError, TokenParser, TokenSerializable, TokenWriter};
+
+#[derive(Debug, Eq, PartialEq, Clone, Hash)]
+pub struct SearchQuery {
+    author: Option<Pubkey>,
+    search: String,
+}
+
+impl TokenSerializable for SearchQuery {
+    fn serialize_tokens(&self, writer: &mut TokenWriter) {
+        writer.write_token(&self.to_nfilter())
+    }
+
+    fn parse_from_tokens<'a>(parser: &mut TokenParser<'a>) -> Result<Self, ParseError<'a>> {
+        if let Some(query) = SearchQuery::from_nfilter(parser.pull_token()?) {
+            Ok(query)
+        } else {
+            Err(ParseError::DecodeFailed)
+        }
+    }
+}
+
+impl SearchQuery {
+    /// Convert the query to a filter-compatible MessagePack value
+    fn to_msgpack_value(&self) -> Value {
+        let mut values: Vec<(Value, Value)> = Vec::with_capacity(2);
+        let search_str: &str = &self.search;
+        values.push(("search".into(), search_str.into()));
+
+        if let Some(pubkey) = self.author() {
+            values.push((
+                "authors".into(),
+                Value::Array(vec![Value::Binary(pubkey.bytes().to_vec())]),
+            ))
+        }
+
+        Value::Map(values)
+    }
+
+    pub fn to_nfilter(&self) -> String {
+        let hrp = bech32::Hrp::parse_unchecked("nfilter");
+        let msgpack_value = self.to_msgpack_value();
+        let mut buf = vec![];
+        rmpv::encode::write_value(&mut buf, &msgpack_value)
+            .expect("expected nfilter to encode ok. too big?");
+        bech32::encode::<bech32::Bech32>(hrp, &buf).expect("expected bech32 nfilter to encode ok")
+    }
+
+    fn decode_value(value: &Value) -> Option<Self> {
+        let mut search: Option<String> = None;
+        let mut author: Option<Pubkey> = None;
+
+        let values = if let Value::Map(values) = value {
+            values
+        } else {
+            return None;
+        };
+
+        for (key, value) in values {
+            let key_str: &str = if let Value::String(s) = key {
+                s.as_str()?
+            } else {
+                continue;
+            };
+
+            if key_str == "search" {
+                if let Value::String(search_str) = value {
+                    search = search_str.clone().into_str();
+                } else {
+                    continue;
+                }
+            } else if key_str == "authors" {
+                let authors = if let Value::Array(authors) = value {
+                    authors
+                } else {
+                    continue;
+                };
+
+                let author_value = if let Some(author_value) = authors.first() {
+                    author_value
+                } else {
+                    continue;
+                };
+
+                let author_bytes: &[u8] = if let Value::Binary(author_bytes) = author_value {
+                    author_bytes
+                } else {
+                    continue;
+                };
+
+                let pubkey = Pubkey::new(author_bytes.try_into().ok()?);
+                author = Some(pubkey);
+            }
+        }
+
+        let search = search?;
+        Some(Self { search, author })
+    }
+
+    pub fn filter(&self) -> FilterBuilder {
+        Filter::new().search(&self.search).kinds([1])
+    }
+
+    pub fn from_nfilter(nfilter: &str) -> Option<Self> {
+        let (hrp, msgpack_data) = bech32::decode(nfilter).ok()?;
+        if hrp.as_str() != "nfilter" {
+            return None;
+        }
+        let value = rmpv::decode::read_value(&mut &msgpack_data[..]).ok()?;
+        Self::decode_value(&value)
+    }
+
+    pub fn author(&self) -> Option<&Pubkey> {
+        self.author.as_ref()
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use enostr::Pubkey;
+    use rmpv::Value;
+    use tokenator::{TokenParser, TokenSerializable, TokenWriter};
+
+    fn test_pubkey() -> Pubkey {
+        let bytes: [u8; 32] = [1; 32]; // Example public key
+        Pubkey::new(bytes)
+    }
+
+    #[test]
+    fn test_to_msgpack_value() {
+        let query = SearchQuery {
+            author: Some(test_pubkey()),
+            search: "nostrdb".to_string(),
+        };
+        let msgpack_value = query.to_msgpack_value();
+
+        if let Value::Map(values) = msgpack_value {
+            assert!(values
+                .iter()
+                .any(|(k, v)| *k == Value::String("search".into())
+                    && *v == Value::String("nostrdb".into())));
+            assert!(values
+                .iter()
+                .any(|(k, _v)| *k == Value::String("authors".into())));
+        } else {
+            panic!("Failed to encode SearchQuery to MessagePack");
+        }
+    }
+
+    #[test]
+    fn test_to_nfilter() {
+        let query = SearchQuery {
+            author: Some(test_pubkey()),
+            search: "nostrdb".to_string(),
+        };
+        let encoded = query.to_nfilter();
+        assert!(encoded.starts_with("nfilter"), "nfilter encoding failed");
+    }
+
+    #[test]
+    fn test_from_nfilter() {
+        let query = SearchQuery {
+            author: Some(test_pubkey()),
+            search: "nostrdb".to_string(),
+        };
+        let encoded = query.to_nfilter();
+        let decoded = SearchQuery::from_nfilter(&encoded).expect("Failed to decode nfilter");
+        assert_eq!(query, decoded);
+    }
+
+    #[test]
+    fn test_nfilter_roundtrip() {
+        let queries = vec![
+            SearchQuery {
+                author: None,
+                search: "nostrdb".to_string(),
+            },
+            SearchQuery {
+                author: Some(test_pubkey()),
+                search: "test".to_string(),
+            },
+        ];
+
+        for query in queries {
+            let encoded = query.to_nfilter();
+            let decoded =
+                SearchQuery::from_nfilter(&encoded).expect("Failed to decode valid nfilter");
+            assert_eq!(query, decoded, "Roundtrip encoding/decoding failed");
+        }
+    }
+
+    #[test]
+    fn test_invalid_nfilter() {
+        let invalid_nfilter = "nfilter1qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq";
+        assert!(SearchQuery::from_nfilter(invalid_nfilter).is_none());
+    }
+
+    #[test]
+    fn test_invalid_hrp() {
+        let invalid_nfilter = "invalid1qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq";
+        assert!(SearchQuery::from_nfilter(invalid_nfilter).is_none());
+    }
+
+    #[test]
+    fn test_parse_from_tokens() {
+        let query = SearchQuery {
+            author: Some(test_pubkey()),
+            search: "nostrdb".to_string(),
+        };
+        let mut writer = TokenWriter::default();
+        query.serialize_tokens(&mut writer);
+
+        let tokens = [writer.str()];
+        let mut parser = TokenParser::new(&tokens);
+        let parsed =
+            SearchQuery::parse_from_tokens(&mut parser).expect("Failed to parse from tokens");
+        assert_eq!(query, parsed);
+    }
+
+    #[test]
+    fn test_parse_from_invalid_tokens() {
+        let mut parser = TokenParser::new(&[]);
+        assert!(SearchQuery::parse_from_tokens(&mut parser).is_err());
+    }
+}
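
For orientation, a usage sketch of the nfilter encoding above, written module-internally (like the tests, since the struct fields are private); the query string is arbitrary:

    // Encode a query as a bech32 "nfilter" token, then decode it back.
    let query = SearchQuery {
        author: None,
        search: "damus".to_string(),
    };

    let encoded = query.to_nfilter();
    assert!(encoded.starts_with("nfilter1")); // bech32 HRP plus "1" separator

    let decoded = SearchQuery::from_nfilter(&encoded).expect("round-trip decode");
    assert_eq!(query, decoded);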


@@ -1,5 +1,8 @@
-use crate::error::Error;
-use crate::timeline::{Timeline, TimelineTab};
+use crate::{
+    error::Error,
+    search::SearchQuery,
+    timeline::{Timeline, TimelineTab},
+};
 use enostr::{Filter, NoteId, Pubkey};
 use nostrdb::{Ndb, Transaction};
 use notedeck::{
@@ -197,6 +200,8 @@ impl Eq for ThreadSelection {}
 pub enum TimelineKind {
     List(ListKind),

+    Search(SearchQuery),
+
     /// The last not per pubkey
     Algo(AlgoTimeline),
@@ -263,6 +268,7 @@ impl Display for TimelineKind {
             TimelineKind::Universe => f.write_str("Universe"),
             TimelineKind::Hashtag(_) => f.write_str("Hashtag"),
             TimelineKind::Thread(_) => f.write_str("Thread"),
+            TimelineKind::Search(_) => f.write_str("Search"),
         }
     }
 }
@@ -278,6 +284,7 @@ impl TimelineKind {
             TimelineKind::Generic(_) => None,
             TimelineKind::Hashtag(_ht) => None,
             TimelineKind::Thread(_ht) => None,
+            TimelineKind::Search(query) => query.author(),
         }
     }

@@ -293,11 +300,15 @@ impl TimelineKind {
             TimelineKind::Generic(_) => true,
             TimelineKind::Hashtag(_ht) => true,
             TimelineKind::Thread(_ht) => true,
+            TimelineKind::Search(_q) => true,
         }
     }

+    // NOTE!!: if you just added a TimelineKind enum, make sure to update
+    // the parser below as well
+
     pub fn serialize_tokens(&self, writer: &mut TokenWriter) {
         match self {
+            TimelineKind::Search(query) => query.serialize_tokens(writer),
             TimelineKind::List(list_kind) => list_kind.serialize_tokens(writer),
             TimelineKind::Algo(algo_timeline) => algo_timeline.serialize_tokens(writer),
             TimelineKind::Notifications(pk) => {
@@ -418,6 +429,8 @@ impl TimelineKind {
     // TODO: probably should set default limit here
     pub fn filters(&self, txn: &Transaction, ndb: &Ndb) -> FilterState {
         match self {
+            TimelineKind::Search(s) => FilterState::ready(search_filter(s)),
+
             TimelineKind::Universe => FilterState::ready(universe_filter()),

             TimelineKind::List(list_k) => match list_k {
@@ -468,6 +481,15 @@ impl TimelineKind {
     pub fn into_timeline(self, txn: &Transaction, ndb: &Ndb) -> Option<Timeline> {
         match self {
+            TimelineKind::Search(s) => {
+                let filter = FilterState::ready(search_filter(&s));
+                Some(Timeline::new(
+                    TimelineKind::Search(s),
+                    filter,
+                    TimelineTab::full_tabs(),
+                ))
+            }
+
             TimelineKind::Universe => Some(Timeline::new(
                 TimelineKind::Universe,
                 FilterState::ready(universe_filter()),
@@ -562,6 +584,7 @@ impl TimelineKind {
     pub fn to_title(&self) -> ColumnTitle<'_> {
         match self {
+            TimelineKind::Search(_query) => ColumnTitle::simple("Search"),
             TimelineKind::List(list_kind) => match list_kind {
                 ListKind::Contact(_pubkey_source) => ColumnTitle::simple("Contacts"),
             },
@@ -683,6 +706,10 @@ fn last_per_pubkey_filter_state(ndb: &Ndb, pk: &Pubkey) -> FilterState {
     }
 }

+fn search_filter(s: &SearchQuery) -> Vec<Filter> {
+    vec![s.filter().limit(default_limit()).build()]
+}
+
 fn universe_filter() -> Vec<Filter> {
     vec![Filter::new().kinds([1]).limit(default_limit()).build()]
 }
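
Taken together with the new search module, a search column can now be rebuilt from a serialized token. A hedged sketch, assuming a `Transaction` (`txn`) and an `Ndb` handle (`ndb`) are in scope, and with `"nfilter1..."` standing in for a real token:

    // Decode an nfilter token into a SearchQuery, then build its timeline.
    // The token literal is a placeholder, not a valid encoding.
    if let Some(query) = SearchQuery::from_nfilter("nfilter1...") {
        let _timeline = TimelineKind::Search(query)
            .into_timeline(&txn, &ndb)
            .expect("search timelines always have a ready filter state");
    }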


@@ -29,6 +29,7 @@ pub fn render_timeline_route(
     match kind {
         TimelineKind::List(_)
+        | TimelineKind::Search(_)
         | TimelineKind::Algo(_)
         | TimelineKind::Notifications(_)
         | TimelineKind::Universe


@@ -436,6 +436,7 @@ impl<'a> NavTitle<'a> {
             TimelineKind::Universe
             | TimelineKind::Algo(_)
+            | TimelineKind::Search(_)
             | TimelineKind::Notifications(_)
             | TimelineKind::Generic(_)
             | TimelineKind::List(_) => {