remove duplicate filter types

only use nostrdb::Filter

Signed-off-by: William Casarin <jb55@jb55.com>
Author: William Casarin
Date: 2024-08-04 10:40:59 -07:00
parent 8c458f8f78
commit 9328ef2dff
18 changed files with 156 additions and 225 deletions
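In short: enostr's serde-based Filter struct is deleted and every crate uses nostrdb's builder-style filter directly. A minimal sketch of the replacement API, pieced together only from calls that appear in the diffs below (builder methods plus from_json/json); exact signatures belong to nostrdb-rs and should be checked there:

use nostrdb::Filter;

fn filter_roundtrip(root: &[u8; 32]) -> Result<(), nostrdb::Error> {
    // builder-style construction, as used in thread.rs and app.rs below
    let replies = Filter::new().kinds([1]).event(root).build();

    // JSON parsing and serialization now go through nostrdb instead of serde
    let parsed = Filter::from_json(r#"{"kinds":[1],"limit":10}"#)?;
    let json = replies.json()?;

    let _ = (parsed, json);
    Ok(())
}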

Cargo.lock (generated)

@@ -1114,8 +1114,8 @@ dependencies = [
"env_logger 0.11.3", "env_logger 0.11.3",
"ewebsock", "ewebsock",
"hex", "hex",
"log",
"nostr", "nostr",
"nostrdb",
"serde", "serde",
"serde_derive", "serde_derive",
"serde_json", "serde_json",
@@ -2296,7 +2296,6 @@ dependencies = [
[[package]] [[package]]
name = "nostrdb" name = "nostrdb"
version = "0.3.4" version = "0.3.4"
source = "git+https://github.com/damus-io/nostrdb-rs?rev=04e5917b44b0112ecfd0eb93e8a1e2c81fce1d75#04e5917b44b0112ecfd0eb93e8a1e2c81fce1d75"
dependencies = [ dependencies = [
"bindgen", "bindgen",
"cc", "cc",
@@ -2343,6 +2342,7 @@ dependencies = [
"tracing", "tracing",
"tracing-subscriber", "tracing-subscriber",
"tracing-wasm", "tracing-wasm",
"uuid",
"wasm-bindgen-futures", "wasm-bindgen-futures",
"winit", "winit",
] ]
@@ -4237,6 +4237,15 @@ version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"
[[package]]
name = "uuid"
version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81dfa00651efa65069b0b6b651f4aaa31ba9e3c3ce0137aaad053604ee7e0314"
dependencies = [
"getrandom",
]
[[package]] [[package]]
name = "valuable" name = "valuable"
version = "0.1.0" version = "0.1.0"


@@ -11,6 +11,11 @@ default-run = "notedeck"
 [lib]
 crate-type = ["lib", "cdylib"]

+[workspace.dependencies]
+#nostrdb = { git = "https://github.com/damus-io/nostrdb-rs", rev = "04e5917b44b0112ecfd0eb93e8a1e2c81fce1d75" }
+nostrdb = { path = "/Users/jb55/dev/github/damus-io/nostrdb-rs" }
+#nostrdb = "0.3.4"
+
 [dependencies]
 #egui-android = { git = "https://github.com/jb55/egui-android.git" }
 egui = { git = "https://github.com/emilk/egui", rev = "fcb7764e48ce00f8f8e58da10f937410d65b0bfb" }
@@ -33,14 +38,13 @@ serde_json = "1.0.89"
 env_logger = "0.10.0"
 puffin_egui = { version = "0.27.0", optional = true }
 puffin = { version = "0.19.0", optional = true }
-nostrdb = { git = "https://github.com/damus-io/nostrdb-rs", rev = "04e5917b44b0112ecfd0eb93e8a1e2c81fce1d75" }
-#nostrdb = { path = "/Users/jb55/dev/github/damus-io/nostrdb-rs" }
-#nostrdb = "0.3.4"
 hex = "0.4.3"
 base32 = "0.4.0"
 strum = "0.26"
 strum_macros = "0.26"
 bitflags = "2.5.0"
+uuid = { version = "1.10.0", features = ["v4"] }
+nostrdb = { workspace = true }

 [target.'cfg(target_os = "macos")'.dependencies]
 security-framework = "2.11.0"


@@ -10,8 +10,8 @@ ewebsock = { version = "0.2.0", features = ["tls"] }
serde_derive = "1" serde_derive = "1"
serde = { version = "1", features = ["derive"] } # You only need this if you want app persistence serde = { version = "1", features = ["derive"] } # You only need this if you want app persistence
serde_json = "1.0.89" serde_json = "1.0.89"
tracing = "0.1.37" nostrdb = { workspace = true }
nostr = { version = "0.30.0" } nostr = { version = "0.30.0" }
hex = "0.4.3" hex = "0.4.3"
log = "0.4.20" tracing = "0.1.40"
env_logger = "0.11.1" env_logger = "0.11.1"


@@ -1,8 +1,9 @@
-use crate::{Filter, Note};
+use crate::{Error, Note};
+use nostrdb::Filter;
 use serde_json::json;

 /// Messages sent by clients, received by relays
-#[derive(Debug, Eq, PartialEq)]
+#[derive(Debug)]
 pub enum ClientMessage {
     Event {
         note: Note,
@@ -34,23 +35,25 @@ impl ClientMessage {
         ClientMessage::Close { sub_id }
     }

-    pub fn to_json(&self) -> String {
-        match self {
+    pub fn to_json(&self) -> Result<String, Error> {
+        Ok(match self {
             Self::Event { note } => json!(["EVENT", note]).to_string(),
             Self::Raw(raw) => raw.clone(),
             Self::Req { sub_id, filters } => {
-                let mut json = json!(["REQ", sub_id]);
-                let mut filters = json!(filters);
-
-                if let Some(json) = json.as_array_mut() {
-                    if let Some(filters) = filters.as_array_mut() {
-                        json.append(filters);
-                    }
-                }
-
-                json.to_string()
+                if filters.is_empty() {
+                    format!("[\"REQ\",\"{}\",{{ }}]", sub_id)
+                } else if filters.len() == 1 {
+                    let filters_json_str = filters[0].json()?;
+                    format!("[\"REQ\",\"{}\",{}]", sub_id, filters_json_str)
+                } else {
+                    let filters_json_str: Result<Vec<String>, Error> = filters
+                        .into_iter()
+                        .map(|f| f.json().map_err(Into::<Error>::into))
+                        .collect();
+                    format!("[\"REQ\",\"{}\",{}]", sub_id, filters_json_str?.join(","))
+                }
             }
             Self::Close { sub_id } => json!(["CLOSE", sub_id]).to_string(),
-        }
+        })
     }
 }
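A hedged usage sketch of the reworked to_json (the Req construction and the sub id are illustrative; only the variant shape and the fallible return type come from the diff above). One filter serializes as ["REQ","<subid>",{...}], several filters are comma-joined, and an empty list degrades to an empty filter object:

use enostr::ClientMessage;
use nostrdb::Filter;

fn send_req_sketch() {
    let msg = ClientMessage::Req {
        sub_id: "abc".to_string(), // illustrative subscription id
        filters: vec![Filter::new().kinds([1]).limit(1).build()],
    };

    // to_json is now fallible because Filter::json() can fail
    match msg.to_json() {
        Ok(json) => println!("{json}"), // e.g. ["REQ","abc",{"kinds":[1],"limit":1}]
        Err(err) => eprintln!("failed to serialize REQ: {err}"),
    }
}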


@@ -14,6 +14,7 @@ pub enum Error {
     InvalidPublicKey,
     // Secp(secp256k1::Error),
     Json(serde_json::Error),
+    Nostrdb(nostrdb::Error),
     Generic(String),
 }
@@ -29,6 +30,7 @@ impl std::cmp::PartialEq for Error {
             // This is slightly wrong but whatevs
             (Error::Json(..), Error::Json(..)) => true,
             (Error::Generic(left), Error::Generic(right)) => left == right,
+            (Error::Nostrdb(left), Error::Nostrdb(right)) => left == right,
             //(Error::Secp(left), Error::Secp(right)) => left == right,
             _ => false,
         }
@@ -47,6 +49,7 @@ impl fmt::Display for Error {
             Self::InvalidPublicKey => write!(f, "invalid public key"),
             //Self::Secp(e) => write!(f, "{e}"),
             Self::Json(e) => write!(f, "{e}"),
+            Self::Nostrdb(e) => write!(f, "{e}"),
             Self::Generic(e) => write!(f, "{e}"),
         }
     }
@@ -85,3 +88,9 @@ impl From<serde_json::Error> for Error {
         Error::Json(e)
     }
 }
+
+impl From<nostrdb::Error> for Error {
+    fn from(e: nostrdb::Error) -> Self {
+        Error::Nostrdb(e)
+    }
+}
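What the new From<nostrdb::Error> impl buys is plain ? propagation wherever enostr::Error is the error type, which is what ClientMessage::to_json relies on above. A small sketch, assuming Error stays re-exported at the crate root as the use crate::Error lines suggest:

use enostr::Error;
use nostrdb::Filter;

fn filter_to_json(filter: &Filter) -> Result<String, Error> {
    // nostrdb::Error converts into enostr::Error via the new From impl
    Ok(filter.json()?)
}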


@@ -1,91 +1 @@
-use crate::{NoteId, Pubkey};
-use serde::{Deserialize, Serialize};
-
-#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
-pub struct Filter {
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub ids: Option<Vec<NoteId>>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub authors: Option<Vec<Pubkey>>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub kinds: Option<Vec<u64>>,
-    #[serde(rename = "#e")]
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub events: Option<Vec<NoteId>>,
-    #[serde(rename = "#p")]
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub pubkeys: Option<Vec<Pubkey>>,
-    #[serde(rename = "#t")]
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub hashtags: Option<Vec<String>>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub since: Option<u64>, // unix timestamp seconds
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub until: Option<u64>, // unix timestamp seconds
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub limit: Option<u16>,
-}
-
-impl Filter {
-    pub fn new() -> Filter {
-        Filter {
-            ids: None,
-            authors: None,
-            kinds: None,
-            events: None,
-            pubkeys: None,
-            hashtags: None,
-            since: None,
-            until: None,
-            limit: None,
-        }
-    }
-
-    pub fn default_limit() -> u16 {
-        250
-    }
-
-    pub fn default_remote_limit() -> u16 {
-        150
-    }
-
-    pub fn ids(mut self, ids: Vec<NoteId>) -> Self {
-        self.ids = Some(ids);
-        self
-    }
-
-    pub fn authors(mut self, authors: Vec<Pubkey>) -> Self {
-        self.authors = Some(authors);
-        self
-    }
-
-    pub fn kinds(mut self, kinds: Vec<u64>) -> Self {
-        self.kinds = Some(kinds);
-        self
-    }
-
-    pub fn events(mut self, events: Vec<NoteId>) -> Self {
-        self.events = Some(events);
-        self
-    }
-
-    pub fn pubkeys(mut self, pubkeys: Vec<Pubkey>) -> Self {
-        self.pubkeys = Some(pubkeys);
-        self
-    }
-
-    pub fn since(mut self, since: u64) -> Self {
-        self.since = Some(since);
-        self
-    }
-
-    pub fn until(mut self, until: u64) -> Self {
-        self.until = Some(until);
-        self
-    }
-
-    pub fn limit(mut self, limit: u16) -> Self {
-        self.limit = Some(limit);
-        self
-    }
-}
+pub type Filter = nostrdb::Filter;
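With the struct gone, enostr::Filter is only an alias, so the two names are interchangeable wherever the old type leaked into signatures. A trivial illustration, assuming the alias keeps being re-exported from the crate root like the struct was:

// both names now refer to the same type
fn into_ndb_filter(filter: enostr::Filter) -> nostrdb::Filter {
    filter
}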


@@ -1,9 +1,9 @@
 use serde::{Deserialize, Deserializer, Serialize, Serializer};

 use crate::Error;
-use log::debug;
 use nostr::bech32::Hrp;
 use std::fmt;
+use tracing::debug;

 #[derive(Debug, Eq, PartialEq, Clone, Hash)]
 pub struct Pubkey([u8; 32]);


@@ -161,7 +161,7 @@ mod tests {
     #[test]
     fn test_handle_valid_event() -> Result<()> {
-        use log::debug;
+        use tracing::debug;

         env_logger::init();

         let valid_event_msg = r#"["EVENT", "random_string", {"id":"70b10f70c1318967eddf12527799411b1a9780ad9c43858f5e5fcd45486a13a5","pubkey":"379e863e8357163b5bce5d2688dc4f1dcc2d505222fb8d74db600f30535dfdfe","created_at":1612809991,"kind":1,"tags":[],"content":"test","sig":"273a9cd5d11455590f4359500bccb7a89428262b96b3ea87a756b770964472f8c3e87f5d5e64d8d2e859a71462a3f477b554565c4f2f326cb01dd7620db71502"}]"#;


@@ -1,9 +1,10 @@
 use ewebsock::{WsMessage, WsReceiver, WsSender};

-use crate::{ClientMessage, Filter, Result};
-use log::info;
+use crate::{ClientMessage, Result};
+use nostrdb::Filter;
 use std::fmt;
 use std::hash::{Hash, Hasher};
+use tracing::{debug, error, info};

 pub mod message;
 pub mod pool;
@@ -60,7 +61,18 @@ impl Relay {
     }

     pub fn send(&mut self, msg: &ClientMessage) {
-        let txt = WsMessage::Text(msg.to_json());
+        let json = match msg.to_json() {
+            Ok(json) => {
+                debug!("sending {} to {}", json, self.url);
+                json
+            }
+            Err(e) => {
+                error!("error serializing json for filter: {e}");
+                return;
+            }
+        };
+
+        let txt = WsMessage::Text(json);
         self.sender.send(txt);
     }


@@ -1,5 +1,6 @@
 use crate::relay::{Relay, RelayStatus};
 use crate::{ClientMessage, Result};
+use nostrdb::Filter;

 use std::time::{Duration, Instant};
@@ -71,6 +72,12 @@ impl RelayPool {
         }
     }

+    pub fn subscribe(&mut self, subid: String, filter: Vec<Filter>) {
+        for relay in &mut self.relays {
+            relay.relay.subscribe(subid.clone(), filter.clone());
+        }
+    }
+
     /// Keep relay connectiongs alive by pinging relays that haven't been
     /// pinged in awhile. Adjust ping rate with [`ping_rate`].
     pub fn keepalive_ping(&mut self, wakeup: impl Fn() + Send + Sync + Clone + 'static) {
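The new RelayPool::subscribe is a plain fan-out: the same subscription id and filter list go to every relay in the pool. A hedged usage sketch mirroring how the thread code later in this commit calls it (the id string is made up):

use enostr::RelayPool;
use nostrdb::Filter;

fn subscribe_to_notes(pool: &mut RelayPool) {
    let filters = vec![Filter::new().kinds([1]).limit(100).build()];
    // every relay in the pool receives the same REQ
    pool.subscribe("example-subid".to_string(), filters);
}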


@@ -1,4 +1,4 @@
[{"limit": 1000, {"limit": 1000,
"kinds": [ "kinds": [
1 1
], ],
@@ -39,4 +39,4 @@
"womeninnostr", "womeninnostr",
"osrs" "osrs"
] ]
}] }


@@ -1 +1 @@
[{"limit": 500, "kinds":[1], "#p": ["32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245"]}] {"limit": 500, "kinds":[1], "#p": ["32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245"]}

File diff suppressed because one or more lines are too long


@@ -6,7 +6,8 @@ use crate::{
 };
 use enostr::NoteId;
 use nostrdb::Transaction;
-use tracing::{info, warn};
+use tracing::{error, info};
+use uuid::Uuid;

 #[derive(Debug, Eq, PartialEq, Copy, Clone)]
 pub enum BarAction {
@@ -45,11 +46,11 @@ fn open_thread(
     let root_id = crate::note::root_note_id_from_selected_id(app, txn, selected_note);
     let thread_res = app.threads.thread_mut(&app.ndb, txn, root_id);

-    // The thread is stale, let's update it
     let (thread, result) = match thread_res {
         ThreadResult::Stale(thread) => {
+            // The thread is stale, let's update it
             let notes = Thread::new_notes(&thread.view.notes, root_id, txn, &app.ndb);
-            let br = if notes.is_empty() {
+            let bar_result = if notes.is_empty() {
                 None
             } else {
                 Some(BarResult::new_thread_notes(
@@ -63,8 +64,9 @@ fn open_thread(
             // are already borrowing it mutably. Let's pass it as a
             // result instead
             //
-            // thread.view.insert(&notes);
-            (thread, br)
+            // thread.view.insert(&notes); <-- no
+            //
+            (thread, bar_result)
         }

         ThreadResult::Fresh(thread) => (thread, None),
@@ -73,18 +75,27 @@ fn open_thread(
     // only start a subscription on nav and if we don't have
     // an active subscription for this thread.
     if thread.subscription().is_none() {
-        *thread.subscription_mut() = app.ndb.subscribe(Thread::filters(root_id)).ok();
+        let filters = Thread::filters(root_id);
+        *thread.subscription_mut() = app.ndb.subscribe(filters.clone()).ok();
+
+        if thread.remote_subscription().is_some() {
+            error!("Found active remote subscription when it was not expected");
+        } else {
+            let subid = Uuid::new_v4().to_string();
+            *thread.remote_subscription_mut() = Some(subid.clone());
+            app.pool.subscribe(subid, filters);
+        }

         match thread.subscription() {
             Some(_sub) => {
                 thread.subscribers += 1;
                 info!(
-                    "Locally subscribing to thread. {} total active subscriptions, {} on this thread",
+                    "Locally/remotely subscribing to thread. {} total active subscriptions, {} on this thread",
                     app.ndb.subscription_count(),
                     thread.subscribers,
                 );
             }
-            None => warn!(
+            None => error!(
                 "Error subscribing locally to selected note '{}''s thread",
                 hex::encode(selected_note)
             ),


@@ -17,15 +17,14 @@ use crate::ui::{self, AccountSelectionWidget, DesktopGlobalPopup};
 use crate::ui::{DesktopSidePanel, RelayView, View};
 use crate::Result;

 use egui_nav::{Nav, NavAction};
-use enostr::{Keypair, RelayPool, SecretKey};
+use enostr::{ClientMessage, Keypair, RelayEvent, RelayMessage, RelayPool, SecretKey};
 use std::cell::RefCell;
 use std::rc::Rc;

 use egui::{Context, Frame, Style};
 use egui_extras::{Size, StripBuilder};
-use enostr::{ClientMessage, Filter, Pubkey, RelayEvent, RelayMessage};
-use nostrdb::{BlockType, Config, Mention, Ndb, Note, NoteKey, Transaction};
+use nostrdb::{BlockType, Config, Filter, Mention, Ndb, Note, NoteKey, Transaction};
 use std::collections::HashSet;
 use std::hash::Hash;
@@ -102,23 +101,27 @@ fn send_initial_filters(damus: &mut Damus, relay_url: &str) {
         let relay = &mut relay.relay;
         if relay.url == relay_url {
             for timeline in &damus.timelines {
-                let mut filter = timeline.filter.clone();
-                for f in &mut filter {
+                let filter = timeline.filter.clone();
+                let new_filters = filter.into_iter().map(|f| {
                     // limit the size of remote filters
-                    let default_limit = enostr::Filter::default_remote_limit();
-                    let lim = f.limit.unwrap_or(default_limit);
+                    let default_limit = crate::filter::default_remote_limit();
+                    let mut lim = f.limit().unwrap_or(default_limit);
+                    let mut filter = f;
                     if lim > default_limit {
-                        f.limit = Some(default_limit);
+                        lim = default_limit;
+                        filter = filter.limit_mut(lim);
                     }

                     let notes = timeline.notes(ViewFilter::NotesAndReplies);
-                    if crate::filter::should_since_optimize(f.limit, notes.len()) {
-                        crate::filter::since_optimize_filter(f, notes);
+                    if crate::filter::should_since_optimize(lim, notes.len()) {
+                        filter = crate::filter::since_optimize_filter(filter, notes);
                     } else {
-                        warn!("Skipping since optimization for {:?}: number of local notes is less than limit, attempting to backfill.", f);
+                        warn!("Skipping since optimization for {:?}: number of local notes is less than limit, attempting to backfill.", filter);
                     }
-                }
-                relay.subscribe(format!("initial{}", c), filter);
+
+                    filter
+                }).collect();
+                relay.subscribe(format!("initial{}", c), new_filters);
                 c += 1;
             }
             return;
@@ -347,11 +350,7 @@ fn setup_profiling() {
 fn setup_initial_nostrdb_subs(damus: &mut Damus) -> Result<()> {
     let timelines = damus.timelines.len();
     for i in 0..timelines {
-        let filters: Vec<nostrdb::Filter> = damus.timelines[i]
-            .filter
-            .iter()
-            .map(crate::filter::convert_enostr_filter)
-            .collect();
+        let filters = damus.timelines[i].filter.clone();
         damus.timelines[i].subscription = Some(damus.ndb.subscribe(filters.clone())?);
         let txn = Transaction::new(&damus.ndb)?;
         debug!(
@@ -363,8 +362,8 @@ fn setup_initial_nostrdb_subs(damus: &mut Damus) -> Result<()> {
             &txn,
             filters,
             damus.timelines[i].filter[0]
-                .limit
-                .unwrap_or(enostr::Filter::default_limit()) as i32,
+                .limit()
+                .unwrap_or(crate::filter::default_limit()) as i32,
         )?;

         let filters = {
@@ -465,22 +464,16 @@ fn get_unknown_ids_filter(ids: &[UnknownId<'_>]) -> Option<Vec<Filter>> {
     let mut filters: Vec<Filter> = vec![];

-    let pks: Vec<Pubkey> = ids
-        .iter()
-        .flat_map(|id| id.is_pubkey().map(Pubkey::new))
-        .collect();
+    let pks: Vec<&[u8; 32]> = ids.iter().flat_map(|id| id.is_pubkey()).collect();
     if !pks.is_empty() {
-        let pk_filter = Filter::new().authors(pks).kinds(vec![0]);
+        let pk_filter = Filter::new().authors(pks).kinds([0]).build();
         filters.push(pk_filter);
     }

-    let note_ids: Vec<enostr::NoteId> = ids
-        .iter()
-        .flat_map(|id| id.is_id().map(|id| enostr::NoteId::new(*id)))
-        .collect();
+    let note_ids: Vec<&[u8; 32]> = ids.iter().flat_map(|id| id.is_id()).collect();
     if !note_ids.is_empty() {
-        filters.push(Filter::new().ids(note_ids));
+        filters.push(Filter::new().ids(note_ids).build());
     }

     Some(filters)
 }
@@ -589,8 +582,8 @@ fn parse_args(args: &[String]) -> Args {
             continue;
         };

-        if let Ok(filter) = serde_json::from_str(filter) {
-            res.timelines.push(Timeline::new(filter));
+        if let Ok(filter) = Filter::from_json(filter) {
+            res.timelines.push(Timeline::new(vec![filter]));
         } else {
             error!("failed to parse filter '{}'", filter);
         }
@@ -628,8 +621,11 @@ fn parse_args(args: &[String]) -> Args {
             continue;
         };

-        if let Ok(filter) = serde_json::from_slice(&data) {
-            res.timelines.push(Timeline::new(filter));
+        if let Some(filter) = std::str::from_utf8(&data)
+            .ok()
+            .and_then(|s| Filter::from_json(s).ok())
+        {
+            res.timelines.push(Timeline::new(vec![filter]));
         } else {
             error!("failed to parse filter in '{}'", filter_file);
         }
@@ -639,8 +635,8 @@ fn parse_args(args: &[String]) -> Args {
     }

     if res.timelines.is_empty() {
-        let filter = serde_json::from_str(include_str!("../queries/timeline.json")).unwrap();
-        res.timelines.push(Timeline::new(filter));
+        let filter = Filter::from_json(include_str!("../queries/timeline.json")).unwrap();
+        res.timelines.push(Timeline::new(vec![filter]));
     }

     res
@@ -749,8 +745,8 @@ impl Damus {
     pub fn mock<P: AsRef<Path>>(data_path: P, is_mobile: bool) -> Self {
         let mut timelines: Vec<Timeline> = vec![];
-        let filter = serde_json::from_str(include_str!("../queries/global.json")).unwrap();
-        timelines.push(Timeline::new(filter));
+        let filter = Filter::from_json(include_str!("../queries/global.json")).unwrap();
+        timelines.push(Timeline::new(vec![filter]));

         let imgcache_dir = data_path.as_ref().join(ImageCache::rel_datadir());
         let _ = std::fs::create_dir_all(imgcache_dir.clone());


@@ -1,72 +1,32 @@
 use crate::note::NoteRef;
+use nostrdb::Filter;

-pub fn should_since_optimize(limit: Option<u16>, num_notes: usize) -> bool {
-    let limit = limit.unwrap_or(enostr::Filter::default_limit()) as usize;
+pub fn should_since_optimize(limit: u64, num_notes: usize) -> bool {
     // rough heuristic for bailing since optimization if we don't have enough notes
-    limit <= num_notes
+    limit as usize <= num_notes
 }

-pub fn since_optimize_filter_with(filter: &mut enostr::Filter, notes: &[NoteRef], since_gap: u64) {
+pub fn since_optimize_filter_with(filter: Filter, notes: &[NoteRef], since_gap: u64) -> Filter {
     // Get the latest entry in the events
     if notes.is_empty() {
-        return;
+        return filter;
     }

     // get the latest note
     let latest = notes[0];
     let since = latest.created_at - since_gap;

-    // update the filters
-    filter.since = Some(since);
+    filter.since_mut(since)
 }

-pub fn since_optimize_filter(filter: &mut enostr::Filter, notes: &[NoteRef]) {
-    since_optimize_filter_with(filter, notes, 60);
+pub fn since_optimize_filter(filter: Filter, notes: &[NoteRef]) -> Filter {
+    since_optimize_filter_with(filter, notes, 60)
 }

-pub fn convert_enostr_filter(filter: &enostr::Filter) -> nostrdb::Filter {
-    let mut nfilter = nostrdb::Filter::new();
-
-    if let Some(ref ids) = filter.ids {
-        nfilter = nfilter.ids(ids.iter().map(|a| *a.bytes()).collect());
-    }
-
-    if let Some(ref authors) = filter.authors {
-        let authors: Vec<[u8; 32]> = authors.iter().map(|a| *a.bytes()).collect();
-        nfilter = nfilter.authors(authors);
-    }
-
-    if let Some(ref kinds) = filter.kinds {
-        nfilter = nfilter.kinds(kinds.clone());
-    }
-
-    // #e
-    if let Some(ref events) = filter.events {
-        nfilter = nfilter.events(events.iter().map(|a| *a.bytes()).collect());
-    }
-
-    // #p
-    if let Some(ref pubkeys) = filter.pubkeys {
-        nfilter = nfilter.pubkeys(pubkeys.iter().map(|a| *a.bytes()).collect());
-    }
-
-    // #t
-    if let Some(ref hashtags) = filter.hashtags {
-        nfilter = nfilter.tags(hashtags.clone(), 't');
-    }
-
-    if let Some(since) = filter.since {
-        nfilter = nfilter.since(since);
-    }
-
-    if let Some(until) = filter.until {
-        nfilter = nfilter.until(until);
-    }
-
-    if let Some(limit) = filter.limit {
-        nfilter = nfilter.limit(limit.into());
-    }
-
-    nfilter.build()
+pub fn default_limit() -> u64 {
+    250
+}
+
+pub fn default_remote_limit() -> u64 {
+    150
 }
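Because the helpers now take and return a Filter by value instead of mutating an enostr::Filter in place, they compose as a pipeline; a small sketch of the pattern used in send_initial_filters above (the function name and placement are illustrative):

use crate::note::NoteRef;
use nostrdb::Filter;

fn tighten_remote_filter(filter: Filter, local_notes: &[NoteRef]) -> Filter {
    let limit = filter.limit().unwrap_or(crate::filter::default_remote_limit());
    if crate::filter::should_since_optimize(limit, local_notes.len()) {
        // returns a new filter whose `since` is bumped to just before the newest local note
        crate::filter::since_optimize_filter(filter, local_notes)
    } else {
        filter
    }
}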


@@ -10,6 +10,7 @@ use tracing::{debug, warn};
 pub struct Thread {
     pub view: TimelineTab,
     sub: Option<Subscription>,
+    remote_sub: Option<String>,
     pub subscribers: i32,
 }

@@ -28,11 +29,13 @@ impl Thread {
         let mut view = TimelineTab::new_with_capacity(ViewFilter::NotesAndReplies, cap);
         view.notes = notes;

         let sub: Option<Subscription> = None;
+        let remote_sub: Option<String> = None;
         let subscribers: i32 = 0;

         Thread {
             view,
             sub,
+            remote_sub,
             subscribers,
         }
@@ -83,14 +86,22 @@ impl Thread {
         self.sub.as_ref()
     }

+    pub fn remote_subscription(&self) -> Option<&String> {
+        self.remote_sub.as_ref()
+    }
+
+    pub fn remote_subscription_mut(&mut self) -> &mut Option<String> {
+        &mut self.remote_sub
+    }
+
     pub fn subscription_mut(&mut self) -> &mut Option<Subscription> {
         &mut self.sub
     }

     fn filters_raw(root: &[u8; 32]) -> Vec<FilterBuilder> {
         vec![
-            nostrdb::Filter::new().kinds(vec![1]).event(root),
-            nostrdb::Filter::new().ids(vec![*root]).limit(1),
+            nostrdb::Filter::new().kinds([1]).event(root),
+            nostrdb::Filter::new().ids([root]).limit(1),
         ]
     }


@@ -7,8 +7,7 @@ use crate::{Damus, Result};
 use crate::route::Route;
 use egui_virtual_list::VirtualList;
-use enostr::Filter;
-use nostrdb::{Note, Subscription, Transaction};
+use nostrdb::{Filter, Note, Subscription, Transaction};
 use std::cell::RefCell;
 use std::collections::HashSet;
 use std::rc::Rc;