local nostrdb subscriptions working
Signed-off-by: William Casarin <jb55@jb55.com>
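
In outline, what the diff below wires together: the home filter that is sent to relays is also converted into a nostrdb filter (crate::filter::convert_enostr_filter), registered as a local subscription when the app initializes, and polled for freshly ingested notes on every update. A minimal standalone sketch of that flow, assembled from the calls that appear in the diff rather than copied from the repository (the function name and filter shape are illustrative):

use nostrdb::{Config, Filter, Ndb, Subscription};

// Hypothetical illustration of the subscribe/ingest/poll cycle used below.
fn home_subscription_sketch(relay_json: &str) -> Result<(), nostrdb::Error> {
    // Open the local note database, as Damus::default does.
    let mut config = Config::new();
    config.set_ingester_threads(2);
    let ndb = Ndb::new(".", &config)?;

    // Build a local filter; the app derives this one from the enostr
    // home filter via crate::filter::convert_enostr_filter.
    let mut filter = Filter::new();
    filter.kinds(vec![1, 42]);
    filter.limit(100);

    // Register the local subscription, then feed relay JSON to the ingester.
    let sub: Subscription = ndb.subscribe(filter)?;
    ndb.process_event(relay_json);

    // Later (each frame, via update_damus -> try_process_event), ask the
    // subscription which newly processed notes matched.
    let new_notes = ndb.poll_for_notes(&sub, 50);
    println!("{} new notes: {:?}", new_notes.len(), new_notes);
    Ok(())
}
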
src/app.rs | 52
@@ -10,6 +10,7 @@ use egui::widgets::Spinner;
 use egui::{Context, Frame, ImageSource, Margin, TextureHandle, TextureId};
 use egui_extras::Size;
 use enostr::{ClientMessage, EventId, Filter, Profile, Pubkey, RelayEvent, RelayMessage};
+use nostrdb::{Config, Ndb, Subscription};
 use poll_promise::Promise;
 use std::collections::{HashMap, HashSet};
 use std::hash::{Hash, Hasher};
@@ -48,25 +49,31 @@ pub struct Damus {
     compose: String,
 
     pool: RelayPool,
+    home_sub: Option<Subscription>,
 
     all_events: HashMap<EventId, Event>,
     events: Vec<EventId>,
 
     img_cache: ImageCache,
+    ndb: Ndb,
 
     frame_history: crate::frame_history::FrameHistory,
 }
 
 impl Default for Damus {
     fn default() -> Self {
+        let mut config = Config::new();
+        config.set_ingester_threads(2);
         Self {
             state: DamusState::Initializing,
             contacts: Contacts::new(),
             all_events: HashMap::new(),
             pool: RelayPool::new(),
+            home_sub: None,
             events: vec![],
             img_cache: HashMap::new(),
             n_panels: 1,
+            ndb: Ndb::new(".", &config).expect("ndb"),
             compose: "".to_string(),
             frame_history: FrameHistory::default(),
         }
@@ -92,14 +99,19 @@ fn relay_setup(pool: &mut RelayPool, ctx: &egui::Context) {
     }
 }
 
-fn send_initial_filters(pool: &mut RelayPool, relay_url: &str) {
-    let filter = Filter::new().limit(100).kinds(vec![1, 42]).pubkeys(
+fn get_home_filter() -> Filter {
+    Filter::new().limit(100).kinds(vec![1, 42]).pubkeys(
         [
             Pubkey::from_hex("32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245")
                 .unwrap(),
         ]
        .into(),
-    );
+    )
+}
+
+fn send_initial_filters(pool: &mut RelayPool, relay_url: &str) {
+    let filter = get_home_filter();
     info!("Sending initial filters to {}", relay_url);
+
     let subid = "initial";
     for relay in &mut pool.relays {
@@ -129,15 +141,23 @@ fn try_process_event(damus: &mut Damus, ctx: &egui::Context) {
     while let Some(ev) = damus.pool.try_recv() {
         let relay = ev.relay.to_owned();
 
-        match ev.event {
+        match (&ev.event).into() {
            RelayEvent::Opened => send_initial_filters(&mut damus.pool, &relay),
            // TODO: handle reconnects
            RelayEvent::Closed => warn!("{} connection closed", &relay),
            RelayEvent::Error(e) => error!("{}", e),
            RelayEvent::Other(msg) => debug!("other event {:?}", &msg),
-            RelayEvent::Message(msg) => process_message(damus, &relay, msg),
+            RelayEvent::Message(msg) => process_message(damus, &relay, &msg),
         }
     }
 
+    // do we have any new processed events?
+    if let Some(ref sub) = damus.home_sub {
+        let new_notes = damus.ndb.poll_for_notes(sub, 50);
+        if new_notes.len() > 0 {
+            info!("{} new notes! {:?}", new_notes.len(), new_notes);
+        }
+    }
     //info!("recv {:?}", ev)
 }
 
 #[cfg(feature = "profiling")]
@@ -145,6 +165,12 @@ fn setup_profiling() {
     puffin::set_scopes_on(true); // tell puffin to collect data
 }
 
+fn setup_initial_nostrdb_subs(damus: &mut Damus) -> Result<()> {
+    let filter: nostrdb::Filter = crate::filter::convert_enostr_filter(&get_home_filter());
+    damus.home_sub = Some(damus.ndb.subscribe(filter)?);
+    Ok(())
+}
+
 fn update_damus(damus: &mut Damus, ctx: &egui::Context) {
     if damus.state == DamusState::Initializing {
         #[cfg(feature = "profiling")]
@@ -154,6 +180,7 @@ fn update_damus(damus: &mut Damus, ctx: &egui::Context) {
         damus.pool = RelayPool::new();
         relay_setup(&mut damus.pool, ctx);
         damus.state = DamusState::Initialized;
+        setup_initial_nostrdb_subs(damus).expect("home subscription failed");
     }
 
     try_process_event(damus, ctx);
@@ -196,15 +223,15 @@ fn process_metadata_event(damus: &mut Damus, ev: &Event) {
     }
 }
 
-fn process_event(damus: &mut Damus, _subid: &str, event: Event) {
+fn process_event(damus: &mut Damus, _subid: &str, event: &str) {
     #[cfg(feature = "profiling")]
     puffin::profile_function!();
 
-    if damus.all_events.get(&event.id).is_some() {
-        return;
-    }
+    //info!("processing event {}", event);
+    damus.ndb.process_event(&event);
 
-    let kind = event.kind;
+    /*
+    let kind = event.kind();
     if kind == 0 {
         process_metadata_event(damus, &event);
     } else if kind == 1 {
@@ -212,6 +239,7 @@ fn process_event(damus: &mut Damus, _subid: &str, event: Event) {
         damus.all_events.insert(cloned_id.clone(), event);
         damus.events.insert(0, cloned_id);
     }
+    */
 }
 
 fn get_unknown_author_ids(damus: &Damus) -> Vec<Pubkey> {
@@ -247,7 +275,7 @@ fn handle_eose(damus: &mut Damus, subid: &str, relay_url: &str) {
     }
 }
 
-fn process_message(damus: &mut Damus, relay: &str, msg: RelayMessage) {
+fn process_message(damus: &mut Damus, relay: &str, msg: &RelayMessage) {
     match msg {
         RelayMessage::Event(subid, ev) => process_event(damus, &subid, ev),
         RelayMessage::Notice(msg) => warn!("Notice from {}: {}", relay, msg),
@@ -3,6 +3,7 @@ use shatter::parser;
 #[derive(Debug)]
 pub enum Error {
     Nostr(enostr::Error),
+    Ndb(nostrdb::Error),
     Shatter(parser::Error),
     Image(image::error::ImageError),
     Generic(String),
@@ -20,6 +21,12 @@ impl From<parser::Error> for Error {
     }
 }
 
+impl From<nostrdb::Error> for Error {
+    fn from(e: nostrdb::Error) -> Self {
+        Error::Ndb(e)
+    }
+}
+
 impl From<image::error::ImageError> for Error {
     fn from(err: image::error::ImageError) -> Self {
         Error::Image(err)
@@ -1,37 +1,36 @@
-impl From<enostr::Filter> for nostrdb::Filter {
-    fn from(filter: enostr::Filter) -> Self {
-        let mut nfilter = nostrdb::Filter::new();
+pub fn convert_enostr_filter(filter: &enostr::Filter) -> nostrdb::Filter {
+    let mut nfilter = nostrdb::Filter::new();
 
-        if let Some(ids) = filter.ids {
-            nfilter.ids(ids)
-        }
-
-        if let Some(authors) = filter.authors {
-            nfilter.authors(authors)
-        }
-
-        if let Some(kinds) = filter.kinds {
-            nfilter.kinds(kinds)
-        }
-
-        // #e
-        if let Some(events) = filter.events {
-            nfilter.tags(events, 'e')
-        }
-
-        // #p
-        if let Some(pubkeys) = filter.pubkeys {
-            nfilter.pubkeys(pubkeys)
-        }
-
-        if let Some(since) = filter.since {
-            nfilter.since(since)
-        }
-
-        if let Some(limit) = filter.limit {
-            nfilter.limit(limit)
-        }
-
-        nfilter
+    if let Some(ref ids) = filter.ids {
+        nfilter.ids(ids.iter().map(|a| *a.bytes()).collect());
+    }
+
+    if let Some(ref authors) = filter.authors {
+        let authors: Vec<[u8; 32]> = authors.iter().map(|a| a.bytes()).collect();
+        nfilter.authors(authors);
+    }
+
+    if let Some(ref kinds) = filter.kinds {
+        nfilter.kinds(kinds.clone());
+    }
+
+    // #e
+    if let Some(ref events) = filter.events {
+        nfilter.events(events.iter().map(|a| *a.bytes()).collect());
+    }
+
+    // #p
+    if let Some(ref pubkeys) = filter.pubkeys {
+        nfilter.pubkeys(pubkeys.iter().map(|a| a.bytes()).collect());
+    }
+
+    if let Some(since) = filter.since {
+        nfilter.since(since);
+    }
+
+    if let Some(limit) = filter.limit {
+        nfilter.limit(limit.into());
+    }
+
+    nfilter
 }
@@ -8,6 +8,7 @@ mod abbrev;
 mod fonts;
 mod images;
 mod result;
+mod filter;
 mod ui;
 mod frame_history;
src/timeline.rs | 23 (new file)
@@ -0,0 +1,23 @@
+pub fn binary_search<T: Ord>(a: &[T], item: &T) -> usize {
+    let mut low = 0;
+    let mut high = a.len();
+
+    while low < high {
+        let mid = low + (high - low) / 2;
+        if item <= &a[mid] {
+            high = mid;
+        } else {
+            low = mid + 1;
+        }
+    }
+
+    low
+}
+
+pub fn binary_insertion_sort<T: Ord>(vec: &mut Vec<T>) {
+    for i in 1..vec.len() {
+        let val = vec.remove(i);
+        let pos = binary_search(&vec[0..i], &val);
+        vec.insert(pos, val);
+    }
+}
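
For reference, the helpers in the new src/timeline.rs keep a Vec ordered by repeated left-most binary-search insertion. A small test sketch that would exercise them if added to that file (the test itself is illustrative, not part of the commit):

#[cfg(test)]
mod tests {
    use super::{binary_insertion_sort, binary_search};

    #[test]
    fn sorts_and_finds_leftmost_insertion_point() {
        let mut v = vec![3, 1, 2, 2, 5, 4];
        binary_insertion_sort(&mut v);
        assert_eq!(v, vec![1, 2, 2, 3, 4, 5]);

        // binary_search returns the left-most index at which `item`
        // could be inserted while keeping the slice sorted.
        assert_eq!(binary_search(&v, &0), 0);
        assert_eq!(binary_search(&v, &2), 1);
        assert_eq!(binary_search(&v, &6), 6);
    }
}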