Improve Follow pack timeline loading logic in the Universe view

Signed-off-by: Daniel D’Aquino <daniel@daquino.me>
Daniel D’Aquino
2025-09-15 16:12:13 -07:00
parent 2185984ed7
commit 0582892cae
8 changed files with 130 additions and 40 deletions

View File

@@ -43,6 +43,10 @@ class RelayPool {
private let network_monitor = NWPathMonitor()
private let network_monitor_queue = DispatchQueue(label: "io.damus.network_monitor")
private var last_network_status: NWPath.Status = .unsatisfied
/// The maximum number of concurrent subscriptions. Any subscription beyond this limit is paused until earlier subscriptions clear.
/// This avoids error states and undefined behaviour caused by hitting subscription limits on the relays, by making the extra subscriptions wait, on the principle that slower is better than broken.
static let MAX_CONCURRENT_SUBSCRIPTION_LIMIT = 10 // This number is only an educated guess at this point.
func close() {
disconnect()
@@ -102,10 +106,17 @@ class RelayPool {
}
@MainActor
func register_handler(sub_id: String, handler: @escaping (RelayURL, NostrConnectionEvent) -> ()) {
func register_handler(sub_id: String, handler: @escaping (RelayURL, NostrConnectionEvent) -> ()) async {
while handlers.count > Self.MAX_CONCURRENT_SUBSCRIPTION_LIMIT {
Log.debug("%s: Too many subscriptions, waiting for subscription pool to clear", for: .networking, sub_id)
try? await Task.sleep(for: .seconds(1))
}
Log.debug("%s: Subscription pool cleared", for: .networking, sub_id)
for handler in handlers {
// don't add duplicate handlers
if handler.sub_id == sub_id {
assertionFailure("Duplicate handlers are not allowed. Proper error handling for this has not been built yet.")
Log.error("Duplicate handlers are not allowed. Error handling for this has not been built yet.", for: .networking)
return
}
}
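Because register_handler can now suspend until the handler count drops below the limit (it polls once per second rather than being signalled), call sites should release their slot once a subscription is finished. A minimal sketch of that pairing, assuming RelayPool keeps an unregister counterpart along the lines of remove_handler(sub_id:) (the names below are illustrative, not taken from this diff):

Task {
    // May wait here, sleeping in one-second intervals, while the pool has more than
    // MAX_CONCURRENT_SUBSCRIPTION_LIMIT handlers registered.
    await pool.register_handler(sub_id: sub_id, handler: handle_event)
}
// ... later, when the subscription is no longer needed:
pool.remove_handler(sub_id: sub_id) // assumed API; removing the handler frees a slot for a waiting registration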

View File

@@ -142,7 +142,7 @@ struct SaveKeysView: View {
add_rw_relay(self.pool, relay)
}
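// register_handler is now async and may wait for a free subscription slot, so it is wrapped in a Task rather than blocking the signup flow.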
self.pool.register_handler(sub_id: "signup", handler: handle_event)
Task { await self.pool.register_handler(sub_id: "signup", handler: handle_event) }
self.loading = true

View File

@@ -0,0 +1,42 @@
//
// CondensedProfilePicturesViewModel.swift
// damus
//
// Created by Daniel D’Aquino on 2025-09-15.
//
import Combine
import Foundation
/// Loads the profiles shown by CondensedProfilePicturesView and notifies the view once they are available.
class CondensedProfilePicturesViewModel: ObservableObject {
    let state: DamusState
    let pubkeys: [Pubkey]
    let maxPictures: Int

    /// The subset of pubkeys whose pictures will actually be shown.
    var shownPubkeys: [Pubkey] {
        return Array(pubkeys.prefix(maxPictures))
    }

    var loadingTask: Task<Void, Never>? = nil

    init(state: DamusState, pubkeys: [Pubkey], maxPictures: Int) {
        self.state = state
        self.pubkeys = pubkeys
        self.maxPictures = min(maxPictures, pubkeys.count)
    }

    /// Cancels any in-flight load and starts fetching the shown profiles.
    func load() {
        loadingTask?.cancel()
        loadingTask = Task { try? await loadingTask() }
    }

    /// Streams metadata events for the shown pubkeys until the end of stored events, then triggers a re-render.
    func loadingTask() async throws {
        let filter = NostrFilter(kinds: [.metadata], authors: shownPubkeys)
        let _ = await state.nostrNetwork.reader.query(filters: [filter])
        for await _ in state.nostrNetwork.reader.streamNotesUntilEndOfStoredEvents(filters: [filter]) {
            // No-op; we just need the events to be loaded into NostrDB.
            try Task.checkCancellation()
        }
        DispatchQueue.main.async {
            // Cause the view to re-render with the newly loaded profiles
            self.objectWillChange.send()
        }
    }
}

View File

@@ -8,26 +8,26 @@
import SwiftUI
struct CondensedProfilePicturesView: View {
let state: DamusState
let pubkeys: [Pubkey]
let maxPictures: Int
let model: CondensedProfilePicturesViewModel
init(state: DamusState, pubkeys: [Pubkey], maxPictures: Int) {
self.state = state
self.pubkeys = pubkeys
self.maxPictures = min(maxPictures, pubkeys.count)
self.model = CondensedProfilePicturesViewModel(state: state, pubkeys: pubkeys, maxPictures: maxPictures)
}
var body: some View {
// Use a ZStack so the profile pictures overlap, stacked on top of each other.
ZStack {
ForEach((0..<maxPictures).reversed(), id: \.self) { index in
ProfilePicView(pubkey: pubkeys[index], size: 32.0, highlight: .none, profiles: state.profiles, disable_animation: state.settings.disable_animation)
ForEach((0..<model.maxPictures).reversed(), id: \.self) { index in
ProfilePicView(pubkey: model.pubkeys[index], size: 32.0, highlight: .none, profiles: model.state.profiles, disable_animation: model.state.settings.disable_animation)
.offset(x: CGFloat(index) * 20)
}
}
// Padding is needed so that other components drawn adjacent to this view don't get drawn on top.
.padding(.trailing, CGFloat((maxPictures - 1) * 20))
.padding(.trailing, CGFloat((model.maxPictures - 1) * 20))
.onAppear {
self.model.load()
}
}
}

View File

@@ -10,9 +10,11 @@ import Foundation
/// The data model for the SearchHome view, which typically holds global content such as the universe feed
class SearchHomeModel: ObservableObject {
var events: EventHolder
var followPackEvents: EventHolder
@Published var loading: Bool = false
var seen_pubkey: Set<Pubkey> = Set()
var follow_pack_seen_pubkey: Set<Pubkey> = Set()
let damus_state: DamusState
let base_subid = UUID().description
let follow_pack_subid = UUID().description
@@ -25,6 +27,9 @@ class SearchHomeModel: ObservableObject {
self.events = EventHolder(on_queue: { ev in
preload_events(state: damus_state, events: [ev])
})
self.followPackEvents = EventHolder(on_queue: { ev in
preload_events(state: damus_state, events: [ev])
})
}
func get_base_filter() -> NostrFilter {
@@ -40,6 +45,12 @@ class SearchHomeModel: ObservableObject {
self.objectWillChange.send()
}
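/// Discards the previously loaded events and fetches fresh ones from the relays.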
@MainActor
func reload() async {
self.events.reset()
await self.load()
}
func load() async {
DispatchQueue.main.async {
self.loading = true
@@ -51,16 +62,23 @@ class SearchHomeModel: ObservableObject {
var follow_list_filter = NostrFilter(kinds: [.follow_list])
follow_list_filter.until = UInt32(Date.now.timeIntervalSince1970)
for await noteLender in damus_state.nostrNetwork.reader.streamNotesUntilEndOfStoredEvents(filters: [get_base_filter(), follow_list_filter], to: to_relays) {
for await noteLender in damus_state.nostrNetwork.reader.streamNotesUntilEndOfStoredEvents(filters: [follow_list_filter], to: to_relays) {
await noteLender.justUseACopy({ await self.handleFollowPackEvent($0) })
}
for await noteLender in damus_state.nostrNetwork.reader.streamNotesUntilEndOfStoredEvents(filters: [get_base_filter()], to: to_relays) {
await noteLender.justUseACopy({ await self.handleEvent($0) })
}
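// Load the profiles referenced by the fetched events and wait for that to finish before clearing the loading state.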
guard let txn = NdbTxn(ndb: damus_state.ndb) else { return }
let allEvents = events.all_events + followPackEvents.all_events
let task = load_profiles(context: "universe", load: .from_events(allEvents), damus_state: damus_state, txn: txn)
try? await task?.value
DispatchQueue.main.async {
self.loading = false
}
guard let txn = NdbTxn(ndb: damus_state.ndb) else { return }
load_profiles(context: "universe", load: .from_events(events.all_events), damus_state: damus_state, txn: txn)
}
@MainActor
@@ -76,6 +94,20 @@ class SearchHomeModel: ObservableObject {
}
}
}
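/// Handles an incoming follow list (follow pack) event and, if it passes the content and duplicate-pubkey checks, adds it to the follow pack timeline.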
@MainActor
func handleFollowPackEvent(_ ev: NostrEvent) {
if ev.known_kind == .follow_list && should_show_event(state: damus_state, ev: ev) && !ev.is_reply() {
if !damus_state.settings.multiple_events_per_pubkey && follow_pack_seen_pubkey.contains(ev.pubkey) {
return
}
follow_pack_seen_pubkey.insert(ev.pubkey)
if self.followPackEvents.insert(ev) {
self.objectWillChange.send()
}
}
}
}
func find_profiles_to_fetch<Y>(profiles: Profiles, load: PubkeysToLoad, cache: EventCache, txn: NdbTxn<Y>) -> [Pubkey] {
@@ -113,28 +145,23 @@ enum PubkeysToLoad {
case from_keys([Pubkey])
}
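/// Requests any profiles that still need to be fetched for the given pubkeys or events. Returns the fetching Task so callers can await its completion, or nil if nothing needs to be fetched.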
func load_profiles<Y>(context: String, load: PubkeysToLoad, damus_state: DamusState, txn: NdbTxn<Y>) {
func load_profiles<Y>(context: String, load: PubkeysToLoad, damus_state: DamusState, txn: NdbTxn<Y>) -> Task<Void, any Error>? {
let authors = find_profiles_to_fetch(profiles: damus_state.profiles, load: load, cache: damus_state.events, txn: txn)
guard !authors.isEmpty else {
return
return nil
}
Task {
return Task {
print("load_profiles[\(context)]: requesting \(authors.count) profiles from relay pool")
let filter = NostrFilter(kinds: [.metadata], authors: authors)
for await item in damus_state.nostrNetwork.reader.subscribe(filters: [filter]) {
for await noteLender in damus_state.nostrNetwork.reader.streamNotesUntilEndOfStoredEvents(filters: [filter]) {
let now = UInt64(Date.now.timeIntervalSince1970)
switch item {
case .event(let lender):
lender.justUseACopy({ event in
if event.known_kind == .metadata {
damus_state.ndb.write_profile_last_fetched(pubkey: event.pubkey, fetched_at: now)
}
})
case .eose:
break
try noteLender.borrow { event in
if event.known_kind == .metadata {
damus_state.ndb.write_profile_last_fetched(pubkey: event.pubkey, fetched_at: now)
}
}
}

View File

@@ -54,7 +54,7 @@ struct SearchHomeView: View {
loading: $model.loading,
damus: damus_state,
show_friend_icon: true,
filter:content_filter(FilterState.posts),
filter: content_filter(FilterState.posts),
content: {
AnyView(VStack(alignment: .leading) {
HStack {
@@ -66,7 +66,7 @@ struct SearchHomeView: View {
.padding(.top)
.padding(.horizontal)
FollowPackTimelineView<AnyView>(events: model.events, loading: $model.loading, damus: damus_state, show_friend_icon: true,filter:content_filter(FilterState.follow_list)
FollowPackTimelineView<AnyView>(events: model.followPackEvents, loading: $model.loading, damus: damus_state, show_friend_icon: true, filter: content_filter(FilterState.follow_list)
).padding(.bottom)
Divider()
@@ -83,20 +83,10 @@ struct SearchHomeView: View {
}.padding(.bottom, 50))
}
)
.refreshable {
// Fetch new information by unsubscribing and resubscribing to the relay
loadingTask?.cancel()
loadingTask = Task { await model.load() }
}
}
var SearchContent: some View {
SearchResultsView(damus_state: damus_state, search: $search)
.refreshable {
// Fetch new information by unsubscribing and resubscribing to the relay
loadingTask?.cancel()
loadingTask = Task { await model.load() }
}
}
var MainContent: some View {
@@ -136,6 +126,12 @@ struct SearchHomeView: View {
.onDisappear {
loadingTask?.cancel()
}
.refreshable {
// Fetch new information by unsubscribing and resubscribing to the relay
loadingTask?.cancel()
loadingTask = Task { await model.reload() }
try? await loadingTask?.value
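// Awaiting the task keeps the pull-to-refresh indicator visible until the reload has finished.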
}
}
}

View File

@@ -95,4 +95,10 @@ class EventHolder: ObservableObject, ScrollQueue {
self.incoming = []
}
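/// Clears both the queued (incoming) and the displayed events so the holder can be reloaded from scratch.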
@MainActor
func reset() {
self.incoming = []
self.events = []
}
}