From 578d47356d8dabd9e1696bc697fea474a4b8d482 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Fri, 15 Aug 2025 17:01:26 -0700 Subject: [PATCH 01/91] Make RelayPool private to NostrNetworkManager and migrate usages MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Daniel D’Aquino --- damus/ContentView.swift | 175 +-------- .../NostrNetworkManager.swift | 178 ++++++++- .../SubscriptionManager.swift | 4 +- damus/Core/Nostr/RelayPool.swift | 14 + damus/Core/Nostr/RelayURL.swift | 2 +- damus/Core/Storage/DamusState.swift | 2 +- .../ActionBar/Models/ActionBarModel.swift | 2 +- .../ActionBar/Views/EventDetailBar.swift | 2 +- damus/Features/Chat/Models/ThreadModel.swift | 80 ++-- damus/Features/Events/EventLoaderView.swift | 39 +- damus/Features/Events/EventMenu.swift | 2 +- .../Features/Events/Models/EventsModel.swift | 50 +-- .../Models/LoadableNostrEventView.swift | 4 +- .../FollowPack/Models/FollowPackModel.swift | 66 ++-- .../Follows/Models/FollowersModel.swift | 73 ++-- .../Follows/Models/FollowingModel.swift | 22 +- .../NIP05/Models/NIP05DomainEventsModel.swift | 59 ++- .../Views/OnboardingSuggestionsView.swift | 2 +- .../Features/Posting/Models/DraftsModel.swift | 4 +- damus/Features/Posting/Views/PostView.swift | 4 +- .../Profile/Models/ProfileModel.swift | 216 +++++------ .../Views/ProfileActionSheetView.swift | 2 +- .../Features/Profile/Views/ProfileView.swift | 1 + .../Features/Relays/Models/RelayFilters.swift | 6 - .../Relays/Views/RelayConfigView.swift | 4 +- .../Relays/Views/RelayDetailView.swift | 17 +- .../Relays/Views/RelayFilterView.swift | 2 +- .../Relays/Views/RelayStatusView.swift | 2 +- damus/Features/Relays/Views/RelayToggle.swift | 2 +- damus/Features/Relays/Views/RelayView.swift | 10 +- .../Relays/Views/UserRelaysView.swift | 2 +- .../Search/Models/SearchHomeModel.swift | 119 +++--- .../Features/Search/Models/SearchModel.swift | 82 ++-- .../Search/Views/SearchHomeView.swift | 13 +- .../Search/Views/SearchingEventView.swift | 9 +- .../Settings/Views/FirstAidSettingsView.swift | 2 +- .../Features/Timeline/Models/HomeModel.swift | 360 ++++++++++-------- .../Models/WalletConnect/WalletConnect+.swift | 20 +- .../Wallet/Views/SendPaymentView.swift | 2 +- damus/Features/Zaps/Models/ZapsModel.swift | 62 ++- .../Zaps/Views/CustomizeZapView.swift | 4 +- damus/Features/Zaps/Views/NoteZapButton.swift | 10 +- damus/Shared/Utilities/Router.swift | 3 +- .../ActionViewController.swift | 2 +- share extension/ShareViewController.swift | 2 +- 45 files changed, 849 insertions(+), 889 deletions(-) diff --git a/damus/ContentView.swift b/damus/ContentView.swift index 239694586..f9411c9ff 100644 --- a/damus/ContentView.swift +++ b/damus/ContentView.swift @@ -381,6 +381,8 @@ struct ContentView: View { self.confirm_mute = true } .onReceive(handle_notify(.attached_wallet)) { nwc in + try? 
damus_state.nostrNetwork.userRelayList.load() // Reload relay list to apply changes + // update the lightning address on our profile when we attach a // wallet with an associated guard let ds = self.damus_state, @@ -472,7 +474,7 @@ struct ContentView: View { } } .onReceive(handle_notify(.disconnect_relays)) { () in - damus_state.nostrNetwork.pool.disconnect() + damus_state.nostrNetwork.disconnect() } .onReceive(NotificationCenter.default.publisher(for: UIApplication.willEnterForegroundNotification)) { obj in print("txn: 📙 DAMUS ACTIVE NOTIFY") @@ -518,7 +520,7 @@ struct ContentView: View { break case .active: print("txn: 📙 DAMUS ACTIVE") - damus_state.nostrNetwork.pool.ping() + damus_state.nostrNetwork.ping() @unknown default: break } @@ -717,8 +719,7 @@ struct ContentView: View { // Purple API is an experimental feature. If not enabled, do not connect `StoreObserver` with Purple API to avoid leaking receipts } - damus_state.nostrNetwork.pool.register_handler(sub_id: sub_id, handler: home.handle_event) - damus_state.nostrNetwork.connect() + if #available(iOS 17, *) { if damus_state.settings.developer_mode && damus_state.settings.reset_tips_on_launch { @@ -734,6 +735,11 @@ struct ContentView: View { Log.error("Failed to configure tips: %s", for: .tips, error.localizedDescription) } } + damus_state.nostrNetwork.connect() + // TODO: Move this to a better spot. Not sure what is the best signal to listen to for sending initial filters + DispatchQueue.main.asyncAfter(deadline: .now() + 3, execute: { + self.home.send_initial_filters() + }) } func music_changed(_ state: MusicState) { @@ -943,169 +949,11 @@ enum FindEventType { } enum FoundEvent { + // TODO: Why not return the profile record itself? Right now the code probably just wants to trigger ndb to ingest the profile record and be available at ndb in parallel, but it would be cleaner if the function that uses this simply does that ndb query on their behalf. case profile(Pubkey) case event(NostrEvent) } -/// Finds an event from NostrDB if it exists, or from the network -/// -/// This is the callback version. There is also an asyc/await version of this function. -/// -/// - Parameters: -/// - state: Damus state -/// - query_: The query, including the event being looked for, and the relays to use when looking -/// - callback: The function to call with results -func find_event(state: DamusState, query query_: FindEvent, callback: @escaping (FoundEvent?) -> ()) { - return find_event_with_subid(state: state, query: query_, subid: UUID().description, callback: callback) -} - -/// Finds an event from NostrDB if it exists, or from the network -/// -/// This is a the async/await version of `find_event`. Use this when using callbacks is impossible or cumbersome. -/// -/// - Parameters: -/// - state: Damus state -/// - query_: The query, including the event being looked for, and the relays to use when looking -/// - callback: The function to call with results -func find_event(state: DamusState, query query_: FindEvent) async -> FoundEvent? { - await withCheckedContinuation { continuation in - find_event(state: state, query: query_) { event in - var already_resumed = false - if !already_resumed { // Ensure we do not resume twice, as it causes a crash - continuation.resume(returning: event) - already_resumed = true - } - } - } -} - -func find_event_with_subid(state: DamusState, query query_: FindEvent, subid: String, callback: @escaping (FoundEvent?) -> ()) { - - var filter: NostrFilter? 
= nil - let find_from = query_.find_from - let query = query_.type - - switch query { - case .profile(let pubkey): - if let profile_txn = state.ndb.lookup_profile(pubkey), - let record = profile_txn.unsafeUnownedValue, - record.profile != nil - { - callback(.profile(pubkey)) - return - } - filter = NostrFilter(kinds: [.metadata], limit: 1, authors: [pubkey]) - - case .event(let evid): - if let ev = state.events.lookup(evid) { - callback(.event(ev)) - return - } - - filter = NostrFilter(ids: [evid], limit: 1) - } - - var attempts: Int = 0 - var has_event = false - guard let filter else { return } - - state.nostrNetwork.pool.subscribe_to(sub_id: subid, filters: [filter], to: find_from) { relay_id, res in - guard case .nostr_event(let ev) = res else { - return - } - - guard ev.subid == subid else { - return - } - - switch ev { - case .ok: - break - case .event(_, let ev): - has_event = true - state.nostrNetwork.pool.unsubscribe(sub_id: subid) - - switch query { - case .profile: - if ev.known_kind == .metadata { - callback(.profile(ev.pubkey)) - } - case .event: - callback(.event(ev)) - } - case .eose: - if !has_event { - attempts += 1 - if attempts >= state.nostrNetwork.pool.our_descriptors.count { - callback(nil) // If we could not find any events in any of the relays we are connected to, send back nil - } - } - state.nostrNetwork.pool.unsubscribe(sub_id: subid, to: [relay_id]) // We are only finding an event once, so close subscription on eose - case .notice: - break - case .auth: - break - } - } -} - - -/// Finds a replaceable event based on an `naddr` address. -/// -/// This is the callback version of the function. There is another function that makes use of async/await -/// -/// - Parameters: -/// - damus_state: The Damus state -/// - naddr: the `naddr` address -/// - callback: A function to handle the found event -func naddrLookup(damus_state: DamusState, naddr: NAddr, callback: @escaping (NostrEvent?) -> ()) { - let nostrKinds: [NostrKind]? = NostrKind(rawValue: naddr.kind).map { [$0] } - - let filter = NostrFilter(kinds: nostrKinds, authors: [naddr.author]) - - let subid = UUID().description - - damus_state.nostrNetwork.pool.subscribe_to(sub_id: subid, filters: [filter], to: nil) { relay_id, res in - guard case .nostr_event(let ev) = res else { - damus_state.nostrNetwork.pool.unsubscribe(sub_id: subid, to: [relay_id]) - return - } - - if case .event(_, let ev) = ev { - for tag in ev.tags { - if(tag.count >= 2 && tag[0].string() == "d"){ - if (tag[1].string() == naddr.identifier){ - damus_state.nostrNetwork.pool.unsubscribe(sub_id: subid, to: [relay_id]) - callback(ev) - return - } - } - } - } - damus_state.nostrNetwork.pool.unsubscribe(sub_id: subid, to: [relay_id]) - } -} - -/// Finds a replaceable event based on an `naddr` address. -/// -/// This is the async/await version of the function. Another version of this function which makes use of callback functions also exists . -/// -/// - Parameters: -/// - damus_state: The Damus state -/// - naddr: the `naddr` address -/// - callback: A function to handle the found event -func naddrLookup(damus_state: DamusState, naddr: NAddr) async -> NostrEvent? { - await withCheckedContinuation { continuation in - var already_resumed = false - naddrLookup(damus_state: damus_state, naddr: naddr) { event in - if !already_resumed { // Ensure we do not resume twice, as it causes a crash - continuation.resume(returning: event) - already_resumed = true - } - } - } -} - func timeline_name(_ timeline: Timeline?) 
-> String {
     guard let timeline else {
         return ""
@@ -1260,4 +1108,3 @@ func logout(_ state: DamusState?)
     state?.close()
     notify(.logout)
 }
-
diff --git a/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift b/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift
index d879ba49c..0e435f6d7 100644
--- a/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift
+++ b/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift
@@ -24,7 +24,7 @@ class NostrNetworkManager {
     /// ## Implementation notes
     ///
     /// - This will be marked `private` in the future to prevent other code from accessing the relay pool directly. Code outside this layer should use a higher level interface
-    let pool: RelayPool // TODO: Make this private and make higher level interface for classes outside the NostrNetworkManager
+    private let pool: RelayPool // TODO: Make a higher-level interface for classes outside the NostrNetworkManager
     /// A delegate that allows us to interact with the rest of app without introducing hard or circular dependencies
     private var delegate: Delegate
     /// Manages the user's relay list, controls RelayPool's connected relays
@@ -51,6 +51,14 @@ class NostrNetworkManager {
     func connect() {
         self.userRelayList.connect()
     }
+    
+    func disconnect() {
+        self.pool.disconnect()
+    }
+    
+    func ping() {
+        self.pool.ping()
+    }
 
     func relaysForEvent(event: NostrEvent) -> [RelayURL] {
         // TODO(tyiu) Ideally this list would be sorted by the event author's outbox relay preferences
@@ -61,6 +69,174 @@ class NostrNetworkManager {
 
         return []
     }
+    
+    // TODO: ORGANIZE THESE
+    
+    // MARK: - Communication with the Nostr Network
+    /// ## Implementation notes
+    ///
+    /// - This class hides the relay pool on purpose, to keep other code from dealing with complex relay + nostrDB logic.
+    /// - Instead, we provide an easy to use interface so that normal code can just get the info they want.
+    /// - This is also to help us migrate to the relay model.
+    // TODO: Define a better interface. This is a temporary scaffold to replace direct relay pool access. After that is done, we can refactor this interface to be cleaner and reduce nonsense.
+    
+    func sendToNostrDB(event: NostrEvent) {
+        self.pool.send_raw_to_local_ndb(.typical(.event(event)))
+    }
+    
+    func send(event: NostrEvent) {
+        self.pool.send(.event(event))
+    }
+    
+    func query(filters: [NostrFilter], to: [RelayURL]? = nil) async -> [NostrEvent] {
+        var events: [NostrEvent] = []
+        for await item in self.reader.subscribe(filters: filters, to: to) {
+            switch item {
+            case .event(let borrow):
+                try? borrow { event in
+                    events.append(event.toOwned())
+                }
+            case .eose:
+                break
+            }
+        }
+        return events
+    }
+    
+    /// Finds a replaceable event based on an `naddr` address.
+    ///
+    /// - Parameters:
+    ///   - naddr: the `naddr` address
+    func lookup(naddr: NAddr) async -> NostrEvent? {
+        let nostrKinds: [NostrKind]? = NostrKind(rawValue: naddr.kind).map { [$0] }
+        
+        let filter = NostrFilter(kinds: nostrKinds, authors: [naddr.author])
+        
+        for await item in self.reader.subscribe(filters: [filter]) {
+            switch item {
+            case .event(let borrow):
+                var event: NostrEvent? = nil
+                try? borrow { ev in
+                    event = ev.toOwned()
+                }
+                if event?.referenced_params.first?.param.string() == naddr.identifier {
+                    return event
+                }
+            case .eose:
+                break
+            }
+        }
+        return nil
+    }
+    
+    // TODO: Improve this. This is mostly intact to keep compatibility with its predecessor, but we can do better
+    func findEvent(query: FindEvent) async -> FoundEvent? {
+        var filter: NostrFilter? = nil
+        let find_from = query.find_from
+        let query = query.type
+        
+        switch query {
+        case .profile(let pubkey):
+            if let profile_txn = delegate.ndb.lookup_profile(pubkey),
+               let record = profile_txn.unsafeUnownedValue,
+               record.profile != nil
+            {
+                return .profile(pubkey)
+            }
+            filter = NostrFilter(kinds: [.metadata], limit: 1, authors: [pubkey])
+        case .event(let evid):
+            if let event = delegate.ndb.lookup_note(evid)?.unsafeUnownedValue?.to_owned() {
+                return .event(event)
+            }
+            filter = NostrFilter(ids: [evid], limit: 1)
+        }
+        
+        var attempts: Int = 0
+        var has_event = false
+        guard let filter else { return nil }
+        
+        for await item in self.reader.subscribe(filters: [filter], to: find_from) {
+            switch item {
+            case .event(let borrow):
+                var result: FoundEvent? = nil
+                try? borrow { event in
+                    switch query {
+                    case .profile:
+                        if event.known_kind == .metadata {
+                            result = .profile(event.pubkey)
+                        }
+                    case .event:
+                        result = .event(event.toOwned())
+                    }
+                }
+                return result
+            case .eose:
+                return nil
+            }
+        }
+        return nil
+    }
+    
+    func getRelay(_ id: RelayURL) -> RelayPool.Relay? {
+        pool.get_relay(id)
+    }
+    
+    var connectedRelays: [RelayPool.Relay] {
+        self.pool.relays
+    }
+    
+    var ourRelayDescriptors: [RelayPool.RelayDescriptor] {
+        self.pool.our_descriptors
+    }
+    
+    func relayURLsThatSawNote(id: NoteId) -> Set<RelayURL>? {
+        return self.pool.seen[id]
+    }
+    
+    func determineToRelays(filters: RelayFilters) -> [RelayURL] {
+        return self.pool.our_descriptors
+            .map { $0.url }
+            .filter { !filters.is_filtered(timeline: .search, relay_id: $0) }
+    }
+    
+    // MARK: NWC
+    // TODO: Move this to NWCManager
+    
+    @discardableResult
+    func nwcPay(url: WalletConnectURL, post: PostBox, invoice: String, delay: TimeInterval? = 5.0, on_flush: OnFlush? = nil, zap_request: NostrEvent? = nil) -> NostrEvent? {
+        WalletConnect.pay(url: url, pool: self.pool, post: post, invoice: invoice, zap_request: zap_request, delay: delay)
+    }
+    
+    func requestTransactionList(url: WalletConnectURL, delay: TimeInterval? = 0.0, on_flush: OnFlush? = nil) {
+        WalletConnect.request_transaction_list(url: url, pool: self.pool, post: self.postbox, delay: delay, on_flush: on_flush)
+    }
+    
+    func requestBalanceInformation(url: WalletConnectURL, delay: TimeInterval? = 0.0, on_flush: OnFlush? = nil) {
+        WalletConnect.request_balance_information(url: url, pool: self.pool, post: self.postbox, delay: delay, on_flush: on_flush)
+    }
+    
+    /// Send a donation zap to the Damus team
+    func send_donation_zap(nwc: WalletConnectURL, percent: Int, base_msats: Int64) async {
+        let percent_f = Double(percent) / 100.0
+        let donations_msats = Int64(percent_f * Double(base_msats))
+        
+        let payreq = LNUrlPayRequest(allowsNostr: true, commentAllowed: nil, nostrPubkey: "", callback: "https://sendsats.lol/@damus")
+        guard let invoice = await fetch_zap_invoice(payreq, zapreq: nil, msats: donations_msats, zap_type: .non_zap, comment: nil) else {
+            // we failed... oh well. no donation for us.
+ print("damus-donation failed to fetch invoice") + return + } + + print("damus-donation donating...") + WalletConnect.pay(url: nwc, pool: self.pool, post: self.postbox, invoice: invoice, zap_request: nil, delay: nil) + } + + + // MARK: - App lifecycle functions + + func close() { + pool.close() + } } diff --git a/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift b/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift index 536c897f2..9de44e8ba 100644 --- a/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift +++ b/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift @@ -30,10 +30,10 @@ extension NostrNetworkManager { /// /// - Parameter filters: The nostr filters to specify what kind of data to subscribe to /// - Returns: An async stream of nostr data - func subscribe(filters: [NostrFilter]) -> AsyncStream { + func subscribe(filters: [NostrFilter], to desiredRelays: [RelayURL]? = nil) -> AsyncStream { return AsyncStream { continuation in let streamTask = Task { - for await item in self.pool.subscribe(filters: filters) { + for await item in self.pool.subscribe(filters: filters, to: desiredRelays) { switch item { case .eose: continuation.yield(.eose) case .event(let nostrEvent): diff --git a/damus/Core/Nostr/RelayPool.swift b/damus/Core/Nostr/RelayPool.swift index 5f74727aa..be9e1be97 100644 --- a/damus/Core/Nostr/RelayPool.swift +++ b/damus/Core/Nostr/RelayPool.swift @@ -19,6 +19,11 @@ struct QueuedRequest { let skip_ephemeral: Bool } +struct SeenEvent: Hashable { + let relay_id: RelayURL + let evid: NoteId +} + /// Establishes and manages connections and subscriptions to a list of relays. class RelayPool { private(set) var relays: [Relay] = [] @@ -31,6 +36,8 @@ class RelayPool { var keypair: Keypair? var message_received_function: (((String, RelayDescriptor)) -> Void)? var message_sent_function: (((String, Relay)) -> Void)? + var delegate: Delegate? 
+ private(set) var signal: SignalModel = SignalModel() private let network_monitor = NWPathMonitor() private let network_monitor_queue = DispatchQueue(label: "io.damus.network_monitor") @@ -410,3 +417,10 @@ func add_rw_relay(_ pool: RelayPool, _ url: RelayURL) { } +extension RelayPool { + protocol Delegate { + func latestRelayListChanged(_ newEvent: NdbNote) + } +} + + diff --git a/damus/Core/Nostr/RelayURL.swift b/damus/Core/Nostr/RelayURL.swift index 53f6fbe10..2ced393ee 100644 --- a/damus/Core/Nostr/RelayURL.swift +++ b/damus/Core/Nostr/RelayURL.swift @@ -7,7 +7,7 @@ import Foundation -public struct RelayURL: Hashable, Equatable, Codable, CodingKeyRepresentable, Identifiable, Comparable, CustomStringConvertible { +public struct RelayURL: Hashable, Equatable, Codable, CodingKeyRepresentable, Identifiable, Comparable, CustomStringConvertible, Sendable { private(set) var url: URL public var id: URL { diff --git a/damus/Core/Storage/DamusState.swift b/damus/Core/Storage/DamusState.swift index dbabdf3f1..1155799e4 100644 --- a/damus/Core/Storage/DamusState.swift +++ b/damus/Core/Storage/DamusState.swift @@ -164,7 +164,7 @@ class DamusState: HeadlessDamusState { try await self.push_notification_client.revoke_token() } wallet.disconnect() - nostrNetwork.pool.close() + nostrNetwork.close() ndb.close() } diff --git a/damus/Features/Actions/ActionBar/Models/ActionBarModel.swift b/damus/Features/Actions/ActionBar/Models/ActionBarModel.swift index 971639ed1..7c9b06d9a 100644 --- a/damus/Features/Actions/ActionBar/Models/ActionBarModel.swift +++ b/damus/Features/Actions/ActionBar/Models/ActionBarModel.swift @@ -58,7 +58,7 @@ class ActionBarModel: ObservableObject { self.our_zap = damus.zaps.our_zaps[evid]?.first self.our_reply = damus.replies.our_reply(evid) self.our_quote_repost = damus.quote_reposts.our_events[evid] - self.relays = (damus.nostrNetwork.pool.seen[evid] ?? []).count + self.relays = (damus.nostrNetwork.relayURLsThatSawNote(id: evid) ?? []).count self.objectWillChange.send() } diff --git a/damus/Features/Actions/ActionBar/Views/EventDetailBar.swift b/damus/Features/Actions/ActionBar/Views/EventDetailBar.swift index 038979943..604c4e306 100644 --- a/damus/Features/Actions/ActionBar/Views/EventDetailBar.swift +++ b/damus/Features/Actions/ActionBar/Views/EventDetailBar.swift @@ -61,7 +61,7 @@ struct EventDetailBar: View { } if bar.relays > 0 { - let relays = Array(state.nostrNetwork.pool.seen[target] ?? []) + let relays = Array(state.nostrNetwork.relayURLsThatSawNote(id: target) ?? []) NavigationLink(value: Route.UserRelays(relays: relays)) { let nounString = pluralizedString(key: "relays_count", count: bar.relays) let noun = Text(nounString).foregroundColor(.gray) diff --git a/damus/Features/Chat/Models/ThreadModel.swift b/damus/Features/Chat/Models/ThreadModel.swift index 0e21e586f..332b11de9 100644 --- a/damus/Features/Chat/Models/ThreadModel.swift +++ b/damus/Features/Chat/Models/ThreadModel.swift @@ -56,12 +56,7 @@ class ThreadModel: ObservableObject { /// The damus state, needed to access the relay pool and load the thread events let damus_state: DamusState - private let profiles_subid = UUID().description - private let base_subid = UUID().description - private let meta_subid = UUID().description - private var subids: [String] { - return [profiles_subid, base_subid, meta_subid] - } + private var listener: Task? // MARK: Initialization @@ -86,17 +81,6 @@ class ThreadModel: ObservableObject { // MARK: Relay pool subscription management - /// Unsubscribe from events in the relay pool. 
Call this when unloading the view - func unsubscribe() { - self.damus_state.nostrNetwork.pool.remove_handler(sub_id: base_subid) - self.damus_state.nostrNetwork.pool.remove_handler(sub_id: meta_subid) - self.damus_state.nostrNetwork.pool.remove_handler(sub_id: profiles_subid) - self.damus_state.nostrNetwork.pool.unsubscribe(sub_id: base_subid) - self.damus_state.nostrNetwork.pool.unsubscribe(sub_id: meta_subid) - self.damus_state.nostrNetwork.pool.unsubscribe(sub_id: profiles_subid) - Log.info("unsubscribing to thread %s with sub_id %s", for: .render, original_event.id.hex(), base_subid) - } - /// Subscribe to events in this thread. Call this when loading the view. func subscribe() { var meta_events = NostrFilter() @@ -127,10 +111,27 @@ class ThreadModel: ObservableObject { let base_filters = [event_filter, ref_events] let meta_filters = [meta_events, quote_events] - - Log.info("subscribing to thread %s with sub_id %s", for: .render, original_event.id.hex(), base_subid) - damus_state.nostrNetwork.pool.subscribe(sub_id: base_subid, filters: base_filters, handler: handle_event) - damus_state.nostrNetwork.pool.subscribe(sub_id: meta_subid, filters: meta_filters, handler: handle_event) + + self.listener?.cancel() + self.listener = Task { + Log.info("subscribing to thread %s ", for: .render, original_event.id.hex()) + for await item in damus_state.nostrNetwork.reader.subscribe(filters: base_filters + meta_filters) { + switch item { + case .event(let borrow): + try? borrow { event in + handle_event(ev: event.toOwned()) + } + case .eose: + guard let txn = NdbTxn(ndb: damus_state.ndb) else { return } + load_profiles(context: "thread", load: .from_events(Array(event_map.events)), damus_state: damus_state, txn: txn) + } + } + } + } + + func unsubscribe() { + self.listener?.cancel() + self.listener = nil } /// Adds an event to this thread. @@ -175,35 +176,20 @@ class ThreadModel: ObservableObject { /// /// Marked as private because it is this class' responsibility to load events, not the view's. 
Simplify the interface @MainActor - private func handle_event(relay_id: RelayURL, ev: NostrConnectionEvent) { - let (sub_id, done) = handle_subid_event(pool: damus_state.nostrNetwork.pool, relay_id: relay_id, ev: ev) { sid, ev in - guard subids.contains(sid) else { - return + private func handle_event(ev: NostrEvent) { + if ev.known_kind == .zap { + process_zap_event(state: damus_state, ev: ev) { zap in + } - - if ev.known_kind == .zap { - process_zap_event(state: damus_state, ev: ev) { zap in - - } - } else if ev.is_textlike { - // handle thread quote reposts, we just count them instead of - // adding them to the thread - if let target = ev.is_quote_repost, target == self.selected_event.id { - //let _ = self.damus_state.quote_reposts.add_event(ev, target: target) - } else { - self.add_event(ev, keypair: damus_state.keypair) - } + } else if ev.is_textlike { + // handle thread quote reposts, we just count them instead of + // adding them to the thread + if let target = ev.is_quote_repost, target == self.selected_event.id { + //let _ = self.damus_state.quote_reposts.add_event(ev, target: target) + } else { + self.add_event(ev, keypair: damus_state.keypair) } } - - guard done, let sub_id, subids.contains(sub_id) else { - return - } - - if sub_id == self.base_subid { - guard let txn = NdbTxn(ndb: damus_state.ndb) else { return } - load_profiles(context: "thread", profiles_subid: self.profiles_subid, relay_id: relay_id, load: .from_events(Array(event_map.events)), damus_state: damus_state, txn: txn) - } } // MARK: External control interface diff --git a/damus/Features/Events/EventLoaderView.swift b/damus/Features/Events/EventLoaderView.swift index 6028895b7..511314ca2 100644 --- a/damus/Features/Events/EventLoaderView.swift +++ b/damus/Features/Events/EventLoaderView.swift @@ -13,6 +13,7 @@ struct EventLoaderView: View { let event_id: NoteId @State var event: NostrEvent? @State var subscription_uuid: String = UUID().description + @State var loadingTask: Task? = nil let content: (NostrEvent) -> Content init(damus_state: DamusState, event_id: NoteId, @ViewBuilder content: @escaping (NostrEvent) -> Content) { @@ -24,34 +25,24 @@ struct EventLoaderView: View { } func unsubscribe() { - damus_state.nostrNetwork.pool.unsubscribe(sub_id: subscription_uuid) + self.loadingTask?.cancel() } func subscribe(filters: [NostrFilter]) { - damus_state.nostrNetwork.pool.register_handler(sub_id: subscription_uuid, handler: handle_event) - damus_state.nostrNetwork.pool.send(.subscribe(.init(filters: filters, sub_id: subscription_uuid))) - } - - func handle_event(relay_id: RelayURL, ev: NostrConnectionEvent) { - guard case .nostr_event(let nostr_response) = ev else { - return - } - - guard case .event(let id, let nostr_event) = nostr_response else { - return - } - - guard id == subscription_uuid else { - return - } - - if event != nil { - return + self.loadingTask?.cancel() + self.loadingTask = Task { + for await item in await damus_state.nostrNetwork.reader.subscribe(filters: filters) { + switch item { + case .event(let borrow): + try? 
borrow { ev in + event = ev.toOwned() + } + break + case .eose: + break + } + } } - - event = nostr_event - - unsubscribe() } func load() { diff --git a/damus/Features/Events/EventMenu.swift b/damus/Features/Events/EventMenu.swift index c43eb6a1f..28691fdd2 100644 --- a/damus/Features/Events/EventMenu.swift +++ b/damus/Features/Events/EventMenu.swift @@ -152,7 +152,7 @@ struct MenuItems: View { profileModel.subscribeToFindRelays() } .onDisappear() { - profileModel.unsubscribeFindRelays() + profileModel.findRelaysListener?.cancel() } } } diff --git a/damus/Features/Events/Models/EventsModel.swift b/damus/Features/Events/Models/EventsModel.swift index e2d5fef11..381a62700 100644 --- a/damus/Features/Events/Models/EventsModel.swift +++ b/damus/Features/Events/Models/EventsModel.swift @@ -11,10 +11,10 @@ class EventsModel: ObservableObject { let state: DamusState let target: NoteId let kind: QueryKind - let sub_id = UUID().uuidString let profiles_id = UUID().uuidString var events: EventHolder @Published var loading: Bool + var loadingTask: Task? enum QueryKind { case kind(NostrKind) @@ -68,13 +68,29 @@ class EventsModel: ObservableObject { } func subscribe() { - state.nostrNetwork.pool.subscribe(sub_id: sub_id, - filters: [get_filter()], - handler: handle_nostr_event) + loadingTask?.cancel() + loadingTask = Task { + for await item in state.nostrNetwork.reader.subscribe(filters: [get_filter()]) { + switch item { + case .event(let borrow): + var event: NostrEvent? = nil + try? borrow { ev in + event = ev.toOwned() + } + guard let event else { return } + if events.insert(event) { objectWillChange.send() } + case .eose: + break + } + } + self.loading = false + guard let txn = NdbTxn(ndb: self.state.ndb) else { return } + load_profiles(context: "events_model", load: .from_events(events.all_events), damus_state: state, txn: txn) + } } func unsubscribe() { - state.nostrNetwork.pool.unsubscribe(sub_id: sub_id) + loadingTask?.cancel() } private func handle_event(relay_id: RelayURL, ev: NostrEvent) { @@ -82,28 +98,4 @@ class EventsModel: ObservableObject { objectWillChange.send() } } - - func handle_nostr_event(relay_id: RelayURL, ev: NostrConnectionEvent) { - guard case .nostr_event(let nev) = ev, nev.subid == self.sub_id - else { - return - } - - switch nev { - case .event(_, let ev): - handle_event(relay_id: relay_id, ev: ev) - case .notice: - break - case .ok: - break - case .auth: - break - case .eose: - self.loading = false - guard let txn = NdbTxn(ndb: self.state.ndb) else { - return - } - load_profiles(context: "events_model", profiles_subid: profiles_id, relay_id: relay_id, load: .from_events(events.all_events), damus_state: state, txn: txn) - } - } } diff --git a/damus/Features/Events/Models/LoadableNostrEventView.swift b/damus/Features/Events/Models/LoadableNostrEventView.swift index 6d4b4ed3d..af9bf3487 100644 --- a/damus/Features/Events/Models/LoadableNostrEventView.swift +++ b/damus/Features/Events/Models/LoadableNostrEventView.swift @@ -50,7 +50,7 @@ class LoadableNostrEventViewModel: ObservableObject { /// Asynchronously find an event from NostrDB or from the network (if not available on NostrDB) private func loadEvent(noteId: NoteId) async -> NostrEvent? 
{ - let res = await find_event(state: damus_state, query: .event(evid: noteId)) + let res = await damus_state.nostrNetwork.findEvent(query: .event(evid: noteId)) guard let res, case .event(let ev) = res else { return nil } return ev } @@ -78,7 +78,7 @@ class LoadableNostrEventViewModel: ObservableObject { return .unknown_or_unsupported_kind } case .naddr(let naddr): - guard let event = await naddrLookup(damus_state: damus_state, naddr: naddr) else { return .not_found } + guard let event = await damus_state.nostrNetwork.lookup(naddr: naddr) else { return .not_found } return .loaded(route: Route.Thread(thread: ThreadModel(event: event, damus_state: damus_state))) } } diff --git a/damus/Features/FollowPack/Models/FollowPackModel.swift b/damus/Features/FollowPack/Models/FollowPackModel.swift index 31e2bdc71..b542b50cd 100644 --- a/damus/Features/FollowPack/Models/FollowPackModel.swift +++ b/damus/Features/FollowPack/Models/FollowPackModel.swift @@ -13,7 +13,7 @@ class FollowPackModel: ObservableObject { @Published var loading: Bool = false let damus_state: DamusState - let subid = UUID().description + var listener: Task? = nil let limit: UInt32 = 500 init(damus_state: DamusState) { @@ -25,52 +25,40 @@ class FollowPackModel: ObservableObject { func subscribe(follow_pack_users: [Pubkey]) { loading = true - let to_relays = determine_to_relays(pool: damus_state.nostrNetwork.pool, filters: damus_state.relay_filters) - var filter = NostrFilter(kinds: [.text, .chat]) - filter.until = UInt32(Date.now.timeIntervalSince1970) - filter.authors = follow_pack_users - filter.limit = 500 - - damus_state.nostrNetwork.pool.subscribe(sub_id: subid, filters: [filter], handler: handle_event, to: to_relays) + self.listener = Task { + await self.listenForUpdates(follow_pack_users: follow_pack_users) + } } func unsubscribe(to: RelayURL? = nil) { loading = false - damus_state.nostrNetwork.pool.unsubscribe(sub_id: subid, to: to.map { [$0] }) + self.listener?.cancel() } - - func handle_event(relay_id: RelayURL, conn_ev: NostrConnectionEvent) { - guard case .nostr_event(let event) = conn_ev else { - return - } + + func listenForUpdates(follow_pack_users: [Pubkey]) async { + let to_relays = damus_state.nostrNetwork.determineToRelays(filters: damus_state.relay_filters) + var filter = NostrFilter(kinds: [.text, .chat]) + filter.until = UInt32(Date.now.timeIntervalSince1970) + filter.authors = follow_pack_users + filter.limit = 500 - switch event { - case .event(let sub_id, let ev): - guard sub_id == self.subid else { - return - } - if ev.is_textlike && should_show_event(state: damus_state, ev: ev) && !ev.is_reply() - { - if self.events.insert(ev) { - self.objectWillChange.send() + for await item in damus_state.nostrNetwork.reader.subscribe(filters: [filter], to: to_relays) { + switch item { + case .event(borrow: let borrow): + var event: NostrEvent? = nil + try? 
borrow { ev in + event = ev.toOwned() } + guard let event else { return } + if event.is_textlike && should_show_event(state: damus_state, ev: event) && !event.is_reply() + { + if self.events.insert(event) { + self.objectWillChange.send() + } + } + case .eose: + continue } - case .notice(let msg): - print("follow pack notice: \(msg)") - case .ok: - break - case .eose(let sub_id): - loading = false - - if sub_id == self.subid { - unsubscribe(to: relay_id) - - guard let txn = NdbTxn(ndb: damus_state.ndb) else { return } - } - - break - case .auth: - break } } } diff --git a/damus/Features/Follows/Models/FollowersModel.swift b/damus/Features/Follows/Models/FollowersModel.swift index 24f052a1d..990cd0d12 100644 --- a/damus/Features/Follows/Models/FollowersModel.swift +++ b/damus/Features/Follows/Models/FollowersModel.swift @@ -14,8 +14,8 @@ class FollowersModel: ObservableObject { @Published var contacts: [Pubkey]? = nil var has_contact: Set = Set() - let sub_id: String = UUID().description - let profiles_id: String = UUID().description + var listener: Task? = nil + var profilesListener: Task? = nil var count: Int? { guard let contacts = self.contacts else { @@ -36,12 +36,27 @@ class FollowersModel: ObservableObject { func subscribe() { let filter = get_filter() let filters = [filter] - //print_filters(relay_id: "following", filters: [filters]) - self.damus_state.nostrNetwork.pool.subscribe(sub_id: sub_id, filters: filters, handler: handle_event) + self.listener?.cancel() + self.listener = Task { + for await item in await damus_state.nostrNetwork.reader.subscribe(filters: filters) { + switch item { + case .event(let borrow): + try? borrow { event in + self.handle_event(ev: event.toOwned()) + } + case .eose: + guard let txn = NdbTxn(ndb: self.damus_state.ndb) else { return } + load_profiles(txn: txn) + } + } + } } func unsubscribe() { - self.damus_state.nostrNetwork.pool.unsubscribe(sub_id: sub_id) + self.listener?.cancel() + self.profilesListener?.cancel() + self.listener = nil + self.profilesListener = nil } func handle_contact_event(_ ev: NostrEvent) { @@ -53,7 +68,7 @@ class FollowersModel: ObservableObject { has_contact.insert(ev.pubkey) } - func load_profiles(relay_id: RelayURL, txn: NdbTxn) { + func load_profiles(txn: NdbTxn) { let authors = find_profiles_to_fetch_from_keys(profiles: damus_state.profiles, pks: contacts ?? [], txn: txn) if authors.isEmpty { return @@ -61,38 +76,24 @@ class FollowersModel: ObservableObject { let filter = NostrFilter(kinds: [.metadata], authors: authors) - damus_state.nostrNetwork.pool.subscribe_to(sub_id: profiles_id, filters: [filter], to: [relay_id], handler: handle_event) - } - - func handle_event(relay_id: RelayURL, ev: NostrConnectionEvent) { - guard case .nostr_event(let nev) = ev else { - return - } - switch nev { - case .event(let sub_id, let ev): - guard sub_id == self.sub_id || sub_id == self.profiles_id else { - return + self.profilesListener?.cancel() + self.profilesListener = Task { + for await item in await damus_state.nostrNetwork.reader.subscribe(filters: [filter]) { + switch item { + case .event(let borrow): + try? 
borrow { event in + self.handle_event(ev: event.toOwned()) + } + case .eose: break + } } - - if ev.known_kind == .contacts { - handle_contact_event(ev) - } - case .notice(let msg): - print("followingmodel notice: \(msg)") - - case .eose(let sub_id): - if sub_id == self.sub_id { - guard let txn = NdbTxn(ndb: self.damus_state.ndb) else { return } - load_profiles(relay_id: relay_id, txn: txn) - } else if sub_id == self.profiles_id { - damus_state.nostrNetwork.pool.unsubscribe(sub_id: profiles_id, to: [relay_id]) - } - - case .ok: - break - case .auth: - break + } + } + + func handle_event(ev: NostrEvent) { + if ev.known_kind == .contacts { + handle_contact_event(ev) } } } diff --git a/damus/Features/Follows/Models/FollowingModel.swift b/damus/Features/Follows/Models/FollowingModel.swift index db7c1275e..59a547ac6 100644 --- a/damus/Features/Follows/Models/FollowingModel.swift +++ b/damus/Features/Follows/Models/FollowingModel.swift @@ -14,7 +14,7 @@ class FollowingModel { let contacts: [Pubkey] let hashtags: [Hashtag] - let sub_id: String = UUID().description + private var listener: Task? = nil init(damus_state: DamusState, contacts: [Pubkey], hashtags: [Hashtag]) { self.damus_state = damus_state @@ -41,19 +41,17 @@ class FollowingModel { return } let filters = [filter] - //print_filters(relay_id: "following", filters: [filters]) - self.damus_state.nostrNetwork.pool.subscribe(sub_id: sub_id, filters: filters, handler: handle_event) + self.listener?.cancel() + self.listener = Task { + for await item in self.damus_state.nostrNetwork.reader.subscribe(filters: filters) { + // don't need to do anything here really + continue + } + } } func unsubscribe() { - if !needs_sub { - return - } - print("unsubscribing from following \(sub_id)") - self.damus_state.nostrNetwork.pool.unsubscribe(sub_id: sub_id) - } - - func handle_event(relay_id: RelayURL, ev: NostrConnectionEvent) { - // don't need to do anything here really + self.listener?.cancel() + self.listener = nil } } diff --git a/damus/Features/NIP05/Models/NIP05DomainEventsModel.swift b/damus/Features/NIP05/Models/NIP05DomainEventsModel.swift index 55ab6f079..8c0d4cc5c 100644 --- a/damus/Features/NIP05/Models/NIP05DomainEventsModel.swift +++ b/damus/Features/NIP05/Models/NIP05DomainEventsModel.swift @@ -15,8 +15,7 @@ class NIP05DomainEventsModel: ObservableObject { let domain: String var filter: NostrFilter - let sub_id = UUID().description - let profiles_subid = UUID().description + var loadingTask: Task? 
let limit: UInt32 = 500 init(state: DamusState, domain: String) { @@ -29,6 +28,20 @@ class NIP05DomainEventsModel: ObservableObject { } @MainActor func subscribe() { + print("subscribing to notes from friends of friends with '\(domain)' NIP-05 domain") + loadingTask = Task { + await streamItems() + } + loading = true + } + + func unsubscribe() { + loadingTask?.cancel() + loading = false + print("unsubscribing from notes from friends of friends with '\(domain)' NIP-05 domain") + } + + func streamItems() async { filter.limit = self.limit filter.kinds = [.text, .longform, .highlight] @@ -50,16 +63,19 @@ class NIP05DomainEventsModel: ObservableObject { } filter.authors = Array(authors) - print("subscribing to notes from friends of friends with '\(domain)' NIP-05 domain with sub_id \(sub_id)") - state.nostrNetwork.pool.register_handler(sub_id: sub_id, handler: handle_event) - loading = true - state.nostrNetwork.pool.send(.subscribe(.init(filters: [filter], sub_id: sub_id))) - } - - func unsubscribe() { - state.nostrNetwork.pool.unsubscribe(sub_id: sub_id) - loading = false - print("unsubscribing from notes from friends of friends with '\(domain)' NIP-05 domain with sub_id \(sub_id)") + + for await item in state.nostrNetwork.reader.subscribe(filters: [filter]) { + switch item { + case .event(borrow: let borrow): + try? borrow { event in + self.add_event(event.toOwned()) + guard let txn = NdbTxn(ndb: state.ndb) else { return } + load_profiles(context: "search", load: .from_events(self.events.all_events), damus_state: state, txn: txn) + } + case .eose: + continue + } + } } func add_event(_ ev: NostrEvent) { @@ -75,23 +91,4 @@ class NIP05DomainEventsModel: ObservableObject { objectWillChange.send() } } - - func handle_event(relay_id: RelayURL, ev: NostrConnectionEvent) { - let (sub_id, done) = handle_subid_event(pool: state.nostrNetwork.pool, relay_id: relay_id, ev: ev) { sub_id, ev in - if sub_id == self.sub_id && ev.is_textlike && ev.should_show_event { - self.add_event(ev) - } - } - - guard done else { - return - } - - self.loading = false - - if sub_id == self.sub_id { - guard let txn = NdbTxn(ndb: state.ndb) else { return } - load_profiles(context: "search", profiles_subid: self.profiles_subid, relay_id: relay_id, load: .from_events(self.events.all_events), damus_state: state, txn: txn) - } - } } diff --git a/damus/Features/Onboarding/Views/OnboardingSuggestionsView.swift b/damus/Features/Onboarding/Views/OnboardingSuggestionsView.swift index faf1ba8f0..264ecff4b 100644 --- a/damus/Features/Onboarding/Views/OnboardingSuggestionsView.swift +++ b/damus/Features/Onboarding/Views/OnboardingSuggestionsView.swift @@ -56,7 +56,7 @@ struct OnboardingSuggestionsView: View { // - We don't have other mechanisms to allow the user to edit this yet // // Therefore, it is better to just save it locally, and retrieve this once we build out https://github.com/damus-io/damus/issues/3042 - model.damus_state.nostrNetwork.pool.send_raw_to_local_ndb(.typical(.event(event))) + model.damus_state.nostrNetwork.sendToNostrDB(event: event) } var body: some View { diff --git a/damus/Features/Posting/Models/DraftsModel.swift b/damus/Features/Posting/Models/DraftsModel.swift index b80ae6333..ab478691e 100644 --- a/damus/Features/Posting/Models/DraftsModel.swift +++ b/damus/Features/Posting/Models/DraftsModel.swift @@ -224,7 +224,7 @@ class Drafts: ObservableObject { } /// Saves the drafts tracked by this class persistently using NostrDB + UserDefaults - func save(damus_state: DamusState) { + func save(damus_state: 
DamusState) async { var draft_events: [NdbNote] = [] post_artifact_block: if let post_artifacts = self.post { let nip37_draft = try? post_artifacts.to_nip37_draft(action: .posting(.user(damus_state.pubkey)), damus_state: damus_state) @@ -254,7 +254,7 @@ class Drafts: ObservableObject { // TODO: Once it is time to implement draft syncing with relays, please consider the following: // - Privacy: Sending drafts to the network leaks metadata about app activity, and may break user expectations // - Down-sync conflict resolution: Consider how to solve conflicts for different draft versions holding the same ID (e.g. edited in Damus, then another client, then Damus again) - damus_state.nostrNetwork.pool.send_raw_to_local_ndb(.typical(.event(draft_event))) + damus_state.nostrNetwork.sendToNostrDB(event: draft_event) } damus_state.settings.draft_event_ids = draft_events.map({ $0.id.hex() }) diff --git a/damus/Features/Posting/Views/PostView.swift b/damus/Features/Posting/Views/PostView.swift index 18fe208b8..cfec7833c 100644 --- a/damus/Features/Posting/Views/PostView.swift +++ b/damus/Features/Posting/Views/PostView.swift @@ -105,7 +105,7 @@ struct PostView: View { self.prompt_view = prompt_view self.placeholder_messages = placeholder_messages ?? [POST_PLACEHOLDER] self.initial_text_suffix = initial_text_suffix - self.autoSaveModel = AutoSaveIndicatorView.AutoSaveViewModel(save: { damus_state.drafts.save(damus_state: damus_state) }) + self.autoSaveModel = AutoSaveIndicatorView.AutoSaveViewModel(save: { await damus_state.drafts.save(damus_state: damus_state) }) } @Environment(\.dismiss) var dismiss @@ -231,7 +231,7 @@ struct PostView: View { damus_state.drafts.post = nil } - damus_state.drafts.save(damus_state: damus_state) + Task{ await damus_state.drafts.save(damus_state: damus_state) } } func load_draft() -> Bool { diff --git a/damus/Features/Profile/Models/ProfileModel.swift b/damus/Features/Profile/Models/ProfileModel.swift index 0bfb55199..70d52c864 100644 --- a/damus/Features/Profile/Models/ProfileModel.swift +++ b/damus/Features/Profile/Models/ProfileModel.swift @@ -23,17 +23,21 @@ class ProfileModel: ObservableObject, Equatable { return nil } + private let MAX_SHARE_RELAYS = 4 + var events: EventHolder let pubkey: Pubkey let damus: DamusState var seen_event: Set = Set() - var sub_id = UUID().description - var prof_subid = UUID().description - var conversations_subid = UUID().description - var findRelay_subid = UUID().description + + var findRelaysListener: Task? = nil + var listener: Task? = nil + var profileListener: Task? = nil + var conversationListener: Task? 
= nil + var conversation_events: Set = Set() - + init(pubkey: Pubkey, damus: DamusState) { self.pubkey = pubkey self.damus = damus @@ -46,7 +50,7 @@ class ProfileModel: ObservableObject, Equatable { guard let contacts = self.contacts else { return false } - + return contacts.referenced_pubkeys.contains(pubkey) } @@ -60,39 +64,53 @@ class ProfileModel: ObservableObject, Equatable { static func == (lhs: ProfileModel, rhs: ProfileModel) -> Bool { return lhs.pubkey == rhs.pubkey } - + func hash(into hasher: inout Hasher) { hasher.combine(pubkey) } - func unsubscribe() { - print("unsubscribing from profile \(pubkey) with sub_id \(sub_id)") - damus.nostrNetwork.pool.unsubscribe(sub_id: sub_id) - damus.nostrNetwork.pool.unsubscribe(sub_id: prof_subid) - if pubkey != damus.pubkey { - damus.nostrNetwork.pool.unsubscribe(sub_id: conversations_subid) - } - } - func subscribe() { - var text_filter = NostrFilter(kinds: [.text, .longform, .highlight]) - var profile_filter = NostrFilter(kinds: [.contacts, .metadata, .boost]) - var relay_list_filter = NostrFilter(kinds: [.relay_list], authors: [pubkey]) - - profile_filter.authors = [pubkey] - - text_filter.authors = [pubkey] - text_filter.limit = 500 - - print("subscribing to textlike events from profile \(pubkey) with sub_id \(sub_id)") - //print_filters(relay_id: "profile", filters: [[text_filter], [profile_filter]]) - damus.nostrNetwork.pool.subscribe(sub_id: sub_id, filters: [text_filter], handler: handle_event) - damus.nostrNetwork.pool.subscribe(sub_id: prof_subid, filters: [profile_filter, relay_list_filter], handler: handle_event) - - subscribe_to_conversations() + print("subscribing to profile \(pubkey)") + listener?.cancel() + listener = Task { + var text_filter = NostrFilter(kinds: [.text, .longform, .highlight]) + text_filter.authors = [pubkey] + text_filter.limit = 500 + for await item in damus.nostrNetwork.reader.subscribe(filters: [text_filter]) { + switch item { + case .event(let borrow): + try? borrow { event in + handleNostrEvent(event.toOwned()) + } + case .eose: break + } + } + guard let txn = NdbTxn(ndb: damus.ndb) else { return } + load_profiles(context: "profile", load: .from_events(events.events), damus_state: damus, txn: txn) + progress += 1 + } + profileListener?.cancel() + profileListener = Task { + var profile_filter = NostrFilter(kinds: [.contacts, .metadata, .boost]) + profile_filter.authors = [pubkey] + for await item in damus.nostrNetwork.reader.subscribe(filters: [profile_filter]) { + switch item { + case .event(let borrow): + try? borrow { event in + handleNostrEvent(event.toOwned()) + } + case .eose: break + } + } + progress += 1 + } + conversationListener?.cancel() + conversationListener = Task { + await listenToConversations() + } } - - private func subscribe_to_conversations() { + + func listenToConversations() async { // Only subscribe to conversation events if the profile is not us. 
guard pubkey != damus.pubkey else { return @@ -102,10 +120,35 @@ class ProfileModel: ObservableObject, Equatable { let limit: UInt32 = 500 let conversations_filter_them = NostrFilter(kinds: conversation_kinds, pubkeys: [damus.pubkey], limit: limit, authors: [pubkey]) let conversations_filter_us = NostrFilter(kinds: conversation_kinds, pubkeys: [pubkey], limit: limit, authors: [damus.pubkey]) - print("subscribing to conversation events from and to profile \(pubkey) with sub_id \(conversations_subid)") - damus.nostrNetwork.pool.subscribe(sub_id: conversations_subid, filters: [conversations_filter_them, conversations_filter_us], handler: handle_event) + print("subscribing to conversation events from and to profile \(pubkey)") + for await item in self.damus.nostrNetwork.reader.subscribe(filters: [conversations_filter_them, conversations_filter_us]) { + switch item { + case .event(borrow: let borrow): + try? borrow { ev in + if !seen_event.contains(ev.id) { + let event = ev.toOwned() + Task { await self.add_event(event) } + conversation_events.insert(ev.id) + } + else if !conversation_events.contains(ev.id) { + conversation_events.insert(ev.id) + } + } + case .eose: + continue + } + } } - + + func unsubscribe() { + listener?.cancel() + listener = nil + profileListener?.cancel() + profileListener = nil + conversationListener?.cancel() + conversationListener = nil + } + func handle_profile_contact_event(_ ev: NostrEvent) { process_contact_event(state: damus, ev: ev) @@ -120,8 +163,13 @@ class ProfileModel: ObservableObject, Equatable { self.following = count_pubkeys(ev.tags) self.legacy_relay_list = decode_json_relays(ev.content) } + + @MainActor + func add_event(_ ev: NostrEvent) { + guard ev.should_show_event else { + return + } - private func add_event(_ ev: NostrEvent) { if ev.is_textlike || ev.known_kind == .boost { if self.events.insert(ev) { self.objectWillChange.send() @@ -134,72 +182,13 @@ class ProfileModel: ObservableObject, Equatable { } seen_event.insert(ev.id) } - - // Ensure the event public key matches the public key(s) we are querying. - // This is done to protect against a relay not properly filtering events by the pubkey - // See https://github.com/damus-io/damus/issues/1846 for more information - private func relay_filtered_correctly(_ ev: NostrEvent, subid: String?) 
-> Bool { - if subid == self.conversations_subid { - switch ev.pubkey { - case self.pubkey: - return ev.referenced_pubkeys.contains(damus.pubkey) - case damus.pubkey: - return ev.referenced_pubkeys.contains(self.pubkey) - default: - return false - } - } - - return self.pubkey == ev.pubkey - } - - private func handle_event(relay_id: RelayURL, ev: NostrConnectionEvent) { - switch ev { - case .ws_connection_event: - return - case .nostr_event(let resp): - guard resp.subid == self.sub_id || resp.subid == self.prof_subid || resp.subid == self.conversations_subid else { - return - } - switch resp { - case .ok: - break - case .event(_, let ev): - guard ev.should_show_event else { - break - } - - if !seen_event.contains(ev.id) { - guard relay_filtered_correctly(ev, subid: resp.subid) else { - break - } - - add_event(ev) - - if resp.subid == self.conversations_subid { - conversation_events.insert(ev.id) - } - } else if resp.subid == self.conversations_subid && !conversation_events.contains(ev.id) { - guard relay_filtered_correctly(ev, subid: resp.subid) else { - break - } - - conversation_events.insert(ev.id) - } - case .notice: - break - //notify(.notice, notice) - case .eose: - guard let txn = NdbTxn(ndb: damus.ndb) else { return } - if resp.subid == sub_id { - load_profiles(context: "profile", profiles_subid: prof_subid, relay_id: relay_id, load: .from_events(events.events), damus_state: damus, txn: txn) - } - progress += 1 - break - case .auth: - break - } - } + + private func handleNostrEvent(_ ev: NostrEvent) { + // Ensure the event public key matches this profiles public key + // This is done to protect against a relay not properly filtering events by the pubkey + // See https://github.com/damus-io/damus/issues/1846 for more information + guard self.pubkey == ev.pubkey else { return } + Task { await add_event(ev) } } private func findRelaysHandler(relay_id: RelayURL, ev: NostrConnectionEvent) { @@ -211,12 +200,27 @@ class ProfileModel: ObservableObject, Equatable { func subscribeToFindRelays() { var profile_filter = NostrFilter(kinds: [.contacts]) profile_filter.authors = [pubkey] - - damus.nostrNetwork.pool.subscribe(sub_id: findRelay_subid, filters: [profile_filter], handler: findRelaysHandler) + self.findRelaysListener?.cancel() + self.findRelaysListener = Task { + for await item in await damus.nostrNetwork.reader.subscribe(filters: [profile_filter]) { + switch item { + case .event(let borrow): + try? borrow { event in + if case .contacts = event.known_kind { + // TODO: Is this correct? 
+ self.legacy_relay_list = decode_json_relays(event.content) + } + } + case .eose: + break + } + } + } } func unsubscribeFindRelays() { - damus.nostrNetwork.pool.unsubscribe(sub_id: findRelay_subid) + self.findRelaysListener?.cancel() + self.findRelaysListener = nil } func getCappedRelays() -> [RelayURL] { diff --git a/damus/Features/Profile/Views/ProfileActionSheetView.swift b/damus/Features/Profile/Views/ProfileActionSheetView.swift index 437db5483..3b361a8fc 100644 --- a/damus/Features/Profile/Views/ProfileActionSheetView.swift +++ b/damus/Features/Profile/Views/ProfileActionSheetView.swift @@ -283,7 +283,7 @@ fileprivate struct ProfileActionSheetZapButton: View { VStack(alignment: .center, spacing: 10) { Button( action: { - send_zap(damus_state: damus_state, target: .profile(self.profile.pubkey), lnurl: lnurl, is_custom: false, comment: nil, amount_sats: nil, zap_type: damus_state.settings.default_zap_type) + Task { await send_zap(damus_state: damus_state, target: .profile(self.profile.pubkey), lnurl: lnurl, is_custom: false, comment: nil, amount_sats: nil, zap_type: damus_state.settings.default_zap_type) } zap_state = .zapping }, label: { diff --git a/damus/Features/Profile/Views/ProfileView.swift b/damus/Features/Profile/Views/ProfileView.swift index 2c6f60d08..2234dcaf5 100644 --- a/damus/Features/Profile/Views/ProfileView.swift +++ b/damus/Features/Profile/Views/ProfileView.swift @@ -588,3 +588,4 @@ func check_nip05_validity(pubkey: Pubkey, profiles: Profiles) { } } } + diff --git a/damus/Features/Relays/Models/RelayFilters.swift b/damus/Features/Relays/Models/RelayFilters.swift index 1940b0699..b549379e2 100644 --- a/damus/Features/Relays/Models/RelayFilters.swift +++ b/damus/Features/Relays/Models/RelayFilters.swift @@ -84,9 +84,3 @@ func load_relay_filters(_ pubkey: Pubkey) -> Set? 
{ s.insert(filter) } } - -func determine_to_relays(pool: RelayPool, filters: RelayFilters) -> [RelayURL] { - return pool.our_descriptors - .map { $0.url } - .filter { !filters.is_filtered(timeline: .search, relay_id: $0) } -} diff --git a/damus/Features/Relays/Views/RelayConfigView.swift b/damus/Features/Relays/Views/RelayConfigView.swift index 0a6b46b31..cc82ad1b4 100644 --- a/damus/Features/Relays/Views/RelayConfigView.swift +++ b/damus/Features/Relays/Views/RelayConfigView.swift @@ -32,7 +32,7 @@ struct RelayConfigView: View { init(state: DamusState) { self.state = state - _relays = State(initialValue: state.nostrNetwork.pool.our_descriptors) + _relays = State(initialValue: state.nostrNetwork.ourRelayDescriptors) UITabBar.appearance().isHidden = true } @@ -98,7 +98,7 @@ struct RelayConfigView: View { } } .onReceive(handle_notify(.relays_changed)) { _ in - self.relays = state.nostrNetwork.pool.our_descriptors + self.relays = state.nostrNetwork.ourRelayDescriptors } .onAppear { notify(.display_tabbar(false)) diff --git a/damus/Features/Relays/Views/RelayDetailView.swift b/damus/Features/Relays/Views/RelayDetailView.swift index d2474ed79..fd19aaabf 100644 --- a/damus/Features/Relays/Views/RelayDetailView.swift +++ b/damus/Features/Relays/Views/RelayDetailView.swift @@ -30,7 +30,7 @@ struct RelayDetailView: View { func RemoveRelayButton(_ keypair: FullKeypair) -> some View { Button(action: { - self.removeRelay() + Task { await self.removeRelay() } }) { HStack { Text("Disconnect", comment: "Button to disconnect from the relay.") @@ -43,7 +43,7 @@ struct RelayDetailView: View { func ConnectRelayButton(_ keypair: FullKeypair) -> some View { Button(action: { - self.connectRelay() + Task { await self.connectRelay() } }) { HStack { Text("Connect", comment: "Button to connect to the relay.") @@ -177,16 +177,18 @@ struct RelayDetailView: View { } private var relay_object: RelayPool.Relay? { - state.nostrNetwork.pool.get_relay(relay) + // TODO: Concurrency problems? + state.nostrNetwork.connectedRelays.first(where: { $0.descriptor.url == relay }) } private var relay_connection: RelayConnection? { relay_object?.connection } - func removeRelay() { + func removeRelay() async { do { - try state.nostrNetwork.userRelayList.remove(relayURL: self.relay) + // TODO: Concurrency problems? + try await state.nostrNetwork.userRelayList.remove(relayURL: self.relay) dismiss() } catch { @@ -194,9 +196,10 @@ struct RelayDetailView: View { } } - func connectRelay() { + func connectRelay() async { do { - try state.nostrNetwork.userRelayList.insert(relay: NIP65.RelayList.RelayItem(url: relay, rwConfiguration: .readWrite)) + // TODO: Concurrency problems? 
+ try await state.nostrNetwork.userRelayList.insert(relay: NIP65.RelayList.RelayItem(url: relay, rwConfiguration: .readWrite)) dismiss() } catch { diff --git a/damus/Features/Relays/Views/RelayFilterView.swift b/damus/Features/Relays/Views/RelayFilterView.swift index 77c2a6084..a4e677e8d 100644 --- a/damus/Features/Relays/Views/RelayFilterView.swift +++ b/damus/Features/Relays/Views/RelayFilterView.swift @@ -19,7 +19,7 @@ struct RelayFilterView: View { } var relays: [RelayPool.RelayDescriptor] { - return state.nostrNetwork.pool.our_descriptors + return state.nostrNetwork.ourRelayDescriptors } var body: some View { diff --git a/damus/Features/Relays/Views/RelayStatusView.swift b/damus/Features/Relays/Views/RelayStatusView.swift index d3f6ed610..18aa4ca83 100644 --- a/damus/Features/Relays/Views/RelayStatusView.swift +++ b/damus/Features/Relays/Views/RelayStatusView.swift @@ -56,7 +56,7 @@ struct RelayStatusView: View { struct RelayStatusView_Previews: PreviewProvider { static var previews: some View { - let connection = test_damus_state.nostrNetwork.pool.get_relay(RelayURL("wss://relay.damus.io")!)!.connection + let connection = test_damus_state.nostrNetwork.getRelay(RelayURL("wss://relay.damus.io")!)!.connection RelayStatusView(connection: connection) } } diff --git a/damus/Features/Relays/Views/RelayToggle.swift b/damus/Features/Relays/Views/RelayToggle.swift index 41e0b77c2..8fb5474a3 100644 --- a/damus/Features/Relays/Views/RelayToggle.swift +++ b/damus/Features/Relays/Views/RelayToggle.swift @@ -36,7 +36,7 @@ struct RelayToggle: View { } private var relay_connection: RelayConnection? { - state.nostrNetwork.pool.get_relay(relay_id)?.connection + state.nostrNetwork.getRelay(relay_id)?.connection } } diff --git a/damus/Features/Relays/Views/RelayView.swift b/damus/Features/Relays/Views/RelayView.swift index 4113254ca..85c61d671 100644 --- a/damus/Features/Relays/Views/RelayView.swift +++ b/damus/Features/Relays/Views/RelayView.swift @@ -24,13 +24,13 @@ struct RelayView: View { self.recommended = recommended self.model_cache = state.relay_model_cache _showActionButtons = showActionButtons - let relay_state = RelayView.get_relay_state(pool: state.nostrNetwork.pool, relay: relay) + let relay_state = RelayView.get_relay_state(state: state, relay: relay) self._relay_state = State(initialValue: relay_state) self.disableNavLink = disableNavLink } - static func get_relay_state(pool: RelayPool, relay: RelayURL) -> Bool { - return pool.get_relay(relay) == nil + static func get_relay_state(state: DamusState, relay: RelayURL) -> Bool { + return state.nostrNetwork.getRelay(relay) == nil } var body: some View { @@ -110,7 +110,7 @@ struct RelayView: View { .contentShape(Rectangle()) } .onReceive(handle_notify(.relays_changed)) { _ in - self.relay_state = RelayView.get_relay_state(pool: state.nostrNetwork.pool, relay: self.relay) + self.relay_state = RelayView.get_relay_state(state: state, relay: self.relay) } .onTapGesture { if !disableNavLink { @@ -120,7 +120,7 @@ struct RelayView: View { } private var relay_connection: RelayConnection? 
{ - state.nostrNetwork.pool.get_relay(relay)?.connection + state.nostrNetwork.getRelay(relay)?.connection } func add_action(keypair: FullKeypair) async { diff --git a/damus/Features/Relays/Views/UserRelaysView.swift b/damus/Features/Relays/Views/UserRelaysView.swift index 20f7c44e9..9e87ade8e 100644 --- a/damus/Features/Relays/Views/UserRelaysView.swift +++ b/damus/Features/Relays/Views/UserRelaysView.swift @@ -22,7 +22,7 @@ struct UserRelaysView: View { static func make_relay_state(state: DamusState, relays: [RelayURL]) -> [(RelayURL, Bool)] { return relays.map({ r in - return (r, state.nostrNetwork.pool.get_relay(r) == nil) + return (r, state.nostrNetwork.getRelay(r) == nil) }).sorted { (a, b) in a.0 < b.0 } } diff --git a/damus/Features/Search/Models/SearchHomeModel.swift b/damus/Features/Search/Models/SearchHomeModel.swift index a8393865d..5820f02d4 100644 --- a/damus/Features/Search/Models/SearchHomeModel.swift +++ b/damus/Features/Search/Models/SearchHomeModel.swift @@ -39,63 +39,41 @@ class SearchHomeModel: ObservableObject { self.objectWillChange.send() } - func subscribe() { + func load() async { loading = true - let to_relays = determine_to_relays(pool: damus_state.nostrNetwork.pool, filters: damus_state.relay_filters) - - var follow_list_filter = NostrFilter(kinds: [.follow_list]) - follow_list_filter.until = UInt32(Date.now.timeIntervalSince1970) + let to_relays = damus_state.nostrNetwork.ourRelayDescriptors + .map { $0.url } + .filter { !damus_state.relay_filters.is_filtered(timeline: .search, relay_id: $0) } - damus_state.nostrNetwork.pool.subscribe(sub_id: base_subid, filters: [get_base_filter()], handler: handle_event, to: to_relays) - damus_state.nostrNetwork.pool.subscribe(sub_id: follow_pack_subid, filters: [follow_list_filter], handler: handle_event, to: to_relays) - } - - func unsubscribe(to: RelayURL? = nil) { - loading = false - damus_state.nostrNetwork.pool.unsubscribe(sub_id: base_subid, to: to.map { [$0] }) - damus_state.nostrNetwork.pool.unsubscribe(sub_id: follow_pack_subid, to: to.map { [$0] }) - } - - func handle_event(relay_id: RelayURL, conn_ev: NostrConnectionEvent) { - guard case .nostr_event(let event) = conn_ev else { - return + for await item in damus_state.nostrNetwork.reader.subscribe(filters: [get_base_filter()], to: to_relays) { + switch item { + case .event(let borrow): + var event: NostrEvent? = nil + try? 
borrow { ev in + event = ev.toOwned() + } + guard let event else { return } + await self.handleEvent(event) + case .eose: break + } } + loading = false - switch event { - case .event(let sub_id, let ev): - guard sub_id == self.base_subid || sub_id == self.profiles_subid || sub_id == self.follow_pack_subid else { + guard let txn = NdbTxn(ndb: damus_state.ndb) else { return } + load_profiles(context: "universe", load: .from_events(events.all_events), damus_state: damus_state, txn: txn) + } + + @MainActor + func handleEvent(_ ev: NostrEvent) { + if ev.is_textlike && should_show_event(state: damus_state, ev: ev) && !ev.is_reply() { + if !damus_state.settings.multiple_events_per_pubkey && seen_pubkey.contains(ev.pubkey) { return } - if ev.is_textlike && should_show_event(state: damus_state, ev: ev) && !ev.is_reply() - { - if !damus_state.settings.multiple_events_per_pubkey && seen_pubkey.contains(ev.pubkey) { - return - } - seen_pubkey.insert(ev.pubkey) - - if self.events.insert(ev) { - self.objectWillChange.send() - } - } - case .notice(let msg): - print("search home notice: \(msg)") - case .ok: - break - case .eose(let sub_id): - loading = false + seen_pubkey.insert(ev.pubkey) - if sub_id == self.base_subid { - // Make sure we unsubscribe after we've fetched the global events - // global events are not realtime - unsubscribe(to: relay_id) - - guard let txn = NdbTxn(ndb: damus_state.ndb) else { return } - load_profiles(context: "universe", profiles_subid: profiles_subid, relay_id: relay_id, load: .from_events(events.all_events), damus_state: damus_state, txn: txn) + if self.events.insert(ev) { + self.objectWillChange.send() } - - break - case .auth: - break } } } @@ -135,44 +113,35 @@ enum PubkeysToLoad { case from_keys([Pubkey]) } -func load_profiles(context: String, profiles_subid: String, relay_id: RelayURL, load: PubkeysToLoad, damus_state: DamusState, txn: NdbTxn) { +func load_profiles(context: String, load: PubkeysToLoad, damus_state: DamusState, txn: NdbTxn) { let authors = find_profiles_to_fetch(profiles: damus_state.profiles, load: load, cache: damus_state.events, txn: txn) guard !authors.isEmpty else { return } - print("load_profiles[\(context)]: requesting \(authors.count) profiles from \(relay_id)") - - let filter = NostrFilter(kinds: [.metadata], authors: authors) - - damus_state.nostrNetwork.pool.subscribe_to(sub_id: profiles_subid, filters: [filter], to: [relay_id]) { rid, conn_ev in + Task { + print("load_profiles[\(context)]: requesting \(authors.count) profiles from relay pool") + let filter = NostrFilter(kinds: [.metadata], authors: authors) - let now = UInt64(Date.now.timeIntervalSince1970) - switch conn_ev { - case .ws_connection_event: - break - case .nostr_event(let ev): - guard ev.subid == profiles_subid, rid == relay_id else { return } - - switch ev { - case .event(_, let ev): - if ev.known_kind == .metadata { - damus_state.ndb.write_profile_last_fetched(pubkey: ev.pubkey, fetched_at: now) + for await item in damus_state.nostrNetwork.reader.subscribe(filters: [filter]) { + let now = UInt64(Date.now.timeIntervalSince1970) + switch item { + case .event(let borrow): + var event: NostrEvent? = nil + try? 
borrow { ev in + event = ev.toOwned() + } + guard let event else { return } + if event.known_kind == .metadata { + damus_state.ndb.write_profile_last_fetched(pubkey: event.pubkey, fetched_at: now) } case .eose: - print("load_profiles[\(context)]: done loading \(authors.count) profiles from \(relay_id)") - damus_state.nostrNetwork.pool.unsubscribe(sub_id: profiles_subid, to: [relay_id]) - case .ok: - break - case .notice: - break - case .auth: break } } - - + + print("load_profiles[\(context)]: done loading \(authors.count) profiles from relay pool") } } diff --git a/damus/Features/Search/Models/SearchModel.swift b/damus/Features/Search/Models/SearchModel.swift index fbb96ab90..0de7e2939 100644 --- a/damus/Features/Search/Models/SearchModel.swift +++ b/damus/Features/Search/Models/SearchModel.swift @@ -14,8 +14,8 @@ class SearchModel: ObservableObject { @Published var loading: Bool = false var search: NostrFilter - let sub_id = UUID().description let profiles_subid = UUID().description + var listener: Task? = nil let limit: UInt32 = 500 init(state: DamusState, search: NostrFilter) { @@ -39,17 +39,32 @@ class SearchModel: ObservableObject { search.kinds = [.text, .like, .longform, .highlight, .follow_list] //likes_filter.ids = ref_events.referenced_ids! - - print("subscribing to search '\(search)' with sub_id \(sub_id)") - state.nostrNetwork.pool.register_handler(sub_id: sub_id, handler: handle_event) - loading = true - state.nostrNetwork.pool.send(.subscribe(.init(filters: [search], sub_id: sub_id))) + listener?.cancel() + listener = Task { + self.loading = true + print("subscribing to search") + for await item in await state.nostrNetwork.reader.subscribe(filters: [search]) { + switch item { + case .event(let borrow): + try? borrow { ev in + let event = ev.toOwned() + if event.is_textlike && event.should_show_event { + self.add_event(event) + } + } + case .eose: + break + } + guard let txn = NdbTxn(ndb: state.ndb) else { return } + load_profiles(context: "search", load: .from_events(self.events.all_events), damus_state: state, txn: txn) + } + self.loading = false + } } func unsubscribe() { - state.nostrNetwork.pool.unsubscribe(sub_id: sub_id) - loading = false - print("unsubscribing from search '\(search)' with sub_id \(sub_id)") + listener?.cancel() + listener = nil } func add_event(_ ev: NostrEvent) { @@ -65,25 +80,6 @@ class SearchModel: ObservableObject { objectWillChange.send() } } - - func handle_event(relay_id: RelayURL, ev: NostrConnectionEvent) { - let (sub_id, done) = handle_subid_event(pool: state.nostrNetwork.pool, relay_id: relay_id, ev: ev) { sub_id, ev in - if ev.is_textlike && ev.should_show_event { - self.add_event(ev) - } - } - - guard done else { - return - } - - self.loading = false - - if sub_id == self.sub_id { - guard let txn = NdbTxn(ndb: state.ndb) else { return } - load_profiles(context: "search", profiles_subid: self.profiles_subid, relay_id: relay_id, load: .from_events(self.events.all_events), damus_state: state, txn: txn) - } - } } func event_matches_hashtag(_ ev: NostrEvent, hashtags: [String]) -> Bool { @@ -106,33 +102,3 @@ func event_matches_filter(_ ev: NostrEvent, filter: NostrFilter) -> Bool { } return true } - -func handle_subid_event(pool: RelayPool, relay_id: RelayURL, ev: NostrConnectionEvent, handle: (String, NostrEvent) -> ()) -> (String?, Bool) { - switch ev { - case .ws_connection_event: - return (nil, false) - - case .nostr_event(let res): - switch res { - case .event(let ev_subid, let ev): - handle(ev_subid, ev) - return (ev_subid, false) - - 
case .ok: - return (nil, false) - - case .notice(let note): - if note.contains("Too many subscription filters") { - // TODO: resend filters? - pool.reconnect(to: [relay_id]) - } - return (nil, false) - - case .eose(let subid): - return (subid, true) - - case .auth: - return (nil, false) - } - } -} diff --git a/damus/Features/Search/Views/SearchHomeView.swift b/damus/Features/Search/Views/SearchHomeView.swift index 163d39824..5f056dfa3 100644 --- a/damus/Features/Search/Views/SearchHomeView.swift +++ b/damus/Features/Search/Views/SearchHomeView.swift @@ -14,6 +14,7 @@ struct SearchHomeView: View { @StateObject var model: SearchHomeModel @State var search: String = "" @FocusState private var isFocused: Bool + @State var loadingTask: Task? func content_filter(_ fstate: FilterState) -> ((NostrEvent) -> Bool) { var filters = ContentFilters.defaults(damus_state: damus_state) @@ -84,8 +85,8 @@ struct SearchHomeView: View { ) .refreshable { // Fetch new information by unsubscribing and resubscribing to the relay - model.unsubscribe() - model.subscribe() + loadingTask?.cancel() + loadingTask = Task { await model.load() } } } @@ -93,8 +94,8 @@ struct SearchHomeView: View { SearchResultsView(damus_state: damus_state, search: $search) .refreshable { // Fetch new information by unsubscribing and resubscribing to the relay - model.unsubscribe() - model.subscribe() + loadingTask?.cancel() + loadingTask = Task { await model.load() } } } @@ -129,11 +130,11 @@ struct SearchHomeView: View { } .onAppear { if model.events.events.isEmpty { - model.subscribe() + loadingTask = Task { await model.load() } } } .onDisappear { - model.unsubscribe() + loadingTask?.cancel() } } } diff --git a/damus/Features/Search/Views/SearchingEventView.swift b/damus/Features/Search/Views/SearchingEventView.swift index c3aed87f5..7f132bc32 100644 --- a/damus/Features/Search/Views/SearchingEventView.swift +++ b/damus/Features/Search/Views/SearchingEventView.swift @@ -77,7 +77,8 @@ struct SearchingEventView: View { } case .event(let note_id): - find_event(state: state, query: .event(evid: note_id)) { res in + Task { + let res = await state.nostrNetwork.findEvent(query: .event(evid: note_id)) guard case .event(let ev) = res else { self.search_state = .not_found return @@ -85,7 +86,8 @@ struct SearchingEventView: View { self.search_state = .found(ev) } case .profile(let pubkey): - find_event(state: state, query: .profile(pubkey: pubkey)) { res in + Task { + let res = await state.nostrNetwork.findEvent(query: .profile(pubkey: pubkey)) guard case .profile(let pubkey) = res else { self.search_state = .not_found return @@ -93,7 +95,8 @@ struct SearchingEventView: View { self.search_state = .found_profile(pubkey) } case .naddr(let naddr): - naddrLookup(damus_state: state, naddr: naddr) { res in + Task { + let res = await state.nostrNetwork.lookup(naddr: naddr) guard let res = res else { self.search_state = .not_found return diff --git a/damus/Features/Settings/Views/FirstAidSettingsView.swift b/damus/Features/Settings/Views/FirstAidSettingsView.swift index 6a40361bb..84fbe48e1 100644 --- a/damus/Features/Settings/Views/FirstAidSettingsView.swift +++ b/damus/Features/Settings/Views/FirstAidSettingsView.swift @@ -68,7 +68,7 @@ struct FirstAidSettingsView: View { guard let new_contact_list_event = make_first_contact_event(keypair: damus_state.keypair) else { throw FirstAidError.cannotMakeFirstContactEvent } - damus_state.nostrNetwork.pool.send(.event(new_contact_list_event)) + damus_state.nostrNetwork.send(event: new_contact_list_event) 
damus_state.settings.latest_contact_event_id_hex = new_contact_list_event.id.hex() } diff --git a/damus/Features/Timeline/Models/HomeModel.swift b/damus/Features/Timeline/Models/HomeModel.swift index a18fd4382..8d2da21e8 100644 --- a/damus/Features/Timeline/Models/HomeModel.swift +++ b/damus/Features/Timeline/Models/HomeModel.swift @@ -65,12 +65,11 @@ class HomeModel: ContactsDelegate { let resub_debouncer = Debouncer(interval: 3.0) var should_debounce_dms = true - let home_subid = UUID().description - let contacts_subid = UUID().description - let notifications_subid = UUID().description - let dms_subid = UUID().description - let init_subid = UUID().description - let profiles_subid = UUID().description + var homeHandlerTask: Task? + var contactsHandlerTask: Task? + var notificationsHandlerTask: Task? + var dmsHandlerTask: Task? + var nwcHandlerTask: Task? var loading: Bool = false @@ -94,23 +93,10 @@ class HomeModel: ContactsDelegate { preload_events(state: self.damus_state, events: [ev]) } - var pool: RelayPool { - self.damus_state.nostrNetwork.pool - } - var dms: DirectMessagesModel { return damus_state.dms } - func has_sub_id_event(sub_id: String, ev_id: NoteId) -> Bool { - if !has_event.keys.contains(sub_id) { - has_event[sub_id] = Set() - return false - } - - return has_event[sub_id]!.contains(ev_id) - } - func setup_debouncer() { // turn off debouncer after initial load DispatchQueue.main.asyncAfter(deadline: .now() + 5.0) { @@ -140,6 +126,28 @@ class HomeModel: ContactsDelegate { damus_state.drafts.load(from: damus_state) } + enum RelayListLoadingError: Error { + case noRelayList + case relayListParseError + + var humanReadableError: ErrorView.UserPresentableError { + switch self { + case .noRelayList: + return ErrorView.UserPresentableError( + user_visible_description: NSLocalizedString("Your relay list could not be found, so we cannot connect you to your Nostr network.", comment: "Human readable error description for a failure to find the relay list"), + tip: NSLocalizedString("Please check your internet connection and restart the app. If the error persists, please go to Settings > First Aid.", comment: "Human readable tips for what to do for a failure to find the relay list"), + technical_info: "No NIP-65 relay list or legacy kind:3 contact event could be found." + ) + case .relayListParseError: + return ErrorView.UserPresentableError( + user_visible_description: NSLocalizedString("Your relay list appears to be broken, so we cannot connect you to your Nostr network.", comment: "Human readable error description for a failure to parse the relay list due to a bad relay list"), + tip: NSLocalizedString("Please contact support for further help.", comment: "Human readable tips for what to do for a failure to find the relay list"), + technical_info: "Relay list could not be parsed." 
+ ) + } + } + } + // MARK: - ContactsDelegate functions func latest_contact_event_changed(new_event: NostrEvent) { @@ -158,9 +166,6 @@ class HomeModel: ContactsDelegate { print("hit resub debouncer") resub_debouncer.debounce { - print("resub") - self.unsubscribe_to_home_filters() - switch resubbing { case .following: break @@ -175,25 +180,16 @@ class HomeModel: ContactsDelegate { } @MainActor - func process_event(sub_id: String, relay_id: RelayURL, ev: NostrEvent) { - if has_sub_id_event(sub_id: sub_id, ev_id: ev.id) { - return - } - - let last_k = get_last_event_of_kind(relay_id: relay_id, kind: ev.kind) - if last_k == nil || ev.created_at > last_k!.created_at { - last_event_of_kind[relay_id]?[ev.kind] = ev - } - + func process_event(ev: NostrEvent, context: SubscriptionContext) { guard let kind = ev.known_kind else { return } switch kind { case .chat, .longform, .text, .highlight: - handle_text_event(sub_id: sub_id, ev) + handle_text_event(ev, context: context) case .contacts: - handle_contact_event(sub_id: sub_id, relay_id: relay_id, ev: ev) + handle_contact_event(ev: ev) case .metadata: // profile metadata processing is handled by nostrdb break @@ -202,7 +198,7 @@ class HomeModel: ContactsDelegate { case .mute_list: handle_mute_list_event(ev) case .boost: - handle_boost_event(sub_id: sub_id, ev) + handle_boost_event(ev, context: context) case .like: handle_like_event(ev) case .dm: @@ -216,7 +212,7 @@ class HomeModel: ContactsDelegate { case .nwc_request: break case .nwc_response: - handle_nwc_response(ev, relay: relay_id) + handle_nwc_response(ev) case .http_auth: break case .status: @@ -261,7 +257,7 @@ class HomeModel: ContactsDelegate { pdata.status.update_status(st) } - func handle_nwc_response(_ ev: NostrEvent, relay: RelayURL) { + func handle_nwc_response(_ ev: NostrEvent) { Task { @MainActor in // TODO: Adapt KeychainStorage to StringCodable and instead of parsing to WalletConnectURL every time guard let nwc_str = damus_state.settings.nostr_wallet_connect, @@ -269,7 +265,6 @@ class HomeModel: ContactsDelegate { return } - guard nwc.relay == relay else { return } // Don't process NWC responses coming from relays other than our designated one guard ev.referenced_pubkeys.first == nwc.keypair.pubkey else { return // This message is not for us. Ignore it. 
} @@ -289,9 +284,9 @@ class HomeModel: ContactsDelegate { // since command results are not returned for ephemeral events, // remove the request from the postbox which is likely failing over and over if damus_state.nostrNetwork.postbox.remove_relayer(relay_id: nwc.relay, event_id: resp.req_id) { - Log.debug("HomeModel: got NWC response, removed %s from the postbox [%s]", for: .nwc, resp.req_id.hex(), relay.absoluteString) + Log.debug("HomeModel: got NWC response, removed %s from the postbox", for: .nwc, resp.req_id.hex()) } else { - Log.debug("HomeModel: got NWC response, %s not found in the postbox, nothing to remove [%s]", for: .nwc, resp.req_id.hex(), relay.absoluteString) + Log.debug("HomeModel: got NWC response, %s not found in the postbox, nothing to remove", for: .nwc, resp.req_id.hex()) } damus_state.wallet.handle_nwc_response(response: resp) // This can handle success or error cases @@ -303,7 +298,6 @@ class HomeModel: ContactsDelegate { return } - print("nwc success: \(resp.response.result.debugDescription) [\(relay)]") WalletConnect.handle_zap_success(state: self.damus_state, resp: resp) } } @@ -382,19 +376,11 @@ class HomeModel: ContactsDelegate { self.deleted_events.insert(ev.id) } - func handle_contact_event(sub_id: String, relay_id: RelayURL, ev: NostrEvent) { + func handle_contact_event(ev: NostrEvent) { process_contact_event(state: self.damus_state, ev: ev) - - if sub_id == init_subid { - pool.send(.unsubscribe(init_subid), to: [relay_id]) - if !done_init { - done_init = true - send_home_filters(relay_id: nil) - } - } } - func handle_boost_event(sub_id: String, _ ev: NostrEvent) { + func handle_boost_event(_ ev: NostrEvent, context: SubscriptionContext) { var boost_ev_id = ev.last_refid() if let inner_ev = ev.get_inner_event(cache: damus_state.events) { @@ -409,7 +395,7 @@ class HomeModel: ContactsDelegate { if inner_ev.is_textlike { DispatchQueue.main.async { - self.handle_text_event(sub_id: sub_id, ev) + self.handle_text_event(ev, context: context) } } } @@ -457,94 +443,50 @@ class HomeModel: ContactsDelegate { } } - @MainActor - func handle_event(relay_id: RelayURL, conn_event: NostrConnectionEvent) { - switch conn_event { - case .ws_connection_event(let ev): - switch ev { - case .connected: - if !done_init { - self.loading = true - send_initial_filters(relay_id: relay_id) - } else { - //remove_bootstrap_nodes(damus_state) - send_home_filters(relay_id: relay_id) - } - - // connect to nwc relays when connected - if let nwc_str = damus_state.settings.nostr_wallet_connect, - let r = pool.get_relay(relay_id), - r.descriptor.variant == .nwc, - let nwc = WalletConnectURL(str: nwc_str), - nwc.relay == relay_id - { - WalletConnect.subscribe(url: nwc, pool: pool) - } - case .error(let merr): - let desc = String(describing: merr) - if desc.contains("Software caused connection abort") { - pool.reconnect(to: [relay_id]) + /// Send the initial filters, just our contact list and relay list mostly + func send_initial_filters() { + Task { + let filter = NostrFilter(kinds: [.contacts], limit: 1, authors: [damus_state.pubkey]) + for await item in damus_state.nostrNetwork.reader.subscribe(filters: [filter]) { + switch item { + case .event(let borrow): + var event: NostrEvent? = nil + try? 
borrow { ev in + event = ev.toOwned() + } + guard let event else { return } + await process_event(ev: event, context: .initialContactList) + continue + case .eose: + if !done_init { + done_init = true + send_home_filters() + } + break } - case .disconnected: - pool.reconnect(to: [relay_id]) - default: - break } - update_signal_from_pool(signal: self.signal, pool: damus_state.nostrNetwork.pool) - case .nostr_event(let ev): - switch ev { - case .event(let sub_id, let ev): - // globally handle likes - /* - let always_process = sub_id == notifications_subid || sub_id == contacts_subid || sub_id == home_subid || sub_id == dms_subid || sub_id == init_subid || ev.known_kind == .like || ev.known_kind == .boost || ev.known_kind == .zap || ev.known_kind == .contacts || ev.known_kind == .metadata - if !always_process { - // TODO: other views like threads might have their own sub ids, so ignore those events... or should we? - return - } - */ - - self.process_event(sub_id: sub_id, relay_id: relay_id, ev: ev) - case .notice(let msg): - print(msg) - - case .eose(let sub_id): - guard let txn = NdbTxn(ndb: damus_state.ndb) else { - return - } - - if sub_id == dms_subid { - var dms = dms.dms.flatMap { $0.events } - dms.append(contentsOf: incoming_dms) - load_profiles(context: "dms", profiles_subid: profiles_subid, relay_id: relay_id, load: .from_events(dms), damus_state: damus_state, txn: txn) - } else if sub_id == notifications_subid { - load_profiles(context: "notifications", profiles_subid: profiles_subid, relay_id: relay_id, load: .from_keys(notifications.uniq_pubkeys()), damus_state: damus_state, txn: txn) - } else if sub_id == home_subid { - load_profiles(context: "home", profiles_subid: profiles_subid, relay_id: relay_id, load: .from_events(events.events), damus_state: damus_state, txn: txn) + } + + Task { + let relayListFilter = NostrFilter(kinds: [.relay_list], limit: 1, authors: [damus_state.pubkey]) + for await item in damus_state.nostrNetwork.reader.subscribe(filters: [relayListFilter]) { + switch item { + case .event(let borrow): + var event: NostrEvent? = nil + try? borrow { ev in + event = ev.toOwned() + } + guard let event else { return } + await process_event(ev: event, context: .initialRelayList) + case .eose: break } - - self.loading = false - break - - case .ok: - break - case .auth: - break } - } } - - /// Send the initial filters, just our contact list mostly - func send_initial_filters(relay_id: RelayURL) { - let filter = NostrFilter(kinds: [.contacts], limit: 1, authors: [damus_state.pubkey]) - let subscription = NostrSubscribe(filters: [filter], sub_id: init_subid) - pool.send(.subscribe(subscription), to: [relay_id]) - } - /// After initial connection or reconnect, send subscription filters for the home timeline, DMs, and notifications - func send_home_filters(relay_id: RelayURL?) 
{
+    func send_home_filters() {
         // TODO: since times should be based on events from a specific relay
         // perhaps we could mark this in the relay pool somehow
 
@@ -589,38 +531,99 @@ class HomeModel: ContactsDelegate {
         let contacts_filter_chunks = contacts_filter.chunked(on: .authors, into: MAX_CONTACTS_ON_FILTER)
         var contacts_filters = contacts_filter_chunks + [our_contacts_filter, our_blocklist_filter, our_old_blocklist_filter]
         var dms_filters = [dms_filter, our_dms_filter]
-        let last_of_kind = get_last_of_kind(relay_id: relay_id)
-
-        contacts_filters = update_filters_with_since(last_of_kind: last_of_kind, filters: contacts_filters)
-        notifications_filters = update_filters_with_since(last_of_kind: last_of_kind, filters: notifications_filters)
-        dms_filters = update_filters_with_since(last_of_kind: last_of_kind, filters: dms_filters)
 
         //print_filters(relay_id: relay_id, filters: [home_filters, contacts_filters, notifications_filters, dms_filters])
 
-        subscribe_to_home_filters(relay_id: relay_id)
-
-        let relay_ids = relay_id.map { [$0] }
+        subscribe_to_home_filters()
 
-        pool.send(.subscribe(.init(filters: contacts_filters, sub_id: contacts_subid)), to: relay_ids)
-        pool.send(.subscribe(.init(filters: notifications_filters, sub_id: notifications_subid)), to: relay_ids)
-        pool.send(.subscribe(.init(filters: dms_filters, sub_id: dms_subid)), to: relay_ids)
+
+        self.contactsHandlerTask?.cancel()
+        self.contactsHandlerTask = Task {
+            for await item in damus_state.nostrNetwork.reader.subscribe(filters: contacts_filters) {
+                switch item {
+                case .event(let borrow):
+                    var event: NostrEvent? = nil
+                    try? borrow { ev in
+                        event = ev.toOwned()
+                    }
+                    guard let event else { return }
+                    await self.process_event(ev: event, context: .contacts)
+                case .eose: continue
+                }
+            }
+        }
+        self.notificationsHandlerTask?.cancel()
+        self.notificationsHandlerTask = Task {
+            for await item in damus_state.nostrNetwork.reader.subscribe(filters: notifications_filters) {
+                switch item {
+                case .event(let borrow):
+                    var event: NostrEvent? = nil
+                    try? borrow { ev in
+                        event = ev.toOwned()
+                    }
+                    guard let event else { return }
+                    await self.process_event(ev: event, context: .notifications)
+                case .eose:
+                    guard let txn = NdbTxn(ndb: damus_state.ndb) else { return }
+                    load_profiles(context: "notifications", load: .from_keys(notifications.uniq_pubkeys()), damus_state: damus_state, txn: txn)
+                }
+            }
+        }
+        self.dmsHandlerTask?.cancel()
+        self.dmsHandlerTask = Task {
+            for await item in damus_state.nostrNetwork.reader.subscribe(filters: dms_filters) {
+                switch item {
+                case .event(let borrow):
+                    var event: NostrEvent? = nil
+                    try? borrow { ev in
+                        event = ev.toOwned()
+                    }
+                    guard let event else { return }
+                    await self.process_event(ev: event, context: .dms)
+                case .eose:
+                    guard let txn = NdbTxn(ndb: damus_state.ndb) else { return }
+                    var dms = dms.dms.flatMap { $0.events }
+                    dms.append(contentsOf: incoming_dms)
+                    load_profiles(context: "dms", load: .from_events(dms), damus_state: damus_state, txn: txn)
+                }
+            }
+        }
+        self.nwcHandlerTask?.cancel()
+        self.nwcHandlerTask = Task {
+            if let nwc_str = damus_state.settings.nostr_wallet_connect,
+               let nwc = WalletConnectURL(str: nwc_str)
+            {
+                var filter = NostrFilter(kinds: [.nwc_response])
+                filter.authors = [nwc.pubkey]
+                filter.limit = 0
+                for await item in damus_state.nostrNetwork.reader.subscribe(filters: [filter], to: [nwc.relay]) {
+                    switch item {
+                    case .event(let borrow):
+                        var event: NostrEvent? = nil
+                        try?
borrow { ev in + event = ev.toOwned() + } + guard let event else { return } + await self.process_event(ev: event, context: .nwc) + case .eose: continue + } + } + } + + } } func get_last_of_kind(relay_id: RelayURL?) -> [UInt32: NostrEvent] { return relay_id.flatMap { last_event_of_kind[$0] } ?? [:] } - func unsubscribe_to_home_filters() { - pool.send(.unsubscribe(home_subid)) - } - func get_friends() -> [Pubkey] { var friends = damus_state.contacts.get_friend_list() friends.insert(damus_state.pubkey) return Array(friends) } - func subscribe_to_home_filters(friends fs: [Pubkey]? = nil, relay_id: RelayURL? = nil) { + func subscribe_to_home_filters(friends fs: [Pubkey]? = nil) { // TODO: separate likes? var home_filter_kinds: [NostrKind] = [ .text, .longform, .boost, .highlight @@ -649,11 +652,34 @@ class HomeModel: ContactsDelegate { home_filters.append(hashtag_filter) } - let relay_ids = relay_id.map { [$0] } - home_filters = update_filters_with_since(last_of_kind: get_last_of_kind(relay_id: relay_id), filters: home_filters) - let sub = NostrSubscribe(filters: home_filters, sub_id: home_subid) - - pool.send(.subscribe(sub), to: relay_ids) + self.homeHandlerTask?.cancel() + self.homeHandlerTask = Task { + for await item in damus_state.nostrNetwork.reader.subscribe(filters: home_filters) { + switch item { + case .event(let borrow): + var event: NostrEvent? = nil + try? borrow { ev in + event = ev.toOwned() + } + guard let event else { return } + await self.process_event(ev: event, context: .home) + case .eose: + guard let txn = NdbTxn(ndb: damus_state.ndb) else { return } + load_profiles(context: "home", load: .from_events(events.events), damus_state: damus_state, txn: txn) + } + } + } + } + + /// Adapter pattern to make migration easier + enum SubscriptionContext { + case initialContactList + case initialRelayList + case home + case notifications + case dms + case contacts + case nwc } func handle_mute_list_event(_ ev: NostrEvent) { @@ -746,7 +772,7 @@ class HomeModel: ContactsDelegate { } - func handle_text_event(sub_id: String, _ ev: NostrEvent) { + func handle_text_event(_ ev: NostrEvent, context: SubscriptionContext) { guard should_show_event(state: damus_state, ev: ev) else { return } @@ -770,10 +796,13 @@ class HomeModel: ContactsDelegate { } } - if sub_id == home_subid { + switch context { + case .home: insert_home_event(ev) - } else if sub_id == notifications_subid { + case .notifications: handle_notification(ev: ev) + case .dms, .contacts, .initialRelayList, .initialContactList, .nwc: + break } } @@ -1191,3 +1220,24 @@ func create_in_app_event_zap_notification(profiles: Profiles, zap: Zap, locale: } } +// MARK: - Extension to bridge NIP-65 relay list structs with app-native objects +// TODO: Do we need this?? + +//extension NIP65.RelayList { +// static func fromLegacyContactList(_ contactList: NdbNote) throws(BridgeError) -> Self { +// guard let relayListInfo = decode_json_relays(contactList.content) else { throw .couldNotDecodeRelayListInfo } +// let relayItems = relayListInfo.map({ url, rwConfiguration in +// return RelayItem(url: url, rwConfiguration: rwConfiguration.toNIP65RWConfiguration() ?? .readWrite) +// }) +// return NIP65.RelayList(relays: relayItems) +// } +// +// static func fromLegacyContactList(_ contactList: NdbNote?) throws(BridgeError) -> Self? 
{ +// guard let contactList = contactList else { return nil } +// return try fromLegacyContactList(contactList) +// } +// +// enum BridgeError: Error { +// case couldNotDecodeRelayListInfo +// } +//} diff --git a/damus/Features/Wallet/Models/WalletConnect/WalletConnect+.swift b/damus/Features/Wallet/Models/WalletConnect/WalletConnect+.swift index 06043f39f..1ed038bfc 100644 --- a/damus/Features/Wallet/Models/WalletConnect/WalletConnect+.swift +++ b/damus/Features/Wallet/Models/WalletConnect/WalletConnect+.swift @@ -123,8 +123,8 @@ extension WalletConnect { let delay = 0.0 // We don't need a delay when fetching a transaction list or balance - WalletConnect.request_transaction_list(url: nwc, pool: damus_state.nostrNetwork.pool, post: damus_state.nostrNetwork.postbox, delay: delay, on_flush: flusher) - WalletConnect.request_balance_information(url: nwc, pool: damus_state.nostrNetwork.pool, post: damus_state.nostrNetwork.postbox, delay: delay, on_flush: flusher) + damus_state.nostrNetwork.requestTransactionList(url: nwc, delay: delay, on_flush: flusher) + damus_state.nostrNetwork.requestBalanceInformation(url: nwc, delay: delay, on_flush: flusher) return } @@ -153,22 +153,6 @@ extension WalletConnect { } } - /// Send a donation zap to the Damus team - static func send_donation_zap(pool: RelayPool, postbox: PostBox, nwc: WalletConnectURL, percent: Int, base_msats: Int64) async { - let percent_f = Double(percent) / 100.0 - let donations_msats = Int64(percent_f * Double(base_msats)) - - let payreq = LNUrlPayRequest(allowsNostr: true, commentAllowed: nil, nostrPubkey: "", callback: "https://sendsats.lol/@damus") - guard let invoice = await fetch_zap_invoice(payreq, zapreq: nil, msats: donations_msats, zap_type: .non_zap, comment: nil) else { - // we failed... oh well. no donation for us. - print("damus-donation failed to fetch invoice") - return - } - - print("damus-donation donating...") - WalletConnect.pay(url: nwc, pool: pool, post: postbox, invoice: invoice, zap_request: nil, delay: nil) - } - /// Handles a received Nostr Wallet Connect error static func handle_error(zapcache: Zaps, evcache: EventCache, resp: WalletConnect.FullWalletResponse) { // find a pending zap with the nwc request id associated with this response and remove it diff --git a/damus/Features/Wallet/Views/SendPaymentView.swift b/damus/Features/Wallet/Views/SendPaymentView.swift index f9555e6db..2f0e94eda 100644 --- a/damus/Features/Wallet/Views/SendPaymentView.swift +++ b/damus/Features/Wallet/Views/SendPaymentView.swift @@ -185,7 +185,7 @@ struct SendPaymentView: View { sendState = .processing // Process payment - guard let payRequestEv = WalletConnect.pay(url: nwc, pool: damus_state.nostrNetwork.pool, post: damus_state.nostrNetwork.postbox, invoice: invoice.string, zap_request: nil, delay: nil) else { + guard let payRequestEv = damus_state.nostrNetwork.nwcPay(url: nwc, post: damus_state.nostrNetwork.postbox, invoice: invoice.string, zap_request: nil) else { sendState = .failed(error: .init( user_visible_description: NSLocalizedString("The payment request could not be made to your wallet provider.", comment: "A human-readable error message"), tip: NSLocalizedString("Check if your wallet looks configured correctly and try again. 
If the error persists, please contact support.", comment: "A human-readable tip for an error when a payment request cannot be made to a wallet."), diff --git a/damus/Features/Zaps/Models/ZapsModel.swift b/damus/Features/Zaps/Models/ZapsModel.swift index 9e1e56949..49af62d5c 100644 --- a/damus/Features/Zaps/Models/ZapsModel.swift +++ b/damus/Features/Zaps/Models/ZapsModel.swift @@ -11,7 +11,7 @@ class ZapsModel: ObservableObject { let state: DamusState let target: ZapTarget - let zaps_subid = UUID().description + var zapCommsListener: Task? = nil let profiles_subid = UUID().description init(state: DamusState, target: ZapTarget) { @@ -31,46 +31,40 @@ class ZapsModel: ObservableObject { case .note(let note_target): filter.referenced_ids = [note_target.note_id] } - state.nostrNetwork.pool.subscribe(sub_id: zaps_subid, filters: [filter], handler: handle_event) + zapCommsListener?.cancel() + zapCommsListener = Task { + for await item in state.nostrNetwork.reader.subscribe(filters: [filter]) { + switch item { + case .event(let borrow): + var event: NostrEvent? = nil + try? borrow { ev in + event = ev.toOwned() + } + guard let event else { return } + await self.handle_event(ev: event) + case .eose: + let events = state.events.lookup_zaps(target: target).map { $0.request.ev } + guard let txn = NdbTxn(ndb: state.ndb) else { return } + load_profiles(context: "zaps_model", load: .from_events(events), damus_state: state, txn: txn) + } + } + } } func unsubscribe() { - state.nostrNetwork.pool.unsubscribe(sub_id: zaps_subid) + zapCommsListener?.cancel() + zapCommsListener = nil } - + @MainActor - func handle_event(relay_id: RelayURL, conn_ev: NostrConnectionEvent) { - guard case .nostr_event(let resp) = conn_ev else { - return - } - - guard resp.subid == zaps_subid else { + func handle_event(ev: NostrEvent) { + guard ev.kind == 9735, + let zapper = state.profiles.lookup_zapper(pubkey: target.pubkey), + let zap = Zap.from_zap_event(zap_ev: ev, zapper: zapper, our_privkey: state.keypair.privkey) + else { return } - switch resp { - case .ok: - break - case .notice: - break - case .eose: - let events = state.events.lookup_zaps(target: target).map { $0.request.ev } - guard let txn = NdbTxn(ndb: state.ndb) else { return } - load_profiles(context: "zaps_model", profiles_subid: profiles_subid, relay_id: relay_id, load: .from_events(events), damus_state: state, txn: txn) - case .event(_, let ev): - guard ev.kind == 9735, - let zapper = state.profiles.lookup_zapper(pubkey: target.pubkey), - let zap = Zap.from_zap_event(zap_ev: ev, zapper: zapper, our_privkey: state.keypair.privkey) - else { - return - } - - self.state.add_zap(zap: .zap(zap)) - case .auth: - break - } - - - + self.state.add_zap(zap: .zap(zap)) } } diff --git a/damus/Features/Zaps/Views/CustomizeZapView.swift b/damus/Features/Zaps/Views/CustomizeZapView.swift index d7a7502d5..cf6befd9f 100644 --- a/damus/Features/Zaps/Views/CustomizeZapView.swift +++ b/damus/Features/Zaps/Views/CustomizeZapView.swift @@ -175,7 +175,9 @@ struct CustomizeZapView: View { } else { Button(action: { let amount = model.custom_amount_sats - send_zap(damus_state: state, target: target, lnurl: lnurl, is_custom: true, comment: model.comment, amount_sats: amount, zap_type: model.zap_type) + Task { + await send_zap(damus_state: state, target: target, lnurl: lnurl, is_custom: true, comment: model.comment, amount_sats: amount, zap_type: model.zap_type) + } model.zapping = true }) { HStack { diff --git a/damus/Features/Zaps/Views/NoteZapButton.swift 
b/damus/Features/Zaps/Views/NoteZapButton.swift index e8d1f859e..d7a1c9efe 100644 --- a/damus/Features/Zaps/Views/NoteZapButton.swift +++ b/damus/Features/Zaps/Views/NoteZapButton.swift @@ -72,7 +72,7 @@ struct NoteZapButton: View { func tap() { guard let our_zap else { - send_zap(damus_state: damus_state, target: target, lnurl: lnurl, is_custom: false, comment: nil, amount_sats: nil, zap_type: damus_state.settings.default_zap_type) + Task { await send_zap(damus_state: damus_state, target: target, lnurl: lnurl, is_custom: false, comment: nil, amount_sats: nil, zap_type: damus_state.settings.default_zap_type) } return } @@ -173,13 +173,13 @@ func initial_pending_zap_state(settings: UserSettingsStore) -> PendingZapState { return .external(ExtPendingZapState(state: .fetching_invoice)) } -func send_zap(damus_state: DamusState, target: ZapTarget, lnurl: String, is_custom: Bool, comment: String?, amount_sats: Int?, zap_type: ZapType) { +func send_zap(damus_state: DamusState, target: ZapTarget, lnurl: String, is_custom: Bool, comment: String?, amount_sats: Int?, zap_type: ZapType) async { guard let keypair = damus_state.keypair.to_full() else { return } // Only take the first 10 because reasons - let relays = Array(damus_state.nostrNetwork.pool.our_descriptors.prefix(10)) + let relays = Array(damus_state.nostrNetwork.ourRelayDescriptors.prefix(10)) let content = comment ?? "" guard let mzapreq = make_zap_request_event(keypair: keypair, content: content, relays: relays, target: target, zap_type: zap_type) else { @@ -232,7 +232,7 @@ func send_zap(damus_state: DamusState, target: ZapTarget, lnurl: String, is_cust flusher = .once({ pe in // send donation zap when the pending zap is flushed, this allows user to cancel and not send a donation Task { @MainActor in - await WalletConnect.send_donation_zap(pool: damus_state.nostrNetwork.pool, postbox: damus_state.nostrNetwork.postbox, nwc: nwc_state.url, percent: damus_state.settings.donation_percent, base_msats: amount_msat) + await damus_state.nostrNetwork.send_donation_zap(nwc: nwc_state.url, percent: damus_state.settings.donation_percent, base_msats: amount_msat) } }) } @@ -240,7 +240,7 @@ func send_zap(damus_state: DamusState, target: ZapTarget, lnurl: String, is_cust // we don't have a delay on one-tap nozaps (since this will be from customize zap view) let delay = damus_state.settings.nozaps ? 
nil : 5.0 - let nwc_req = WalletConnect.pay(url: nwc_state.url, pool: damus_state.nostrNetwork.pool, post: damus_state.nostrNetwork.postbox, invoice: inv, zap_request: zapreq, delay: delay, on_flush: flusher) + let nwc_req = damus_state.nostrNetwork.nwcPay(url: nwc_state.url, post: damus_state.nostrNetwork.postbox, invoice: inv, delay: delay, on_flush: flusher) guard let nwc_req, case .nwc(let pzap_state) = pending_zap_state else { print("nwc: failed to send nwc request for zapreq \(reqid.reqid)") diff --git a/damus/Shared/Utilities/Router.swift b/damus/Shared/Utilities/Router.swift index caf2f0e16..9ab1b3fcd 100644 --- a/damus/Shared/Utilities/Router.swift +++ b/damus/Shared/Utilities/Router.swift @@ -130,7 +130,7 @@ enum Route: Hashable { case .FollowersYouKnow(let friendedFollowers, let followers): FollowersYouKnowView(damus_state: damusState, friended_followers: friendedFollowers, followers: followers) case .Script(let load_model): - LoadScript(pool: damusState.nostrNetwork.pool, model: load_model) + LoadScript(pool: RelayPool(ndb: damusState.ndb, keypair: damusState.keypair), model: load_model) case .NIP05DomainEvents(let events, let nip05_domain_favicon): NIP05DomainTimelineView(damus_state: damusState, model: events, nip05_domain_favicon: nip05_domain_favicon) case .NIP05DomainPubkeys(let domain, let nip05_domain_favicon, let pubkeys): @@ -237,7 +237,6 @@ enum Route: Hashable { case .FollowersYouKnow(let friendedFollowers, let followers): hasher.combine("followersYouKnow") hasher.combine(friendedFollowers) - hasher.combine(followers.sub_id) case .Script(let model): hasher.combine("script") hasher.combine(model.data.count) diff --git a/highlighter action extension/ActionViewController.swift b/highlighter action extension/ActionViewController.swift index 46a63b488..0ee8bbc32 100644 --- a/highlighter action extension/ActionViewController.swift +++ b/highlighter action extension/ActionViewController.swift @@ -164,7 +164,7 @@ struct ShareExtensionView: View { break case .active: print("txn: 📙 HIGHLIGHTER ACTIVE") - state.nostrNetwork.pool.ping() + state.nostrNetwork.ping() @unknown default: break } diff --git a/share extension/ShareViewController.swift b/share extension/ShareViewController.swift index 2afa8624a..67c38f76a 100644 --- a/share extension/ShareViewController.swift +++ b/share extension/ShareViewController.swift @@ -193,7 +193,7 @@ struct ShareExtensionView: View { break case .active: print("txn: 📙 SHARE ACTIVE") - state.nostrNetwork.pool.ping() + state.nostrNetwork.ping() @unknown default: break } From 5f3ce30826340205432f1c25204c605fbbfa3f03 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Mon, 18 Aug 2025 17:17:26 -0700 Subject: [PATCH 02/91] Fix memory race condition MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Daniel D’Aquino --- damus/Core/Nostr/RelayPool.swift | 13 +++++++++---- damus/Features/Posting/Models/PostBox.swift | 2 +- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/damus/Core/Nostr/RelayPool.swift b/damus/Core/Nostr/RelayPool.swift index be9e1be97..33aae481a 100644 --- a/damus/Core/Nostr/RelayPool.swift +++ b/damus/Core/Nostr/RelayPool.swift @@ -99,6 +99,7 @@ class RelayPool { } } + @MainActor func register_handler(sub_id: String, handler: @escaping (RelayURL, NostrConnectionEvent) -> ()) { for handler in handlers { // don't add duplicate handlers @@ -201,8 +202,10 @@ class RelayPool { } func subscribe(sub_id: String, filters: [NostrFilter], handler: @escaping 
(RelayURL, NostrConnectionEvent) -> (), to: [RelayURL]? = nil) { - register_handler(sub_id: sub_id, handler: handler) - send(.subscribe(.init(filters: filters, sub_id: sub_id)), to: to) + Task { + await register_handler(sub_id: sub_id, handler: handler) + send(.subscribe(.init(filters: filters, sub_id: sub_id)), to: to) + } } /// Subscribes to data from the `RelayPool` based on a filter and a list of desired relays. @@ -264,8 +267,10 @@ class RelayPool { } func subscribe_to(sub_id: String, filters: [NostrFilter], to: [RelayURL]?, handler: @escaping (RelayURL, NostrConnectionEvent) -> ()) { - register_handler(sub_id: sub_id, handler: handler) - send(.subscribe(.init(filters: filters, sub_id: sub_id)), to: to) + Task { + await register_handler(sub_id: sub_id, handler: handler) + send(.subscribe(.init(filters: filters, sub_id: sub_id)), to: to) + } } func count_queued(relay: RelayURL) -> Int { diff --git a/damus/Features/Posting/Models/PostBox.swift b/damus/Features/Posting/Models/PostBox.swift index 061b1ec0d..0f84038c0 100644 --- a/damus/Features/Posting/Models/PostBox.swift +++ b/damus/Features/Posting/Models/PostBox.swift @@ -60,7 +60,7 @@ class PostBox { init(pool: RelayPool) { self.pool = pool self.events = [:] - pool.register_handler(sub_id: "postbox", handler: handle_event) + Task { await pool.register_handler(sub_id: "postbox", handler: handle_event) } } // only works reliably on delay-sent events From 8083269709ad2b955ff0969c3d4d1ade36671fd1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Mon, 18 Aug 2025 17:46:40 -0700 Subject: [PATCH 03/91] Switch to local relay model MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Changelog-Changed: Switched to the local relay model Changelog-Added: Notes now load offline Signed-off-by: Daniel D’Aquino --- .../SubscriptionManager.swift | 22 +++++++++++-------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift b/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift index 9de44e8ba..d9c6a2366 100644 --- a/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift +++ b/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift @@ -32,17 +32,14 @@ extension NostrNetworkManager { /// - Returns: An async stream of nostr data func subscribe(filters: [NostrFilter], to desiredRelays: [RelayURL]? = nil) -> AsyncStream { return AsyncStream { continuation in - let streamTask = Task { - for await item in self.pool.subscribe(filters: filters, to: desiredRelays) { + let ndbStreamTask = Task { + for await item in try self.ndb.subscribe(filters: try filters.map({ try NdbFilter(from: $0) })) { switch item { - case .eose: continuation.yield(.eose) - case .event(let nostrEvent): - // At this point of the pipeline, if the note is valid it should have been processed and verified by NostrDB, - // in which case we should pull the note from NostrDB to ensure validity. 
- // However, NdbNotes are unowned, so we return a function where our callers can temporarily borrow the NostrDB note - let noteId = nostrEvent.id + case .eose: + continuation.yield(.eose) + case .event(let noteKey): let lender: NdbNoteLender = { lend in - guard let ndbNoteTxn = self.ndb.lookup_note(noteId) else { + guard let ndbNoteTxn = self.ndb.lookup_note_by_key(noteKey) else { throw NdbNoteLenderError.errorLoadingNote } guard let unownedNote = UnownedNdbNote(ndbNoteTxn) else { @@ -54,8 +51,15 @@ extension NostrNetworkManager { } } } + let streamTask = Task { + for await _ in self.pool.subscribe(filters: filters, to: desiredRelays) { + // NO-OP. Notes will be automatically ingested by NostrDB + // TODO: Improve efficiency of subscriptions? + } + } continuation.onTermination = { @Sendable _ in streamTask.cancel() // Close the RelayPool stream when caller stops streaming + ndbStreamTask.cancel() } } } From abd797b7b3c3c12cec355d86b7ad1bb6650afc57 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Mon, 18 Aug 2025 18:12:46 -0700 Subject: [PATCH 04/91] Fix another race condition that leads to a memory error MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Daniel D’Aquino --- damus/Core/Nostr/RelayPool.swift | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/damus/Core/Nostr/RelayPool.swift b/damus/Core/Nostr/RelayPool.swift index 33aae481a..4026bebd5 100644 --- a/damus/Core/Nostr/RelayPool.swift +++ b/damus/Core/Nostr/RelayPool.swift @@ -284,6 +284,7 @@ class RelayPool { return c } + @MainActor func queue_req(r: NostrRequestType, relay: RelayURL, skip_ephemeral: Bool) { let count = count_queued(relay: relay) guard count <= 10 else { @@ -326,7 +327,7 @@ class RelayPool { } guard relay.connection.isConnected else { - queue_req(r: req, relay: relay.id, skip_ephemeral: skip_ephemeral) + Task { await queue_req(r: req, relay: relay.id, skip_ephemeral: skip_ephemeral) } continue } From e113dee95e4071de5547026a104d5b98ed39329a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Wed, 20 Aug 2025 15:14:52 -0700 Subject: [PATCH 05/91] Publish "loading" variable update on the main thread to avoid undefined behaviour MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Daniel D’Aquino --- damus/Features/Search/Models/SearchHomeModel.swift | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/damus/Features/Search/Models/SearchHomeModel.swift b/damus/Features/Search/Models/SearchHomeModel.swift index 5820f02d4..ae9505d8e 100644 --- a/damus/Features/Search/Models/SearchHomeModel.swift +++ b/damus/Features/Search/Models/SearchHomeModel.swift @@ -40,7 +40,9 @@ class SearchHomeModel: ObservableObject { } func load() async { - loading = true + DispatchQueue.main.async { + self.loading = true + } let to_relays = damus_state.nostrNetwork.ourRelayDescriptors .map { $0.url } .filter { !damus_state.relay_filters.is_filtered(timeline: .search, relay_id: $0) } @@ -57,7 +59,9 @@ class SearchHomeModel: ObservableObject { case .eose: break } } - loading = false + DispatchQueue.main.async { + self.loading = false + } guard let txn = NdbTxn(ndb: damus_state.ndb) else { return } load_profiles(context: "universe", load: .from_events(events.all_events), damus_state: damus_state, txn: txn) From 940b83f5c46c402d738ec341eb1658c880f86262 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Fri, 22 Aug 2025 12:57:50 
-0700 Subject: [PATCH 06/91] Add ndb subscription tests MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Daniel D’Aquino --- damus.xcodeproj/project.pbxproj | 16 ++++ .../SubscriptionManager.swift | 31 ++++--- .../Features/Timeline/Models/HomeModel.swift | 4 +- damusTests/Mocking/MockDamusState.swift | 7 +- .../NostrNetworkManagerTests.swift | 86 +++++++++++++++++ .../NostrNetworkManagerTests/test_notes.jsonl | 92 +++++++++++++++++++ 6 files changed, 219 insertions(+), 17 deletions(-) create mode 100644 damusTests/NostrNetworkManagerTests/NostrNetworkManagerTests.swift create mode 100644 damusTests/NostrNetworkManagerTests/test_notes.jsonl diff --git a/damus.xcodeproj/project.pbxproj b/damus.xcodeproj/project.pbxproj index 8f086bcd2..84ecdfc84 100644 --- a/damus.xcodeproj/project.pbxproj +++ b/damus.xcodeproj/project.pbxproj @@ -1754,6 +1754,8 @@ D7DF58342DFCF18D00E9AD28 /* SendPaymentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = D7DF58312DFCF18800E9AD28 /* SendPaymentView.swift */; }; D7EB00B02CD59C8D00660C07 /* PresentFullScreenItemNotify.swift in Sources */ = {isa = PBXBuildFile; fileRef = D7EB00AF2CD59C8300660C07 /* PresentFullScreenItemNotify.swift */; }; D7EB00B12CD59C8D00660C07 /* PresentFullScreenItemNotify.swift in Sources */ = {isa = PBXBuildFile; fileRef = D7EB00AF2CD59C8300660C07 /* PresentFullScreenItemNotify.swift */; }; + D7EBF8BB2E59022A004EAE29 /* NostrNetworkManagerTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = D7EBF8BA2E5901F7004EAE29 /* NostrNetworkManagerTests.swift */; }; + D7EBF8BE2E59470D004EAE29 /* test_notes.jsonl in Resources */ = {isa = PBXBuildFile; fileRef = D7EBF8BD2E594708004EAE29 /* test_notes.jsonl */; }; D7EDED152B11776B0018B19C /* LibreTranslateServer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3AE45AF5297BB2E700C1D842 /* LibreTranslateServer.swift */; }; D7EDED162B1177840018B19C /* LNUrls.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4CB883B5297730E400DC99E7 /* LNUrls.swift */; }; D7EDED172B1177960018B19C /* TranslationService.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3AAA95C9298DF87B00F3D526 /* TranslationService.swift */; }; @@ -2688,6 +2690,8 @@ D7DEEF2E2A8C021E00E0C99F /* NostrEventTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NostrEventTests.swift; sourceTree = ""; }; D7DF58312DFCF18800E9AD28 /* SendPaymentView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SendPaymentView.swift; sourceTree = ""; }; D7EB00AF2CD59C8300660C07 /* PresentFullScreenItemNotify.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PresentFullScreenItemNotify.swift; sourceTree = ""; }; + D7EBF8BA2E5901F7004EAE29 /* NostrNetworkManagerTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NostrNetworkManagerTests.swift; sourceTree = ""; }; + D7EBF8BD2E594708004EAE29 /* test_notes.jsonl */ = {isa = PBXFileReference; lastKnownFileType = text; path = test_notes.jsonl; sourceTree = ""; }; D7EDED1B2B1178FE0018B19C /* NoteContent.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NoteContent.swift; sourceTree = ""; }; D7EDED1D2B11797D0018B19C /* LongformEvent.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LongformEvent.swift; sourceTree = ""; }; D7EDED202B117DCA0018B19C /* SequenceUtils.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = 
SequenceUtils.swift; sourceTree = ""; }; @@ -3670,6 +3674,7 @@ 4CE6DEF627F7A08200C66700 /* damusTests */ = { isa = PBXGroup; children = ( + D7EBF8BC2E5946F9004EAE29 /* NostrNetworkManagerTests */, D7DB1FED2D5AC50F00CF06DA /* NIP44v2EncryptionTests.swift */, D7A0D8742D1FE66A00DCBE59 /* EditPictureControlTests.swift */, E06336A72B7582D600A88E6B /* Assets */, @@ -4991,6 +4996,15 @@ path = NIP65; sourceTree = ""; }; + D7EBF8BC2E5946F9004EAE29 /* NostrNetworkManagerTests */ = { + isa = PBXGroup; + children = ( + D7EBF8BD2E594708004EAE29 /* test_notes.jsonl */, + D7EBF8BA2E5901F7004EAE29 /* NostrNetworkManagerTests.swift */, + ); + path = NostrNetworkManagerTests; + sourceTree = ""; + }; E06336A72B7582D600A88E6B /* Assets */ = { isa = PBXGroup; children = ( @@ -5313,6 +5327,7 @@ files = ( E06336AB2B75850100A88E6B /* img_with_location.jpeg in Resources */, D7DB1FF12D5AC5D700CF06DA /* nip44.vectors.json in Resources */, + D7EBF8BE2E59470D004EAE29 /* test_notes.jsonl in Resources */, 4C0C039A2A61E27B0098B3B8 /* bool_setting.wasm in Resources */, D7DB1FF32D5AC5EA00CF06DA /* LICENSES in Resources */, 4C0C03992A61E27B0098B3B8 /* primal.wasm in Resources */, @@ -5924,6 +5939,7 @@ D71DC1EC2A9129C3006E207C /* PostViewTests.swift in Sources */, 3AAC7A022A60FE72002B50DF /* LocalizationUtilTests.swift in Sources */, D7CBD1D62B8D509800BFD889 /* DamusPurpleImpendingExpirationTests.swift in Sources */, + D7EBF8BB2E59022A004EAE29 /* NostrNetworkManagerTests.swift in Sources */, D7DEEF2F2A8C021E00E0C99F /* NostrEventTests.swift in Sources */, 4C8D00D429E3C5D40036AF10 /* NIP19Tests.swift in Sources */, 3A30410129AB12AA008A0F29 /* EventGroupViewTests.swift in Sources */, diff --git a/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift b/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift index d9c6a2366..4202c1162 100644 --- a/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift +++ b/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift @@ -33,23 +33,28 @@ extension NostrNetworkManager { func subscribe(filters: [NostrFilter], to desiredRelays: [RelayURL]? 
= nil) -> AsyncStream { return AsyncStream { continuation in let ndbStreamTask = Task { - for await item in try self.ndb.subscribe(filters: try filters.map({ try NdbFilter(from: $0) })) { - switch item { - case .eose: - continuation.yield(.eose) - case .event(let noteKey): - let lender: NdbNoteLender = { lend in - guard let ndbNoteTxn = self.ndb.lookup_note_by_key(noteKey) else { - throw NdbNoteLenderError.errorLoadingNote + do { + for await item in try self.ndb.subscribe(filters: try filters.map({ try NdbFilter(from: $0) })) { + switch item { + case .eose: + continuation.yield(.eose) + case .event(let noteKey): + let lender: NdbNoteLender = { lend in + guard let ndbNoteTxn = self.ndb.lookup_note_by_key(noteKey) else { + throw NdbNoteLenderError.errorLoadingNote + } + guard let unownedNote = UnownedNdbNote(ndbNoteTxn) else { + throw NdbNoteLenderError.errorLoadingNote + } + lend(unownedNote) } - guard let unownedNote = UnownedNdbNote(ndbNoteTxn) else { - throw NdbNoteLenderError.errorLoadingNote - } - lend(unownedNote) + continuation.yield(.event(borrow: lender)) } - continuation.yield(.event(borrow: lender)) } } + catch { + Log.error("NDB streaming error: %s", for: .ndb, error.localizedDescription) + } } let streamTask = Task { for await _ in self.pool.subscribe(filters: filters, to: desiredRelays) { diff --git a/damus/Features/Timeline/Models/HomeModel.swift b/damus/Features/Timeline/Models/HomeModel.swift index 8d2da21e8..d9566364e 100644 --- a/damus/Features/Timeline/Models/HomeModel.swift +++ b/damus/Features/Timeline/Models/HomeModel.swift @@ -561,8 +561,8 @@ class HomeModel: ContactsDelegate { try? borrow { ev in event = ev.toOwned() } - guard let event else { return } - await self.process_event(ev: event, context: .notifications) + guard let theEvent = event else { return } + await self.process_event(ev: theEvent, context: .notifications) case .eose: guard let txn = NdbTxn(ndb: damus_state.ndb) else { return } load_profiles(context: "notifications", load: .from_keys(notifications.uniq_pubkeys()), damus_state: damus_state, txn: txn) diff --git a/damusTests/Mocking/MockDamusState.swift b/damusTests/Mocking/MockDamusState.swift index fa8c13237..0be2400bb 100644 --- a/damusTests/Mocking/MockDamusState.swift +++ b/damusTests/Mocking/MockDamusState.swift @@ -11,7 +11,8 @@ import EmojiPicker // Generates a test damus state with configurable mock parameters func generate_test_damus_state( - mock_profile_info: [Pubkey: Profile]? + mock_profile_info: [Pubkey: Profile]?, + home: HomeModel? = nil ) -> DamusState { // Create a unique temporary directory let ndb = Ndb.test @@ -32,7 +33,7 @@ func generate_test_damus_state( boosts: .init(our_pubkey: our_pubkey), contacts: .init(our_pubkey: our_pubkey), mutelist_manager: mutelist_manager, profiles: profiles, - dms: .init(our_pubkey: our_pubkey), + dms: home?.dms ?? .init(our_pubkey: our_pubkey), previews: .init(), zaps: .init(our_pubkey: our_pubkey), lnurls: .init(), @@ -52,6 +53,8 @@ func generate_test_damus_state( emoji_provider: DefaultEmojiProvider(showAllVariations: false), favicon_cache: .init() ) + + home?.damus_state = damus return damus } diff --git a/damusTests/NostrNetworkManagerTests/NostrNetworkManagerTests.swift b/damusTests/NostrNetworkManagerTests/NostrNetworkManagerTests.swift new file mode 100644 index 000000000..91ff65dd9 --- /dev/null +++ b/damusTests/NostrNetworkManagerTests/NostrNetworkManagerTests.swift @@ -0,0 +1,86 @@ +// +// NostrNetworkManagerTests.swift +// damus +// +// Created by Daniel D'Aquino on 2025-08-22. 
+// + +import XCTest +@testable import damus + + +class NostrNetworkManagerTests: XCTestCase { + var damusState: DamusState? = nil + + override func setUpWithError() throws { + // Load the test fixture notes into NostrDB before each test. + damusState = generate_test_damus_state(mock_profile_info: nil) + + let notesJSONL = getTestNotesJSONL() + + for noteText in notesJSONL.split(separator: "\n") { + let _ = damusState!.ndb.process_event("[\"EVENT\",\"subid\",\(String(noteText))]") + } + } + + override func tearDownWithError() throws { + // Release the test state after each test. + damusState = nil + } + + func getTestNotesJSONL() -> String { + // Get the path for the test_notes.jsonl file in the same folder as this test file + let testBundle = Bundle(for: type(of: self)) + let fileURL = testBundle.url(forResource: "test_notes", withExtension: "jsonl")! + + // Load the contents of the file + return try! String(contentsOf: fileURL, encoding: .utf8) + } + + func ensureSubscribeGetsAllExpectedNotes(filter: NostrFilter, expectedCount: Int) { + let endOfStream = XCTestExpectation(description: "Stream should receive EOSE") + let gotAtLeastExpectedCount = XCTestExpectation(description: "Stream should receive at least the expected number of items") + var receivedCount = 0 + var eventIds: Set<NoteId> = [] + Task { + for await item in self.damusState!.nostrNetwork.reader.subscribe(filters: [filter]) { + switch item { + case .event(borrow: let borrow): + try? borrow { event in + receivedCount += 1 + if eventIds.contains(event.id) { + XCTFail("Got duplicate event ID: \(event.id)") + } + eventIds.insert(event.id) + } + if receivedCount == expectedCount { + gotAtLeastExpectedCount.fulfill() + } + case .eose: + // End of stored events; fulfill the end-of-stream expectation + endOfStream.fulfill() + } + } + } + wait(for: [endOfStream, gotAtLeastExpectedCount], timeout: 10.0) + XCTAssertEqual(receivedCount, expectedCount, "Event IDs: \(eventIds.map({ $0.hex() }))") + } + + /// Tests to ensure that subscribing gets the correct number of events + /// + /// ## Implementation notes: + /// + /// To create a new scenario, `nak` can be used as a reference: + /// 1. `cd` into the folder where the `test_notes.jsonl` file is + /// 2. Run `nak serve --events test_notes.jsonl` + /// 3. On a separate terminal, run `nak` commands with the desired filter against the local relay, and get the line count. 
Example: + /// ``` + /// nak req --kind 1 ws://localhost:10547 | wc -l + /// ``` + func testNdbSubscription() { + ensureSubscribeGetsAllExpectedNotes(filter: NostrFilter(kinds: [.text]), expectedCount: 57) + ensureSubscribeGetsAllExpectedNotes(filter: NostrFilter(authors: [Pubkey(hex: "32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245")!]), expectedCount: 22) + ensureSubscribeGetsAllExpectedNotes(filter: NostrFilter(kinds: [.boost], referenced_ids: [NoteId(hex: "64b26d0a587f5f894470e1e4783756b4d8ba971226de975ee30ac1b69970d5a1")!]), expectedCount: 5) + ensureSubscribeGetsAllExpectedNotes(filter: NostrFilter(kinds: [.text, .boost, .zap], referenced_ids: [NoteId(hex: "64b26d0a587f5f894470e1e4783756b4d8ba971226de975ee30ac1b69970d5a1")!], limit: 500), expectedCount: 5) + } +} diff --git a/damusTests/NostrNetworkManagerTests/test_notes.jsonl b/damusTests/NostrNetworkManagerTests/test_notes.jsonl new file mode 100644 index 000000000..56ca111db --- /dev/null +++ b/damusTests/NostrNetworkManagerTests/test_notes.jsonl @@ -0,0 +1,92 @@ +{"kind":1,"id":"92416fa9cfb470248c541ee5d56668f9b86f1e0b134d712df7bfc306313bc3f0","pubkey":"17538dc2a62769d09443f18c37cbe358fab5bbf981173542aa7c5ff171ed77c4","created_at":1755909948,"tags":[["e","c1154afd60952dc14fb17b5eae02e6dc32be2548bf0164962bcd9f77d7e8d3c3","wss://relay.damus.io","root"],["p","9ca0bd7450742d6a20319c0e3d4c679c9e046a9dc70e8ef55c2905e24052340b"],["p","3f770d65d3a764a9c5cb503ae123e62ec7598ad035d836e2a810f3877a745b24"]],"content":"LFG","sig":"d34ac228f50f55ce6edc0216df2f25bdf756c2027dc74de09e26e738c69fca9e95d7b852d5cc4f511b682718830929f1fc56bc98cf90bea959a8a9939b6a5e8b"} +{"kind":1,"id":"d90a79de6fd73365350a67f7111b3aa0b58be49b85a8cdd93914e7038f91c0c2","pubkey":"17538dc2a62769d09443f18c37cbe358fab5bbf981173542aa7c5ff171ed77c4","created_at":1755909710,"tags":[["e","ed5b84593e1bb642147b63983c47b27c789e514fd24fb14fb7e312115613fec0","","root"],["e","e1ee326dbebc2e293cab090a2cc787570b54b5bdb6260d2061487986047d7019","wss://relay.damus.io","reply"],["p","8fb140b4e8ddef97ce4b821d247278a1a4353362623f64021484b372f948000c"],["p","50de492cfe5472450df1a0176fdf6d915e97cb5d9f8d3eccef7d25ff0a8871de"]],"content":"Interesting. 
\n\nThe ephemeral part makes it a different thing.","sig":"7901fddf98c1ab87fe8ca56a0cdeab0f4ba6d38611e3fa8d2375de9e39f66ebac45ba27fec9efdb79d6e037aa43272e01dfe9ab3761f025e10a2735ca10f5de1"} +{"kind":1,"id":"51d8806ed9842625f6df3fd2105847008da7669d31b606e34cafc5adaf6da3b7","pubkey":"17538dc2a62769d09443f18c37cbe358fab5bbf981173542aa7c5ff171ed77c4","created_at":1755909579,"tags":[["e","38ef7da6e141824f87fd3507bcd35f7315d7eeb974f979ad04ae63f87b1194f1","","root"],["e","37396c8c95188306138e414a30a4816c3941eba2e69375840abc2e89b55e1fb7","wss://nos.lol","reply"],["p","deab79dafa1c2be4b4a6d3aca1357b6caa0b744bf46ad529a5ae464288579e68"],["p","9ca0bd7450742d6a20319c0e3d4c679c9e046a9dc70e8ef55c2905e24052340b"]],"content":"Vlogs are 4ever\n\nHere you go\n\nnostr:nevent1qqsdfr20vtzwz8u7dmwuf5jx6fxcztk7qvh8c4a3nr95wly8k94t6fsw9smfy","sig":"b3be1bf1b3689142fbd02c0db1afc80b38eec46e6dd492b8c485336305af4a0d3eb1d7c2aac31b98aa9628b6c756544b76da37b6974a8bc1f9934afbc417e3b3"} +{"kind":6,"id":"c32c1f22c9cf450b748af5746ec49cb86ff3adc4b772c8a457ed29417598667d","pubkey":"17538dc2a62769d09443f18c37cbe358fab5bbf981173542aa7c5ff171ed77c4","created_at":1755909541,"tags":[["p","6e468422dfb74a5738702a8823b9b28168abab8655faacb6853cd0ee15deee93"],["e","b09e18b36361b9aa44f7b5b444f31532c82c35a017db512e825eb98c9d8a9a00","","root","2f29aa33c2a3b45c2ef32212879248b2f4a49a002bd0de0fa16c94e138ac6f13"],["p","2f29aa33c2a3b45c2ef32212879248b2f4a49a002bd0de0fa16c94e138ac6f13"]],"content":"{\"kind\":1,\"created_at\":1755528938,\"sig\":\"dd0f13716999fa9534295ccd99b7fb8bf3db4288c476246fa7bd1a89b520b2d3eba30e375d791dd0d05e24eb9061eb625ec20d394ace3cf1a26b7f1175e708a1\",\"tags\":[[\"e\",\"bfa746f8b86cabcbd3b479638391413d5dadb6bf416aa2d8df8b4ed270df50f8\",\"wss:\\/\\/pyramid.fiatjaf.com\",\"root\"],[\"p\",\"6e468422dfb74a5738702a8823b9b28168abab8655faacb6853cd0ee15deee93\",\"\",\"mention\"]],\"content\":\"Pura vloga \",\"pubkey\":\"2f29aa33c2a3b45c2ef32212879248b2f4a49a002bd0de0fa16c94e138ac6f13\",\"id\":\"b09e18b36361b9aa44f7b5b444f31532c82c35a017db512e825eb98c9d8a9a00\"}","sig":"af2301dc144ecf2a34178cde389d8e3fef66f68ed455fd2cb5811c9d09545ed824299b159143248937411161da9d0886359288fd6f6dec9009774a72fddcfc7b"} +{"kind":6,"id":"0d1ede2c2e16a3c0c4bb6fd00935d8bbfd793f685b2e0e6d9ee92a3af4dea043","pubkey":"17538dc2a62769d09443f18c37cbe358fab5bbf981173542aa7c5ff171ed77c4","created_at":1755909358,"tags":[["p","78ce6faa72264387284e647ba6938995735ec8c7d5c5a65737e55130f026307d"],["p","78ce6faa72264387284e647ba6938995735ec8c7d5c5a65737e55130f026307d"],["e","c7e3f4c46a036634be446d84b65749a4eb513cb341c3ea5b072f494128e797d3","wss://wot.utxo.one","root","460c25e682fda7832b52d1f22d3d22b3176d972f60dcdc3212ed8c92ef85065c"],["p","460c25e682fda7832b52d1f22d3d22b3176d972f60dcdc3212ed8c92ef85065c","wss://wot.utxo.one"]],"content":"{\"created_at\":1755901048,\"tags\":[[\"alt\",\"A short note: I think I am the only dev that is happy to see the...\"],[\"p\",\"78ce6faa72264387284e647ba6938995735ec8c7d5c5a65737e55130f026307d\",\"wss:\\/\\/relay.damus.io\\/\"],[\"p\",\"78ce6faa72264387284e647ba6938995735ec8c7d5c5a65737e55130f026307d\",\"wss:\\/\\/relay.damus.io\\/\"]],\"content\":\"I think I am the only dev that is happy to see the number of Google Play installs go down in their little chart. 
\\n\\nBecause I know people are uninstalling to re-install via nostr:nprofile1qqs83nn04fezvsu89p8xg7axjwye2u67errat3dx2um725fs7qnrqlgpz4mhxue69uhhyetvv9ujuerpd46hxtnfduhs79prlk\\n\\nOne day we won't need Google Play.\",\"sig\":\"6c76e44e603e621b2369880db50e6973abc09162a7b6113845f141eacd98b8237aef1a2ea8f3ec8667badb974ab08b46bef308f6bf3797264c98a0e52f955c70\",\"pubkey\":\"460c25e682fda7832b52d1f22d3d22b3176d972f60dcdc3212ed8c92ef85065c\",\"id\":\"c7e3f4c46a036634be446d84b65749a4eb513cb341c3ea5b072f494128e797d3\",\"kind\":1}","sig":"760c30bb51c2d4f670a288c02400115394eb63094a713798f38a18b10d0bd25dab462a9649c9a4cae2217208891510e952adc6d7deb5bb95cda49c1515488970"} +{"kind":1,"id":"19fba9de403728cb8619e3c2f2c8a44f90b57c2f70507d5dc24d9f3e6018889a","pubkey":"17538dc2a62769d09443f18c37cbe358fab5bbf981173542aa7c5ff171ed77c4","created_at":1755908847,"tags":[["e","ed5b84593e1bb642147b63983c47b27c789e514fd24fb14fb7e312115613fec0","","root"],["e","de94c7299f74827979e43d4122ed45e3c8138cf6d52a18d42e58363d47c48393","wss://relay.mostr.pub","reply"],["p","50de492cfe5472450df1a0176fdf6d915e97cb5d9f8d3eccef7d25ff0a8871de"],["p","8fb140b4e8ddef97ce4b821d247278a1a4353362623f64021484b372f948000c"]],"content":"Kind 20,000","sig":"4039f31b75f15803c6ec37ffd7a62cbf52565c6fa0687b05302b947ae966fd55035ca9c86bbe8d74cf75728d8a7d51404ef15a5de59978f3fc16a5e477b3828d"} +{"kind":1,"id":"f0267191bb81bdf8cca0e799a6ab23acefc9a66598d6f3cc9ab0e19b001f12fa","pubkey":"17538dc2a62769d09443f18c37cbe358fab5bbf981173542aa7c5ff171ed77c4","created_at":1755908715,"tags":[["e","f2d3cb82c7314bbc901440b60a91bf23ebca29aaad6ec51ef5f7471f6ce9c313","","root"],["e","5b5758263ca631b682feca2098501cf66b6aeb3334d1568582082a085f0a587a","wss://wot.utxo.one","reply"],["p","fcf70a45cfa817eaa813b9ba8a375d713d3169f4a27f3dcac3d49112df67d37e"],["p","4523be58d395b1b196a9b8c82b038b6895cb02b683d0c253a955068dba1facd0"],["p","21b5a9c73d53c546547300441d780682b66cf4ee666bcbd5923eb1d839a545b6"]],"content":"No one has sats. 
It is difficult to buy them.","sig":"040a5daf170f8d9423dd714af0c0693f253a410d7168015cd4b9d44ed0b5e041b189e2ac7754c591454b4b4046a31e77e7b0883af1968acef1a5510e235b8683"} +{"kind":1,"id":"49c8d4a3fc242137c32365fe1d9c34bc9c329f6f6d42367ec5a7762fca143e5c","pubkey":"17538dc2a62769d09443f18c37cbe358fab5bbf981173542aa7c5ff171ed77c4","created_at":1755908681,"tags":[["q","ed5b84593e1bb642147b63983c47b27c789e514fd24fb14fb7e312115613fec0","wss://relay.damus.io","8fb140b4e8ddef97ce4b821d247278a1a4353362623f64021484b372f948000c"],["p","8fb140b4e8ddef97ce4b821d247278a1a4353362623f64021484b372f948000c","wss://relay.damus.io"],["p","8fb140b4e8ddef97ce4b821d247278a1a4353362623f64021484b372f948000c"],["p","82341f882b6eabcd2ba7f1ef90aad961cf074af15b9ef44a09f9d2a8fbfbe6a2"]],"content":"How might we funnel the bitchat enthusiasts to the 💩post network nostr:npub1sg6plzptd64u62a878hep2kev88swjh3tw00gjsfl8f237lmu63q0uf63m\n\nnostr:nevent1qqsw6kuytylphdjzz3ak8xpug7e8c7y7298ayna3f7m7xys32cflasqpz3mhxue69uhhyetvv9ujuerpd46hxtnfduq32amnwvaz7tmjv4kxz7fwd4hhxarj9ec82cszyz8mzs95arw7l97wfwpp6frj0zs6gdfnvf3r7eqzzjztxuhefqqqcqcyqqqqqqghywde8","sig":"a3420fa531664d1af0ddbbb394b39fec43c44798b01587367f63189f27b17891e266674506d29cf8bd9ba7999d54f645f112c1fde836cefac955c85a908896ec"} +{"kind":1,"id":"6cc757ccf1285e66968bd3f06729ecd6df614c3d48e9c4066c962457b9723d74","pubkey":"17538dc2a62769d09443f18c37cbe358fab5bbf981173542aa7c5ff171ed77c4","created_at":1755908570,"tags":[["e","fd8e7a19706831d1d5b28b09b772a11cb396d5d8c995c0dfe5cc6814434c8f28","wss://relay.damus.io","root"],["p","9ca0bd7450742d6a20319c0e3d4c679c9e046a9dc70e8ef55c2905e24052340b"]],"content":"😂","sig":"5326dae1148144105fa8016539c35ddedb714c7e9994acf603ef9b6d4bc1bded5eb91e525220dc2edd30d2e2a8b769eec267b5ab79a680181bd0afa12c405a5a"} +{"kind":1,"id":"dd69cfa06d95c22a5a7a631dadc006cc1b7db42f40933b42e9dd52a8c2df0aa9","pubkey":"17538dc2a62769d09443f18c37cbe358fab5bbf981173542aa7c5ff171ed77c4","created_at":1755905846,"tags":[["q","e278ab9f097d74dd954315c621809afdf5b80944f8a5aa6b6c1a2b01ed9a300e","wss://relay.damus.io","50d94fc2d8580c682b071a542f8b1e31a200b0508bab95a33bef0855df281d63"],["p","50d94fc2d8580c682b071a542f8b1e31a200b0508bab95a33bef0855df281d63","wss://relay.damus.io"],["p","50d94fc2d8580c682b071a542f8b1e31a200b0508bab95a33bef0855df281d63"]],"content":"👀\n\nnostr:nevent1qqswy79tnuyh6axaj4p3t33pszd0madcp9z03fd2ddkp52cpakdrqrspz3mhxue69uhhyetvv9ujuerpd46hxtnfduqs6amnwvaz7tmwdaejumr0dsq3yamnwvaz7tmhda6zuat50phjummwv5q3vamnwvaz7tmjv4kxz7fwdehhxarj9e3xzmnyqgs9pk20ctv9srrg9vr354p03v0rrgsqkpggh2u45va77zz4mu5p6ccrqsqqqqqpnthcqs","sig":"aa1300f909832639c66cd86721c62a510bb16158556556a2a4edea69c31cf01dc32e138a48f4d2b8280934f810c60e17f198d0dff18f45d9d72032715821814d"} +{"kind":1,"id":"16c5a17822534689e80ea278fd95fc98fc41436bac07a8e09d404383fcd2aef7","pubkey":"17538dc2a62769d09443f18c37cbe358fab5bbf981173542aa7c5ff171ed77c4","created_at":1755905826,"tags":[["e","2db065de0a20e620798d812cc0ef7ca6e224a10cc95d6742e95b0fe759b1cf23","wss://nos.lol","root"],["p","50054d07e2cdf32b1035777bd9cf73992a4ae22f91c14a762efdaa5bf61f4755"]],"content":"😂","sig":"2958d6b20445df6a625977f24e6f52e8e709c674f17af91cad9c1571b8912fd1d35afeeaf09629d99adf1d270219846ad8072669218c96e7b6f17cab90ef5556"} 
+{"kind":1,"id":"4b94a1b02354b4e3a69359460a0aac51c92ed7d983fcf655665e4e2ebbf65e2d","pubkey":"17538dc2a62769d09443f18c37cbe358fab5bbf981173542aa7c5ff171ed77c4","created_at":1755896304,"tags":[["e","2caf8dd5e8a65cf07f576d8edca4a870df458c409f03113c738f324b6d0492bc","wss://a.nos.lol","root"],["e","553a013beac05f186711b958e5991e4d44a832d3babbae306371d71de5a4a6a2","wss://relay.mostr.pub","reply"],["p","deba271e547767bd6d8eec75eece5615db317a03b07f459134b03e7236005655"]],"content":"Do you know who got him to check out linux?","sig":"1e15b810ccd13ab6c9639e99a2d207d3bc5a85f439cfdcfa5a12d7437f916a0203c90a2881b8afe9cb78ca49b34fdc9cd55cdd2d9c4755f0fa928e7bdae99fad"} +{"kind":1,"id":"f4048d952f5675dcb0f126b95270e835ee461b7ff8b807de84487b01501d8598","pubkey":"17538dc2a62769d09443f18c37cbe358fab5bbf981173542aa7c5ff171ed77c4","created_at":1755892873,"tags":[["e","2caf8dd5e8a65cf07f576d8edca4a870df458c409f03113c738f324b6d0492bc","wss://relay.mostr.pub","root"],["p","deba271e547767bd6d8eec75eece5615db317a03b07f459134b03e7236005655"]],"content":"Npub?","sig":"f6cf81ab8a6b4a1d0c093a0129520eb854ccb1fa6b1d3737e36d900412647c1f55080f8a3715e856fa2d93611e6a15f7133e77eed0737ee1e590252636f6ee54"} +{"kind":1,"id":"de5daebeba485cf01055d0bd2900682f69b52fee8b47ae28a052ab02f41d66ab","pubkey":"17538dc2a62769d09443f18c37cbe358fab5bbf981173542aa7c5ff171ed77c4","created_at":1755889552,"tags":[["e","06b45b6bf9f2853d21654fe1f6982e186259d491028aa2b748b6e8a0a2886bda","wss://nos.lol","root"],["e","9c4e8877e51ab402e88b75a889ae2efa240e1d09b6c940d8dc2a0efd5d2a4a47","wss://nos.lol","reply"],["p","c4f5e7a75a8ce3683d529cff06368439c529e5243c6b125ba68789198856cac7"],["p","b7c6f6915cfa9a62fff6a1f02604de88c23c6c6c6d1b8f62c7cc10749f307e81"],["p","facf399fbdee81dd45d2748e8129b24d27fba2224b5ecd7eb461c57670977863"]],"content":"👀","sig":"81197f0d313f6952c9b8bfab1e018d27826593283f284bce473761d966c523e90980c35ad1162eb356b1427bf07a413d4b27bc4836a99ced01c38fded7bcd154"} +{"kind":1,"id":"f1e6211f25e17c28c339fdf1ba1b78fce0ce043e1a9d090822ef6476c4f08553","pubkey":"17538dc2a62769d09443f18c37cbe358fab5bbf981173542aa7c5ff171ed77c4","created_at":1755889301,"tags":[["e","fcf96abd7841beaa04ef020426037888e32af3e6a64640a00be740a9684c5797","","root"],["e","5657eb11dc4f7133669e79ff1cc6fbb344fbfe4722d08a2d71de08de7b5bd53c","wss://relay.mostr.pub","reply"],["p","9ca0bd7450742d6a20319c0e3d4c679c9e046a9dc70e8ef55c2905e24052340b"],["p","deab79dafa1c2be4b4a6d3aca1357b6caa0b744bf46ad529a5ae464288579e68"],["r","https://github.com/nostrability/nostrability/issues/101"]],"content":"See https://github.com/nostrability/nostrability/issues/101","sig":"63627baac2934ffc036194003e10f5f885bdf8dbb145fa3069820177403058ffa32dd395ed9e812cd63cd3d9df516f6cdeda26d4a8f2205534d311c1403a4aab"} +{"kind":1,"id":"0e6fb993ff7173c39d898c309a580bd6f59be966380809db2c797b40f4ebf4dd","pubkey":"17538dc2a62769d09443f18c37cbe358fab5bbf981173542aa7c5ff171ed77c4","created_at":1755889246,"tags":[["q","645751e4c36ef0b3399fee382bedbc6670bd66b25cd54cf31b80dc104ffec5fb","wss://relay.damus.io","e5237023a5c0929e7ae0e5128d41a8213138400ec110dbe9d8a29278f22b7c13"],["p","e5237023a5c0929e7ae0e5128d41a8213138400ec110dbe9d8a29278f22b7c13","wss://relay.damus.io"],["imeta","url https://image.nostr.build/8386cfe9ae90f20810b4057c00cf252d0fbd8d215e50e9c011b27d5db1b0aa89.jpg","blurhash e@M@l#R*WXozt700t7j]azkCWBa}j]ofj]-;fkj[azazxuj]j[fQa}","dim 
375x315"],["p","e5237023a5c0929e7ae0e5128d41a8213138400ec110dbe9d8a29278f22b7c13"],["p","e5237023a5c0929e7ae0e5128d41a8213138400ec110dbe9d8a29278f22b7c13"],["r","https://following.space/d/y156932o9xfh"],["r","https://image.nostr.build/8386cfe9ae90f20810b4057c00cf252d0fbd8d215e50e9c011b27d5db1b0aa89.jpg"]],"content":"Added nostr:npub1u53hqga9czffu7hqu5fg6sdgyycnssqwcygdh6wc52f83u3t0sfstpnzt7 to builders follow pack\n\nhttps://following.space/d/y156932o9xfh\n\nhttps://image.nostr.build/8386cfe9ae90f20810b4057c00cf252d0fbd8d215e50e9c011b27d5db1b0aa89.jpg\n\nnostr:nevent1qqsxg463unpkau9n8x07uwptak7xvu9av6e9e42v7vdcphqsfllvt7cpz3mhxue69uhhyetvv9ujuerpd46hxtnfduq3vamnwvaz7tmjv4kxz7fwdehhxarj9e3xzmnyqgsw2gmsywjupy570tsw2y5dgx5zzvfcgq8vzyxma8v29ync7g4hcycrqsqqqqqp8alq0r","sig":"5f53f6d5a468ffdcf87ed85a0df2a77f597b900a583ae962bbb7bbcd5fbfe1c933789817703092a80098f2fbf71bf09246a3b9dc32a77a1717da4bf32f484a3a"} +{"kind":39089,"id":"c74b41c4b0470c9f2f3f710980263bde1fbb97813dba0a7895bf829cc2e1b901","pubkey":"17538dc2a62769d09443f18c37cbe358fab5bbf981173542aa7c5ff171ed77c4","created_at":1755889149,"tags":[["title","Human Architecture, Local Vernacular, and Craftsmanship"],["d","y156932o9xfh"],["image","https://c4.wallpaperflare.com/wallpaper/82/123/783/architecture-building-bulgaria-village-wallpaper-preview.jpg"],["p","5db2be23cde61dd0a69e667a021a943fa38760104fe4160ce13b1a097e9fe447"],["p","7a8e476bd97e1a9c348b2f9e1c8c9d1f371e2fda001dae82e44d336d4ca2f7ec"],["p","16f1a0100d4cfffbcc4230e8e0e4290cc5849c1adc64d6653fda07c031b1074b"],["p","462eb31a6b3de5727407e796d984be2c631cb4bfa854f8a1a2b092dcc6d7bbe1"],["p","e9986a10caaa96738ceda88aabd3e184307be5143e687457581f9b096c6ef89c"],["p","704c4773626bf0f7ebf99d861eb0eae473be2b004f91725f8a1750486c9c848f"],["p","41261aca9c043397d53c1d09d2d62926e8ab230ec8f7516c258c81c6844169c3"],["p","94f57887daad1a4b952bd755539f239922cd614a1b1ba0e623ea8361a4ca2a65"],["p","0ab915c92977c66b57c6bf64d58252db46e5d027ad2c7e1aac9aa3b4bc2ae379"],["p","7a78fbfec68c2b3ab6084f1f808321ba3b5ea47502c41115902013e648e76288"],["p","b6424601bbdff474098af14b18e3cf5a7cfeeafc129538b430884ce1a11e4ff4"],["p","a2ddc2e07b48561da61f5e06cb5a172329a8afdc1e5f8059154330be5b30ea24"],["p","79416b7f67ac988f40550c518d6676db6d6f54d2d98e4519eec854b87bd79913"],["p","61066504617ee79387021e18c89fb79d1ddbc3e7bff19cf2298f40466f8715e9"],["p","518b2e2976d23bbf778edd3a8613f393d67c5df579b99bb459222ae3cb6e6b78"],["p","e5237023a5c0929e7ae0e5128d41a8213138400ec110dbe9d8a29278f22b7c13"],["description","Aesthetic architecture, craftsmanship, and localism"]],"content":"","sig":"6c3d20919df52ecf1a4c9eec9c0a8adedef5659a30f03ac2fc1306780d1b23e5d3b59bca94cd16bed2bf96abbdafb32983ed5d9f8bd34c03e5b4e47962c4f23e"} +{"kind":1,"id":"757d37a9cfc98c9abf3ff6310b0f5ba93a2a191190a299d92d8e5af50dbfb9a8","pubkey":"17538dc2a62769d09443f18c37cbe358fab5bbf981173542aa7c5ff171ed77c4","created_at":1755887457,"tags":[["e","047b960cf0d37469636a6fa7cc6e7526c78848d2ec111bbd522f2de304875474","wss://no.str.cr","root"],["e","afd67b05d544496c3d1e5b5098c80b67275b3a47adbf175a234fd39191f14b1a","wss://nos.lol","reply"],["imeta","url https://image.nostr.build/c189fbac808cde10d19b396cc9f463a43b4d8c8ad643fcac24a3f005d6cda1d2.jpg","blurhash eBNBDT}hX9kE^1^HW9SiWFR+[o15-T-6R-sDw^awI[NeRYWERks*S2","dim 
746x694"],["p","82341f882b6eabcd2ba7f1ef90aad961cf074af15b9ef44a09f9d2a8fbfbe6a2"],["p","b7c6f6915cfa9a62fff6a1f02604de88c23c6c6c6d1b8f62c7cc10749f307e81"],["p","314072c16fa9433e1374f62e5b02c8163946ed298a9cde3b1541513c29d19fff"],["r","https://image.nostr.build/c189fbac808cde10d19b396cc9f463a43b4d8c8ad643fcac24a3f005d6cda1d2.jpg"]],"content":"https://image.nostr.build/c189fbac808cde10d19b396cc9f463a43b4d8c8ad643fcac24a3f005d6cda1d2.jpg","sig":"8e2c2c6c2c0bc6bf5c955fab0fbd4005338ebd3c8fa3a98792294fde1fce5b5e8bfa21d9d5d7416d173d55a1bcc6300507a2a2503062c4cb63549ef009c44676"} +{"kind":1,"id":"9b2a3016d3cd10b00d2071640ce3905e29c5cd662cda82566b3e0c78ebb52bfc","pubkey":"17538dc2a62769d09443f18c37cbe358fab5bbf981173542aa7c5ff171ed77c4","created_at":1755885702,"tags":[["e","f2d3cb82c7314bbc901440b60a91bf23ebca29aaad6ec51ef5f7471f6ce9c313","wss://wot.utxo.one","root"],["e","ce013bc42b6267923920aa7e4648187dcce84c37d48a9397e8f31fa468e5d15d","wss://relay.nostr.band","reply"],["p","fcf70a45cfa817eaa813b9ba8a375d713d3169f4a27f3dcac3d49112df67d37e"],["p","4523be58d395b1b196a9b8c82b038b6895cb02b683d0c253a955068dba1facd0"]],"content":"WOT and ephemeral identities dont mesh","sig":"4f626f25ad036c3645a441ed1820c3902b6afd3fef1358be67ef9a696e5fec96bd82603c359a6c570c9afee25636d6450872b7e6ba5b51eb7f81118f20589fe2"} +{"kind":1,"id":"46517fb4a1196ffd7a848d480da1dc05dda0a5a0b28b3e508329244abefb583c","pubkey":"17538dc2a62769d09443f18c37cbe358fab5bbf981173542aa7c5ff171ed77c4","created_at":1755885662,"tags":[["q","8a09e11c290c1d2ebd6d33466ff6002b2267066a6ba0723b6674f96ce47ba22d","wss://relay.mostr.pub","16b8676587c1ddde60b23b27205112a4d5f0ce7bd0414f67476d5eea1502af36"],["p","16b8676587c1ddde60b23b27205112a4d5f0ce7bd0414f67476d5eea1502af36","wss://relay.mostr.pub"],["p","16b8676587c1ddde60b23b27205112a4d5f0ce7bd0414f67476d5eea1502af36"],["t","devstr"]],"content":"#devstr\n\nnostr:nevent1qqsg5z0prs5sc8fwh4knx3n07cqzkgn8qe4xhgrj8dn8f7tvu3a6ytgpz4mhxue69uhhyetvv9ujumt0wd68ytnsw43qzrthwden5te0dehhxtnvdakqz9nhwden5te0wfjkccte9ehx7um5wghxyctwvsq3gamnwvaz7tmjv4kxz7fwv3sk6atn9e5k7q3qz6uxwev8c8wauc9j8vnjq5gj5n2lpnnm6pq57e68d40w59gz4umqxpqqqqqqztkzta8","sig":"d200cae4968eaa4264718ea327e6bb174a73d06876b4e9d664181764fab3f1d6740b743d2ff007694dbecb12294324014d6eade3e4a21ed38bf2397c7d1ca519"} +{"kind":1,"id":"502e08c0d192a12146ea5d8ac1be691da4954ca8def478f68b5323f985bbb2a7","pubkey":"32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245","created_at":1755909539,"tags":[["e","2e8cff181fdcb0afff38ab61be66f1870980d6c43dff1b852eb543636e597ac5","wss://nos.lol/","root"],["e","c32386a930a9fb28ccff3c1f5f1b7398e5fc2b6f824452478318bff312f6f024","","reply"],["p","5ea4648045bb1ff222655ddd36e6dceddc43590c26090c486bef38ef450da5bd"]],"content":"I’ve always like the idea of having items that are insanely rare: 6 gem roll (rare) * perfect gem slot quality(6 * rare roll) * max stat slots * best stat rolls (T6)\n\nBasically when you multiply it out you get something so astronomically rare, but would be epic if it dropped","sig":"0588ed0aa5b81fc4cc23eb21474bd73e9fcf382de91feb106b6dff7e5cc47a1107dad68d0e4fb3f13b7e9bd00105b584bf3b87bdd94ea01e1143ccf01c54111d"} 
+{"kind":1,"id":"642d9830ce89200fcb5b88fedf6015a9154bc690c8c6c951d896e9c4c18c2529","pubkey":"32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245","created_at":1755909323,"tags":[["e","2e8cff181fdcb0afff38ab61be66f1870980d6c43dff1b852eb543636e597ac5","","root"],["e","2c11132854da7a9bc59122a67b167886f91c285a1a2e88828b90fee5466eb204","","reply"],["p","e1ff3bfdd4e40315959b08b4fcc8245eaa514637e1d4ec2ae166b743341be1af"]],"content":"🙂‍↕️","sig":"78b94dc0432ef7b272418f4e0ffd800917172112fd667e2f2d4274d742c0c09156b7610dfea80c684487d2ea411df07a30e53b8ccccb896fbf669f181e772d68"} +{"kind":1,"id":"2f368384d551847d1e72bde5e72786766533811f550ee368f8f58637c0a62a43","pubkey":"32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245","created_at":1755908193,"tags":[["client","Damus Notedeck"],["e","2e8cff181fdcb0afff38ab61be66f1870980d6c43dff1b852eb543636e597ac5","","root"],["e","984d1ea5a2f8436d5ff1fc263d6358d109de342a7ec15b8584f6a1576a371d67","","reply"],["p","f4d89779148ccd245c8d50914a284fd62d97cb0fb68b797a70f24a172b522db9"],["p","32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245"]],"content":"would have to make a game xD","sig":"9d58d1de8a88c2ccdcfbef137f4822a2245963a94d306f6d2cdcd9bcd9c92ae1122e9920c8d3f6ab855f20f5e5a1bc6fff2ac0b8c02f7128c7832de6b38541b2"} +{"kind":1,"id":"2e8cff181fdcb0afff38ab61be66f1870980d6c43dff1b852eb543636e597ac5","pubkey":"32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245","created_at":1755908085,"tags":[["q","40bdb8cb3021de44caa02faf49e179fb0c0e8d6d339ca1c8892cecbcd3e511c4"],["p","32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245"]],"content":"using this i was able to vibe code a diablo/path of exile-style loot system. it can run millions of simulations in under a second.\n\na nostr mmorpg or arpg might be fun... what if item drops could be minted as nostr notes with verifiable randomness 🤔\n\nhttps://jb55.com/s/4379d654da44f249.txt\nnostr:note1gz7m3jesy80yfj4q97h5nctelvxqartdxww2rjyf9nkte5l9z8zqjm7squ","sig":"d4cc34732aae5f8e68235bea86fe9b15c1322acc7b3fc7c2cc7e5ff320bd8e5a2fb3ed5824f5e9d172bfc1223c90ea76e5a2fe7cbfec4de1ca3df4fcd8c894a9"} +{"kind":1,"id":"281415ea46e423141bd3f12464758f6447c1775183a34669cf110439a8a73e40","pubkey":"32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245","created_at":1755887712,"tags":[["client","Damus Notedeck"],["e","29f001c119491883092482957e88d425d1a7fcb4c3fef7fb932e9f27745de725","","root"],["p","2eb03a1f316c3cf9c900e7f536ee28e5486349067be018a965a7c7ca5b4f7f3c"]],"content":"what would you like to see improved ? 
","sig":"b7794c3ed2548f69af2f0be6b6236c371c6f81e416fc529b542629838b63e012a8ba66ae8915f415a700e9f6d52b0560d9b9b15b56ecd377aacfe6099c1eeb24"} +{"kind":1,"id":"b625eda87a4748b43406a0de3d5f66349c84eeb1b86e89c3dc1113af56c0c79f","pubkey":"32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245","created_at":1755887677,"tags":[["client","Damus Notedeck"],["e","9160bae9eaee8dac9d3597e232e09964b4fb7da0a4735c2dc25c5f28839678d4","","root"],["e","379b7edcdea3e98541ac509ebf5215b3980ce848bd28629c6e865d91c7235077","","reply"],["p","9267545d2917b80f707ffdb44a8ff979182568ef7baa04ee756b1f01d4e3688a"],["p","d9dba0e072bdb353dfb0020de159126af47e69e133ea91bbd48e8bede37320e2"]],"content":"there will be more android options very soon ;)","sig":"650cc10c6452e4fea4e9d3749002e4359727396a3841c2e6cf0316ab4aa890393626c9c801c275240cde4775338f8841396ed21d26de1c598d8f440f5fc49fb5"} +{"kind":1,"id":"a3e9187d8408212fecb061e1327f6ef8b89fd52e964d6464c3f196ad6a6d6064","pubkey":"32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245","created_at":1755885181,"tags":[["client","Damus Notedeck"],["e","a7dcfef3a4b362f3513ff57fa59719593e7649b129802931dbce4149f1bcd442","","root"],["e","2d53546f65233870f813cf2270e570db4f00e632f7c236367aaa2b5309e386e3","","reply"],["p","26d6a946675e603f8de4bf6f9cef442037b70c7eee170ff06ed7673fc34c98f1"],["p","32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245"]],"content":"i recommend this beginner guide\n\nhttps://www.youtube.com/watch?v=di5-CYWyLEs","sig":"2dadc2af5bfb9a84cc24edfe92850a696ceea94fa3c866de7286542c8538ac383104987c06a86bce6e19da7a8b9353fbcb26572bcb769aed5a192dad6cbb3686"} +{"kind":1,"id":"a67651d6bfced2c224d3f812457c3f1f0dd1213e81b54e04ebde7c6ae3ab7945","pubkey":"32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245","created_at":1755885129,"tags":[["client","Damus Notedeck"],["e","a7dcfef3a4b362f3513ff57fa59719593e7649b129802931dbce4149f1bcd442","","root"],["e","5add9e23f9a306a000436119e28a185168e81c000c46f2dd68b290859945a666","","reply"],["p","26d6a946675e603f8de4bf6f9cef442037b70c7eee170ff06ed7673fc34c98f1"],["p","32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245"]],"content":"yeah its on steam. traditional roguelike","sig":"78e82409a52b349a159c63b0337545e86921bcdb4e04a1fbcffd431112c5a62f97de6d0b62c5b46ea8857094364f12b461c721288486ad2e3e8493162774fe82"} +{"kind":1,"id":"a7dcfef3a4b362f3513ff57fa59719593e7649b129802931dbce4149f1bcd442","pubkey":"32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245","created_at":1755885086,"tags":[["client","Damus Notedeck"]],"content":"caves of qud is a great game","sig":"6ec4a651cf0053121b47ad9531ed781f2cf8036eab805c82e26ed5c93a3403772b0883d69ee1e0080b6bd47d2e35d08c4d89cd49f9d03c721a151a0edc3c084e"} +{"kind":1,"id":"f2ff20e10f2ebe979515e96143e59b030112e490cc52ca42ee00938e0ca9d732","pubkey":"32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245","created_at":1755885036,"tags":[["client","Damus Notedeck"],["e","4d3e87afe931a28dfd6d1ab2e61a899bb9049d2f5049563e371f546a69586617","","root"],["p","5cad5613509b802476cf7487e7f5e1736279b4786b47381ca642c7ea4fd1f886"]],"content":"hey! 
welcome","sig":"8315ad54dc7560fd89757d148bc988af17d87fc82473bea96c7945e5ddf6f01ef49c2e82dbc2f3b60c6fa34d531680c0fb9ad9a8a0e71a7b00f73e7a12e0f96f"} +{"kind":1,"id":"4e9aa4cc28ddd9146f8dea79330e298610774de25e8228bdec7d16bfcb5c607a","pubkey":"32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245","created_at":1755884538,"tags":[["client","Damus Notedeck"],["e","cddf92e53ecfa1f6abf4bcf415aa41802dca57ecb36853a3ce39e7739e98df94","","root"],["p","6a02b7d5d5c1ceec3d0ad28dd71c4cfeebb6397b95fef5cd5032c9223a13d02a"],["p","3efdaebb1d8923ebd99c9e7ace3b4194ab45512e2be79c1b7d68d9243e0d2681"]],"content":"i know I must be boring too. I like it simple","sig":"2a9cb1880363c9f586e561f585f7e81b580a87ecbb709011aa83046efb554f75e0c50d2a86a37a85a331183e2817b0b48c8fa9adc76add1735e5bea0a19979f9"} +{"kind":1,"id":"ce28e54c82af1c1c0e0cccda349b86e284b42facacdf0feee2d1b44052a9cf2c","pubkey":"32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245","created_at":1755882932,"tags":[["client","Damus Notedeck"],["e","ca31e602b4b7a2523571c98926379044c63202682ac48b2c4d86da5d5be00970","","root"],["p","06b7819d7f1c7f5472118266ed7bca8785dceae09e36ea3a4af665c6d1d8327c"],["p","82341f882b6eabcd2ba7f1ef90aad961cf074af15b9ef44a09f9d2a8fbfbe6a2"],["p","50d94fc2d8580c682b071a542f8b1e31a200b0508bab95a33bef0855df281d63"],["p","32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245"]],"content":"i think they doing cashu","sig":"985fcbed77ff49a6fc8598752ab47342edc89479b949a302aaa97fe974410b1a743f0517660cb40e70c2163cb858ca7781aecc138728c16d0b219336599ef8be"} +{"kind":1,"id":"6a64cd94926a35d67e5a1643bb90ac16cf6c809245a8f80a48d810c76441d0cc","pubkey":"32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245","created_at":1755882166,"tags":[["client","Damus Notedeck"]],"content":"nostr: censorship resistant goat zapping","sig":"c7beed84a4406330fa7d9a5a7aa796ffab08ffb41990c2e50472840082710f0bb766683f1132e0da9544149e319dd832897f8934aa61c37d2c7ccf77199a1a1e"} +{"kind":1,"id":"7e3c448c200995013aed1b3bab5fc3575b7cd520ca9a8f655abc4796fcc9a68e","pubkey":"32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245","created_at":1755877989,"tags":[["client","Damus Notedeck"],["t","bitchat"]],"content":"very cool to see #bitchat ephemeral chat is compatible with coolr.chat:\n\nhttps://coolr.chat\nhttps://bitchat-world-view.vercel.app/\n\nhttps://cdn.jb55.com/s/4f11afad4a6e5b52.png","sig":"d00f2b9ea494355907765fdc6689ae157029fb820713bec359618c12d3df90b7018939dc0640185b2f8732851d1ed2e29a1294cee4db77220735daf16ac7a7ea"} +{"kind":1,"id":"f4884bff57922d9d2e0e0d99515b18ca3d9f49f39b0af61be25f4b48e3eefa74","pubkey":"32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245","created_at":1755876549,"tags":[["client","Damus Notedeck"],["e","6841d1e7c04d82efd2a80a0a1147e0d24f63dbd31094fceaec05148a13ba364d","","root"],["p","1bc70a0148b3f316da33fe3c89f23e3e71ac4ff998027ec712b905cd24f6a411"]],"content":"really? 
I'm on geohash #9 and its basically empty","sig":"80f47c9494ca104f524e73edff12e0f6e0caa63cae8c10276822133de9cb691de4ce78e03999568d05fe5b33237e6860d585c026268b0fe55909d4e96e57a0bb"} +{"kind":1,"id":"349a6694ca05d36f96e6a2407d6b63f6762e375ff8b8fa621225e0d3a2738728","pubkey":"32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245","created_at":1755859555,"tags":[["e","5b60889c025ef590044586d3ae9c74f0c20ae813b2fdbf48f7703401c2305baf","","root"],["e","eef2a2b4e0e1c90416436bcf401d9e469de3163adcd4bcdf3a7e3ec4fd0484d7","","reply"],["p","4523be58d395b1b196a9b8c82b038b6895cb02b683d0c253a955068dba1facd0"],["p","592295cf2b09a7f9555f43adb734cbee8a84ee892ed3f9336e6a09b6413a0db9"],["p","dd664d5e4016433a8cd69f005ae1480804351789b59de5af06276de65633d319"],["p","8fb140b4e8ddef97ce4b821d247278a1a4353362623f64021484b372f948000c"]],"content":"mostly a reflection of how i use the app. If i wanted these things i would probably add them. The rest comes down to if someone wants it enough to add it to damus, but that rarely happens.\n\nnot to mention we’re blocked from updating our app until we remove zaps again which i don’t want to do again… so i can’t even push new features if i wanted to. 😪","sig":"3fef8586a14c0365af0e0c16c96b748685081833b5dd8482b956ce4852792bf49ee1829c564e38d6a6fbcbe2f7134a67c56661fe3f00d0ecfbe9321488429546"} +{"kind":1,"id":"ab990710e7afd4627ea5f42ea137f56ab463d43d9e5c662e40cb9cea064378d2","pubkey":"32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245","created_at":1755855651,"tags":[["e","5b60889c025ef590044586d3ae9c74f0c20ae813b2fdbf48f7703401c2305baf","","root"],["p","4523be58d395b1b196a9b8c82b038b6895cb02b683d0c253a955068dba1facd0"]],"content":"how would a kind20 client query images? also images in kind1 can be memes/gifs which would ruin kind20 clients ?","sig":"46f7a73d6016cabf59712d26c2fb7e5e33ddcadb5b7ee27ebff1fc928449d4ec5f7b6cec02eaf855514100ddf3023c1141ae8bef36c68b2a8398f2d3233d2441"} +{"kind":1,"id":"315bfe5cf896010dba98fc9cd0c6cb972950bd7999ee0f3fb5b4e07bb95c232d","pubkey":"32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245","created_at":1755825631,"tags":[["client","Damus Notedeck"],["e","bf77fc4233a7b067b908fde8294c3c68d3e46cb14b7d0febbc136fbbe84178e8","","root"],["e","fdb51267f4ca1337206e515bcd53625807866f3921c7a82d1caf16aee0765a53","","reply"],["p","460c25e682fda7832b52d1f22d3d22b3176d972f60dcdc3212ed8c92ef85065c"],["p","cc448f8b3442b60396a14baaab38fc76c8d3f31ad9f7c0e1724d921fa1ad47db"],["p","32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245"]],"content":"yes, strfry does","sig":"f55bda13b213e94d68cd5f8f963ca445bd854a6c91212fe89bae723a0bd0efb15e679de612ffc164456b16dc3d3753825a4c0ab86567b7327abc8ae6d9bbd376"} +{"kind":10002,"id":"c247609192db5c26e5c3955e7c3d9a6c20ac29e170c653ad5119cf0cc3eb5010","pubkey":"32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245","created_at":1755824065,"tags":[["r","ws://relay.jb55.com"],["r","wss://nos.lol"],["r","wss://nostr.land"],["r","wss://nostr.wine"],["r","wss://pyramid.fiatjaf.com"],["r","wss://relay.damus.io"],["r","wss://relay.mostr.pub"],["r","wss://relay.nostr.band"]],"content":"","sig":"d2c829bace4f09d7e76629a9ecb184365e0ed67dc79202c81efb50c5e1aea8c1fb5deffefb258c54a844629411338cc46aa8299a821859289f39677a4467f826"} 
+{"kind":1,"id":"76d97e6506881ac99c79a75a05f82ac57dc22cc1ec3ec6fe88e21f2b6dfebaf4","pubkey":"32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245","created_at":1755814930,"tags":[["q","b19ca91932ed749eeb69caa3ce9c470d5b9754a0d043a79fe48a56f6dc1933c4"],["p","63fe6318dc58583cfe16810f86dd09e18bfd76aabc24a0081ce2856f330504ed"]],"content":"very cool\nnostr:note1kxw2jxfja46fa6mfe23ua8z8p4dew49q6pp608ly3ft0dhqex0zq4nn7ls","sig":"6e0bef1f5705c67d95a7d04e96b9e7387119bf34f9ab34758ba5c0e6e96bcef01848afb1c0cd9a5468b3803f529ef2f57438819f79af883cf2094d59d02177cf"} +{"kind":7,"id":"0b67ed87cf3a31afc75ab0a95e6373d574ca83327368ed0ad47d6953410dc6cf","pubkey":"8b2be0a0ad34805d76679272c28a77dbede9adcbfdca48c681ec8b624a1208a6","created_at":1755278349,"tags":[["p","32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245"],["p","32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245"],["p","8b2be0a0ad34805d76679272c28a77dbede9adcbfdca48c681ec8b624a1208a6"],["e","5de075b68db52e02dc999a3cc0a6aab6f66b9e1e57b6db69c74dab58f13d4f10"],["p","bd1e19980e2c91e6dc657e92c25762ca882eb9272d2579e221f037f93788de91"]],"content":"🙏","sig":"d130955b5b4ffcd3315befba91b4fd2319c8dac61da584e7d3755dfdd0a477946e37522db86d36332d199d4ec5d882a61ce19b97ec7ac405e03b1bf2b80b7d7b"} +{"kind":6,"id":"0bd0d9ed97384bbfa6a5fb512d618d27a2ed5591aeb9f937bedd004870cc0e64","pubkey":"8b2be0a0ad34805d76679272c28a77dbede9adcbfdca48c681ec8b624a1208a6","created_at":1755117689,"tags":[["p","4c96d763eb2fe01910f7e7220b7c7ecdbe1a70057f344b9f79c28af080c3ee30"],["p","4c96d763eb2fe01910f7e7220b7c7ecdbe1a70057f344b9f79c28af080c3ee30"],["p","8b2be0a0ad34805d76679272c28a77dbede9adcbfdca48c681ec8b624a1208a6"],["e","de4ca122d3c5d923945404a0a60bdb1b0ef5fd66539ab97e43991e2c060a5f7e","wss://relay.danieldaquino.me/inbox","root","32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245"],["p","32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245","wss://relay.danieldaquino.me/inbox"]],"content":"{\"sig\":\"3552dae609073bad57560b450aca996b3133f8bcf7bc9e39f993af8a76b9d929b3847d0c9eb803c60b17ff94648f4b534363786bd49e1439036136dc824f9cb6\",\"created_at\":1755116910,\"kind\":1,\"id\":\"de4ca122d3c5d923945404a0a60bdb1b0ef5fd66539ab97e43991e2c060a5f7e\",\"tags\":[[\"q\",\"9cfb7c1917d40fd08367c432e39612369173c0c0ba603844f81e81132b8ad0e7\",\"wss:\\/\\/relay.damus.io\",\"4c96d763eb2fe01910f7e7220b7c7ecdbe1a70057f344b9f79c28af080c3ee30\"],[\"p\",\"4c96d763eb2fe01910f7e7220b7c7ecdbe1a70057f344b9f79c28af080c3ee30\",\"wss:\\/\\/relay.damus.io\"],[\"p\",\"4c96d763eb2fe01910f7e7220b7c7ecdbe1a70057f344b9f79c28af080c3ee30\"],[\"t\",\"nostrdb\"],[\"p\",\"8b2be0a0ad34805d76679272c28a77dbede9adcbfdca48c681ec8b624a1208a6\"]],\"pubkey\":\"32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245\",\"content\":\"my 2 year old, 200+ commit PR finally landed to get damus ios updated to the latest version of #nostrdb.\\n\\nThis will allow us to have notedeck-level performance and offline-first capabilities\\n\\niOS now has a fully powered embedded relay like how android and notedeck do.\\n\\nThis will enable powerful new features like local profile search which i want to add soon.\\n\\nThanks nostr:npub13v47pg9dxjq96an8jfev9znhm0k7ntwtlh9y335paj9kyjsjpznqzzl3l8 for finally getting this over the finish 
line!\\n\\nnostr:nevent1qqsfe7murytagr7ssdnugvhrjcfrdytncrqt5cpcgnupaqgn9w9dpecpz3mhxue69uhhyetvv9ujuerpd46hxtnfduq3vamnwvaz7tmjv4kxz7fwdehhxarj9e3xzmnyqyxhwumn8ghj7mn0wvhxcmmvqgsye9khv04jlcqezrm7wgst03lvm0s6wqzh7dztnauu9zhssrp7uvqrqsqqqa28sfwuwx\"}","sig":"0915ef843a7b9b632450ed1ad31933062091986e6ee671a4ed898b7f89fa74a470c86397e77dd6b5da833c06380701f1c17c8057cc99dec2f0eb920b33bdf01f"} +{"kind":7,"id":"42f1ff45cee4923b17f7c03e8d7cfa13c66548a8f188a908165866bb3b7a9e7f","pubkey":"8b2be0a0ad34805d76679272c28a77dbede9adcbfdca48c681ec8b624a1208a6","created_at":1755117653,"tags":[["p","4c96d763eb2fe01910f7e7220b7c7ecdbe1a70057f344b9f79c28af080c3ee30","wss://relay.damus.io"],["p","4c96d763eb2fe01910f7e7220b7c7ecdbe1a70057f344b9f79c28af080c3ee30"],["p","8b2be0a0ad34805d76679272c28a77dbede9adcbfdca48c681ec8b624a1208a6"],["e","de4ca122d3c5d923945404a0a60bdb1b0ef5fd66539ab97e43991e2c060a5f7e","wss://relay.danieldaquino.me/inbox","32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245"],["p","32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245","wss://relay.danieldaquino.me/inbox"]],"content":"🫂","sig":"10f4424a55635ee55c3b0533dcd02d8b3125941ff744fb92921a4eb5f30fa305b54cc7aad0f262db00a46803b5f15673df036248ba1c944cdd2e229fac6b236b"} +{"kind":7,"id":"258782c286d1087739647961f3df0861113d29a814229f095bf3a8d60dda6c14","pubkey":"8b2be0a0ad34805d76679272c28a77dbede9adcbfdca48c681ec8b624a1208a6","created_at":1755099397,"tags":[["p","8b2be0a0ad34805d76679272c28a77dbede9adcbfdca48c681ec8b624a1208a6"],["p","50d94fc2d8580c682b071a542f8b1e31a200b0508bab95a33bef0855df281d63"],["e","de71cb16b1496d5b554e1002dcf3a94d896b6eb22422725181ba831be87ce406","wss://relay.mostr.pub","17538dc2a62769d09443f18c37cbe358fab5bbf981173542aa7c5ff171ed77c4"],["p","17538dc2a62769d09443f18c37cbe358fab5bbf981173542aa7c5ff171ed77c4","wss://relay.mostr.pub"]],"content":"😂","sig":"a7b72d3afadb6ce96a5d0251ceeb38eed77393ae257e9fa90e4e243c820a7d210353c1e10132d017ef55b94910ffeadebd6baf16d736cf30cfcb1308b4f935b9"} +{"kind":1,"id":"4f28e9c9c3d80192cc86ab6961e263361ba096864f34052900ad9c38018d38bd","pubkey":"8b2be0a0ad34805d76679272c28a77dbede9adcbfdca48c681ec8b624a1208a6","created_at":1754961006,"tags":[["e","93891fd089e7cbc2b9f0b7cb9207225abab4d216a14ebde1385f179c2e0c35dd","wss://nos.lol","root"],["e","9a6d9ba9261ca66b00cb3b6a51928580daff22e012c711a076787a1106b7c648","wss://relay.nostr.wirednet.jp","reply"],["p","adc14fa3ad590856dd8b80815d367f7c1e6735ad00fd98a86d002fbe9fb535e1"],["p","52b4a076bcbbbdc3a1aefa3735816cf74993b1b8db202b01c883c58be7fad8bd"]],"content":"The space-inefficient way hex references are encoded on nostr notes?","sig":"744e4ed5bc9668d100809f14ebad17de1725b24d8b95b476eb52ad1ef793241955d81e0a32e33bbc506100d16ca5b149498e0a93531f63f9c38d0a2c6c8d4c26"} 
+{"kind":1,"id":"77887755fb89426f79573f74086e0607363680ec526f36235149349abb05a295","pubkey":"8b2be0a0ad34805d76679272c28a77dbede9adcbfdca48c681ec8b624a1208a6","created_at":1754950684,"tags":[["e","42355093df3f5d1c1b656c21ab2f49e486ea5ddb971e222ec7b4627912f2d039","","root"],["e","f9864ca79deb5f52eccfe452978323c73cd4974fc05c228ef143a93882b26c62","","reply"],["p","15b5cf6cdf4fd1c02f28bcce0f197cafae4c8c7c66a3e2e23af9fe610875315e"],["p","bd1e19980e2c91e6dc657e92c25762ca882eb9272d2579e221f037f93788de91"],["p","af9d70407464247d19fd243cf1bee81e6df1e639217dc66366bf37aa42d05d35"],["p","20986fb83e775d96d188ca5c9df10ce6d613e0eb7e5768a0f0b12b37cdac21b3"],["p","a93be9fb02e46c40476a84f56975db5835ffead918ffb2bd022904996d3fdc0c"],["p","32dc4f259c3a7ce6ad130ef4771069d70d0ba6a55ce960fac4e439a3f95ddcce"],["p","18905d0a5d623ab81a98ba98c582bd5f57f2506c6b808905fc599d5a0b229b08"],["p","17538dc2a62769d09443f18c37cbe358fab5bbf981173542aa7c5ff171ed77c4"]],"content":"It works, thanks!","sig":"7cbe45c815ce61f300aee9c2edff76c17270199d3cc70e7352d78f364f9b611a80759cc1f02bd12ae001533ed1b4f8ce20db477db3657ed198b379c1fb96a968"} +{"kind":7,"id":"85a0317ccabeb651abe2a34c841a3f5fb79c853f46fde5a2148810e3980b0de8","pubkey":"8b2be0a0ad34805d76679272c28a77dbede9adcbfdca48c681ec8b624a1208a6","created_at":1754950245,"tags":[["e","42355093df3f5d1c1b656c21ab2f49e486ea5ddb971e222ec7b4627912f2d039","","root"],["e","3f772dee58b1e8ae01b63418b08b73fbe062916548d86f32647413637a84848b","wss://relay.damus.io","reply"],["p","15b5cf6cdf4fd1c02f28bcce0f197cafae4c8c7c66a3e2e23af9fe610875315e"],["p","8b2be0a0ad34805d76679272c28a77dbede9adcbfdca48c681ec8b624a1208a6"],["p","bd1e19980e2c91e6dc657e92c25762ca882eb9272d2579e221f037f93788de91"],["p","af9d70407464247d19fd243cf1bee81e6df1e639217dc66366bf37aa42d05d35"],["p","20986fb83e775d96d188ca5c9df10ce6d613e0eb7e5768a0f0b12b37cdac21b3"],["p","a93be9fb02e46c40476a84f56975db5835ffead918ffb2bd022904996d3fdc0c"],["p","32dc4f259c3a7ce6ad130ef4771069d70d0ba6a55ce960fac4e439a3f95ddcce"],["p","18905d0a5d623ab81a98ba98c582bd5f57f2506c6b808905fc599d5a0b229b08"],["p","8b2be0a0ad34805d76679272c28a77dbede9adcbfdca48c681ec8b624a1208a6"],["e","f9864ca79deb5f52eccfe452978323c73cd4974fc05c228ef143a93882b26c62"],["p","17538dc2a62769d09443f18c37cbe358fab5bbf981173542aa7c5ff171ed77c4"]],"content":"🤙","sig":"3b1c7a9b22320a53fef7739fa05c698086d161f2a799749c71d47e434d538ae6b742ef47bef3c336480e2dc8a0d0e0f1458259fb23054b7da9c975bca2149565"} +{"kind":1,"id":"33fdd7582e7419be2c432e356542fd2835ae490485de0d84eecba617ce9ae26d","pubkey":"8b2be0a0ad34805d76679272c28a77dbede9adcbfdca48c681ec8b624a1208a6","created_at":1754949975,"tags":[["e","cf679da8d56793d2ef1b20abf2155f826bf02bce8aa122709fb7e95257488cea","","root"],["e","286ca51f64de36c6069b90e423f0e75f718c7448c871c4985df41347be43db08","","reply"],["p","3efdaebb1d8923ebd99c9e7ace3b4194ab45512e2be79c1b7d68d9243e0d2681"],["p","43baaf0c28e6cfb195b17ee083e19eb3a4afdfac54d9b6baf170270ed193e34c"],["r","https://github.com/damus-io/damus/issues/2131"],["p","17538dc2a62769d09443f18c37cbe358fab5bbf981173542aa7c5ff171ed77c4"]],"content":"Thank you for confirming and reporting the issue!\n\nWe do not have a specific ETA for the fix quite yet as we are a bit backlogged, but we have it documented here and plan to fix it in the future: https://github.com/damus-io/damus/issues/2131\n\nCC nostr:npub1zafcms4xya5ap9zr7xxr0jlrtrattwlesytn2s42030lzu0dwlzqpd26k5","sig":"fd6a5dd33116830d8f52f6704cc58e24dd6ae7306739290caf63e223ed4c8f2d64cd1d73af550d3d7629640495731eefeb763fd42b5d38110d7476fe5ecdd7a9"} 
+{"kind":1,"id":"b6ed4e8dd43477b42c14dd5ec4c82cfad42734c26a0a1de3bafd2d30c727a287","pubkey":"8b2be0a0ad34805d76679272c28a77dbede9adcbfdca48c681ec8b624a1208a6","created_at":1754927122,"tags":[["e","42355093df3f5d1c1b656c21ab2f49e486ea5ddb971e222ec7b4627912f2d039","","root"],["e","d1256bf2a58100d87b144ab36718c37e77f5880c7cf1a96fa4fd0206e887a8da","","reply"],["p","af9d70407464247d19fd243cf1bee81e6df1e639217dc66366bf37aa42d05d35"],["p","20986fb83e775d96d188ca5c9df10ce6d613e0eb7e5768a0f0b12b37cdac21b3"],["p","bd1e19980e2c91e6dc657e92c25762ca882eb9272d2579e221f037f93788de91"],["p","32dc4f259c3a7ce6ad130ef4771069d70d0ba6a55ce960fac4e439a3f95ddcce"],["p","a93be9fb02e46c40476a84f56975db5835ffead918ffb2bd022904996d3fdc0c"],["p","15b5cf6cdf4fd1c02f28bcce0f197cafae4c8c7c66a3e2e23af9fe610875315e"],["p","18905d0a5d623ab81a98ba98c582bd5f57f2506c6b808905fc599d5a0b229b08"],["p","17538dc2a62769d09443f18c37cbe358fab5bbf981173542aa7c5ff171ed77c4"],["p","17538dc2a62769d09443f18c37cbe358fab5bbf981173542aa7c5ff171ed77c4"]],"content":"Thank you nostr:npub1zafcms4xya5ap9zr7xxr0jlrtrattwlesytn2s42030lzu0dwlzqpd26k5 for filing a ticket! I have moved the priority to high so that we can take a look at this soon. In the meantime, I believe the only workaround would be to use a different NWC wallet with Damus until we get this resolved.\n\nDoes anyone know any guides out there on how to create a Yakihonne NWC wallet for testing? When I go to the “Add wallet” page and click the “Add wallet” button in Yakihonne, it takes me to a blank page.","sig":"56ab0941086f994c6621768e65ec92a1ce012778e40d847cb72fdf62bad6f1beb50d0fe4c598fd28f81e1d3add1db4e23c010db7c9b91c39527b7f1ef268b380"} +{"kind":1,"id":"cd8c6dc571dbdacfc371564fef6ce256faea900e48a37d15c244a1de1dfa0c38","pubkey":"8b2be0a0ad34805d76679272c28a77dbede9adcbfdca48c681ec8b624a1208a6","created_at":1754925116,"tags":[["e","cf679da8d56793d2ef1b20abf2155f826bf02bce8aa122709fb7e95257488cea","","root"],["p","3efdaebb1d8923ebd99c9e7ace3b4194ab45512e2be79c1b7d68d9243e0d2681"],["p","43baaf0c28e6cfb195b17ee083e19eb3a4afdfac54d9b6baf170270ed193e34c"],["p","43baaf0c28e6cfb195b17ee083e19eb3a4afdfac54d9b6baf170270ed193e34c"]],"content":"Good morning nostr:npub1gwa27rpgum8mr9d30msg8cv7kwj2lhav2nvmdwh3wqnsa5vnudxqlta2sz! 
Do you use Bitcoin Lightning for payments, or Apple in-app purchase?","sig":"2f52beef0edf9a228184c30f94c89a458800f91f0ba37f4346a3ba3ec0264767f329806655453d0926be8633dcdf2382c28f3121304744409eaf0cd27434267a"} +{"kind":3,"id":"9a7198296d406e3d6854c3b1ad9ad3675835416f770ec43b6dc5a8cf1ce70191","pubkey":"8b2be0a0ad34805d76679272c28a77dbede9adcbfdca48c681ec8b624a1208a6","created_at":1754677441,"tags":[["p","3efdaebb1d8923ebd99c9e7ace3b4194ab45512e2be79c1b7d68d9243e0d2681"],["p","8b2be0a0ad34805d76679272c28a77dbede9adcbfdca48c681ec8b624a1208a6"],["p","82341f882b6eabcd2ba7f1ef90aad961cf074af15b9ef44a09f9d2a8fbfbe6a2"],["p","ea2e3c814d08a378f8a5b8faecb2884d05855975c5ca4b5c25e2d6f936286f14"],["p","be1d89794bf92de5dd64c1e60f6a2c70c140abac9932418fee30c5c637fe9479"],["p","32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245"],["p","73923fd4c8d2a590fcadb3feb691cd6a80915872e947093993d1ff10452b3614"],["p","e23828861d37edbe7cf0e0454f94173b66a5b9ecd70ec88808e0d7a557bce5b0"],["p","1306edd66f1da374adc417cf884bbcff57c6399656236c1f872ee10403c01b2d"],["p","460c25e682fda7832b52d1f22d3d22b3176d972f60dcdc3212ed8c92ef85065c"],["p","36f7bc3a3f40b11095f546a86b11ff1babc7ca7111c8498d6b6950cfc7663694"],["p","84dee6e676e5bb67b4ad4e042cf70cbd8681155db535942fcc6a0533858a7240"],["p","54a43756097aae2bf19009747c03ce9a707f842f94931d6daf931d14b4fcda50"],["p","67acce177065a0de48d2f7e7aa01d618e8543e8332e7731947f8f94af7855e25"],["p","97c70a44366a6535c145b333f973ea86dfdc2d7a99da618c40c64705ad98e322"],["p","4657dfe8965be8980a93072bcfb5e59a65124406db0f819215ee78ba47934b3e"],["p","2779f3d9f42c7dee17f0e6bcdcf89a8f9d592d19e3b1bbd27ef1cffd1a7f98d1"],["p","5a8e581f16a012e24d2a640152ad562058cb065e1df28e907c1bfa82c150c8ba"],["p","fa984bd7dbb282f07e16e7ae87b26a2a7b9b90b7246a44771f0cf5ae58018f52"],["p","a9434ee165ed01b286becfc2771ef1705d3537d051b387288898cc00d5c885be"],["p","3bf0c63fcb93463407af97a5e5ee64fa883d107ef9e558472c4eb9aaaefa459d"],["p","787338757fc25d65cd929394d5e7713cf43638e8d259e8dcf5c73b834eb851f2"],["p","0fe0b18b4dbf0e0aa40fcd47209b2a49b3431fc453b460efcf45ca0bd16bd6ac"],["p","c998a5739f04f7fff202c54962aa5782b34ecb10d6f915bdfdd7582963bf9171"],["p","52163ba58faea3504089debf1dfbafd7df502fba26536e3b22761da15419a4d8"],["p","e1055729d51e037b3c14e8c56e2c79c22183385d94aadb32e5dc88092cd0fef4"],["p","eab0e756d32b80bcd464f3d844b8040303075a13eabc3599a762c9ac7ab91f4f"],["p","e88a691e98d9987c964521dff60025f60700378a4879180dcbbb4a5027850411"],["p","d2704392769c20d67a153fa77a8557ab071ef27aafc29cf6b46faf582e0595f2"],["p","7d4e04503ab26615dd5f29ec08b52943cbe5f17bacc3012b26220caa232ab14c"],["p","7fa56f5d6962ab1e3cd424e758c3002b8665f7b0d8dcee9fe9e288d7751ac194"],["p","82f3b82c7f855340fc1905b20ac50b95d64c700d2b9546507415088e81535425"],["p","fe7f6bc6f7338b76bbf80db402ade65953e20b2f23e66e898204b63cc42539a3"],["p","769f03d2e964058793489c706951ad10a897680217b9a0cc9dce146c2b3684f3"],["p","80482e60178c2ce996da6d67577f56a2b2c47ccb1c84c81f2b7960637cb71b78"],["p","566c166f3adab0c8fba5da015b0b3bcc8eb3696b455f2a1d43bfbd97059646a8"],["p","17538dc2a62769d09443f18c37cbe358fab5bbf981173542aa7c5ff171ed77c4"],["p","369061c9a1ee258d28d123f35f913968884d52c4928ab7bd5a4544fcfd48f3f3"],["p","83e818dfbeccea56b0f551576b3fd39a7a50e1d8159343500368fa085ccd964b"],["p","1739d937dc8c0c7370aa27585938c119e25c41f6c441a5d34c6d38503e3136ef"],["p","4cdbf5bcd7f015a3ebc6853e6566732f9c11357b6e43d6b2edce742fbe9847f4"],["p","a305cc8926861bdde5c71bbb6fd394bb4cea6ef5f5f86402b249fc5ceb0ce220"],["p","ee6ea13ab9fe5c4a68eaf9b1a34fe014a66b40117c50ee2a614f4cda959b6e74"],["p","e76450df94f
84c1c0b71677a45d75b7918f0b786113c2d038e6ab8841b99f276"],["p","c7eda660a6bc8270530e82b4a7712acdea2e31dc0a56f8dc955ac009efd97c86"],["p","f2c96c97f6419a538f84cf3fa72e2194605e1848096e6e5170cce5b76799d400"],["p","bd1e19980e2c91e6dc657e92c25762ca882eb9272d2579e221f037f93788de91"],["p","b9ceaeeb4178a549e8b0570f348b2caa4bef8933fe3323d45e3875c01919a2c2"],["p","2edbcea694d164629854a52583458fd6d965b161e3c48b57d3aff01940558884"],["p","c4eabae1be3cf657bc1855ee05e69de9f059cb7a059227168b80b89761cbc4e0"],["p","9989500413fb756d8437912cc32be0730dbe1bfc6b5d2eef759e1456c239f905"],["p","5c508c34f58866ec7341aaf10cc1af52e9232bb9f859c8103ca5ecf2aa93bf78"],["p","91c9a5e1a9744114c6fe2d61ae4de82629eaaa0fb52f48288093c7e7e036f832"],["p","f07e0b1af066b4838386360a1a2cbb374429a9fbaab593027f3fcd3bd3b5c367"],["p","e7424ad457e512fdf4764a56bf6d428a06a13a1006af1fb8e0fe32f6d03265c7"],["p","9168772564e66c07a776a3e2849b02d1a0ac88a7f8e621600c54493ca0de48ea"],["p","762a3c15c6fa90911bf13d50fc3a29f1663dc1f04b4397a89eef604f622ecd60"],["p","bf2376e17ba4ec269d10fcc996a4746b451152be9031fa48e74553dde5526bce"],["p","c9b19ffcd43e6a5f23b3d27106ce19e4ad2df89ba1031dd4617f1b591e108965"],["p","1bc70a0148b3f316da33fe3c89f23e3e71ac4ff998027ec712b905cd24f6a411"],["p","25a2192dcf34c3be326988b5c9f942aa96789899d15b59412602854a8723e9e8"],["p","3eab247c63bb35dfa38e07ca102f6da28ba9b9d4687197743bde3a2b1d80aeed"],["p","d1da05d307e663c91c93fd9ffbbbbf987dc244bf609255ede571237599db7780"],["p","4c96d763eb2fe01910f7e7220b7c7ecdbe1a70057f344b9f79c28af080c3ee30"],["p","c6603b0f1ccfec625d9c08b753e4f774eaf7d1cf2769223125b5fd4da728019e"],["p","3af187bfe00920d87068a6bcdffc48f4b241d1d82594cd395119a30891041654"],["p","33bd77e5394520747faae1394a4af5fa47f404389676375b6dc7be865ed81452"],["p","6e1010d8e5b953f9a52314d97bc94c597af26d51bae88b3fdf2c8fbd7e962d01"],["p","c48e29f04b482cc01ca1f9ef8c86ef8318c059e0e9353235162f080f26e14c11"],["p","2250f69694c2a43929e77e5de0f6a61ae5e37a1ee6d6a3baef1706ed9901248b"],["p","8fb140b4e8ddef97ce4b821d247278a1a4353362623f64021484b372f948000c"],["p","9c163c7351f8832b08b56cbb2e095960d1c5060dd6b0e461e813f0f07459119e"],["p","07ecf9838136fe430fac43fa0860dbc62a0aac0729c5a33df1192ce75e330c9f"],["p","520830c334a3f79f88cac934580d26f91a7832c6b21fb9625690ea2ed81b5626"],["p","34d2f5274f1958fcd2cb2463dabeaddf8a21f84ace4241da888023bf05cc8095"],["p","07804b786c6a3b400b7b20d9bfc945035f3ad213da797b0c50954767c375c543"],["p","cb92d81fded72024a68ff0e693a9e6b35687c56040a8780fd739ac6228f9fde5"],["p","d7c6d014b342815ba29c48f3449e4f0073df84f4ad580ae173538041a6abb6b8"],["p","e8ed3798c6ffebffa08501ac39e271662bfd160f688f94c45d692d8767dd345a"],["p","c7617e84337c611c7d5f941b35b1ec51f2ae6e9f41aac9616092d510e1c295e0"],["p","3fc5f8553abd753ac47967c4c468cfd08e8cb9dee71b79e12d5adab205bc04d3"],["p","58c741aa630c2da35a56a77c1d05381908bd10504fdd2d8b43f725efa6d23196"],["p","77620d8f02483272621d387ad54c1bf612b11a9261c7f3927b408434eca7de3f"],["p","c3a7907e72442236aec5f61494914d495bba089844a2d820f8889efbe2e696d2"],["p","5a3de28ffd09d7506cff0a2672dbdb1f836307bcff0217cc144f48e19eea3fff"],["p","a37118a4888e02d28e8767c08caaf73b49abdac391ad7ff18a304891e416dc33"],["p","17717ad4d20e2a425cda0a2195624a0a4a73c4f6975f16b1593fc87fa46f2d58"],["p","6a359852238dc902aed19fbbf6a055f9abf21c1ca8915d1c4e27f50df2f290d9"],["p","42b3db1ca9f73ea861cca1f5a9f74dadf97b6ff539cdf722ccae16119907dfe6"],["p","772bd267dffbff318d1a89f257c3371410111a8b89571dbbefa77af6bfa179f3"],["p","c3e6982c7f93e443d99f2d22c3d6fc6ba61475af11bcf289f927a7b905fffe51"],["p","126103bfddc8df256b6e0abfd7f3797c80dcc4ea88f7c2f87dd4104220b4d6
5f"],["p","180a6d42c7d64f8c3958d9d10dd5a4117eaaacea8e7f980781e9a53136cf5693"],["p","aa738a5bc00ee7a52adb9b6ecb423ec0ab1385bac788695b74017799b89a1cad"],["p","2cb13233ab33652ab671bb880e18f53d9e316e814c1674c15f5f0892f84a2a37"],["p","3e310bfff37d3019c116397188cc7760bf93bb2aa351d7078101491da5000ed2"],["p","df173277182f3155d37b330211ba1de4a81500c02d195e964f91be774ec96708"],["p","971615b70ad9ec896f8d5ba0f2d01652f1dfe5f9ced81ac9469ca7facefad68b"],["p","0815ff97fe03766824041b68e2a39f3e3c9580852643803307eabea240a2bb75"],["p","02d9f5676fffc339ffe94dfab38bebe21ce117c6f1509d9922a82d454f420da2"],["p","c2622c916d9b90e10a81b2ba67b19bdfc5d6be26c25756d1f990d3785ce1361b"],["p","fdd5e8f6ae0db817be0b71da20498c1806968d8a6459559c249f322fa73464a7"],["p","e4748f6e31807c1b63db0397e031070f288e88edbc04f45e908570c1f323eb8e"],["p","f5bda68c3dcf79344beb1145f18ce5e75e3ff5c8140e9ec3bd1d4ae7ee6458e1"],["p","e80ce979e9db1d2843f5175c8f62d3773bd6240a78aec22c6cccb69c65ddaddd"],["p","8ef5443e95fabc7244ff453cb86a6995e5587d75f531dabf25377ff8f7723f85"],["p","55f04590674f3648f4cdc9dc8ce32da2a282074cd0b020596ee033d12d385185"],["p","de7aa63b1c7e809f66a67bdcd2bd4c952a09cd52ee01ed7db358d09bad97f840"],["p","fd4b8c1b94b3038c742e02b4524a782ca545af3c9a516bffdeade6a63e0d101f"],["p","f45f5667ffe7649d7c9f648930c9498cda88974e7ea28929194d0167cdcbc124"],["p","68d81165918100b7da43fc28f7d1fc12554466e1115886b9e7bb326f65ec4272"],["p","a2e0ffda9d70a5e8a9a8635ede2b73a646a49fde0d346a36a8de0c24108ae7cf"],["p","efc37e97fa4fad679e464b7a6184009b7cc7605aceb0c5f56b464d2b986a60f0"],["p","9eab64e92219ccedb15ea9b75ababaa4ae831451019394e0e3336390c3a742d8"],["p","89e14be49ed0073da83b678279cd29ba5ad86cf000b6a3d1a4c3dc4aa4fdd02c"],["p","4a0510f26880d40e432f4865cb5714d9d3c200ca6ebb16b418ae6c555f574967"],["p","04c915daefee38317fa734444acee390a8269fe5810b2241e5e6dd343dfbecc9"],["p","c89cf36deea286da912d4145f7140c73495d77e2cfedfb652158daa7c771f2f8"],["p","aa55a479ad6934d0fd78f3dbd88515cd1ca0d7a110812e711380d59df7598935"],["p","207ce4f7db8343c826ace044b5ad7c56cd1c4e1e75cd4c22cba90650d82ce4b9"],["p","a363b731c07ff45cd55025c0311344bac7096d51e475de51074a51434b25908b"],["p","36732cc35fe56185af1b11160a393d6c73a1fe41ddf1184c10394c28ca5d627b"],["p","50d94fc2d8580c682b071a542f8b1e31a200b0508bab95a33bef0855df281d63"],["p","d77637850017cffa7a61c7032db0f28be947d5487f9d504aabe4449a91b53cff"],["p","e1ff3bfdd4e40315959b08b4fcc8245eaa514637e1d4ec2ae166b743341be1af"],["p","e6a9a4f853e4b1d426eb44d0c5db09fdc415ce513e664118f46f5ffbea304cbc"],["p","76c71aae3a491f1d9eec47cba17e229cda4113a0bbb6e6ae1776d7643e29cafa"],["p","c6f1e85df311da691822ac0e4cfa9980d99aa1fd12c8753f773910e61465689c"],["p","3492dd43d496a237f4441fd801f5078b63542c3e158ffea903cb020a1af4ffdd"],["t","umbrel"],["p","5be6446aa8a31c11b3b453bf8dafc9b346ff328d1fa11a0fa02a1e6461f6a9b1"],["p","06639a386c9c1014217622ccbcf40908c4f1a0c33e23f8d6d68f4abf655f8f71"],["p","b12b632c887f0c871d140d37bcb6e7c1e1a80264d0b7de8255aa1951d9e1ff79"],["p","77b504e58f206c2aa1b2ae6acc1eb11321f4061eafc8e5b531015dbca536b4e4"],["p","be7a5291b532e8b918f2dc98148948a33d3e0da07788d7416f73b4c7514f08e6"],["p","f8d42124a72ed89896247ad912f47dd76ac6b6cdd10fe178f5e04acfead0d4a2"],["p","56a6b75373c8f7b93c53bcae86d8ffbaba9f2a1b38122054fcdb7f3bf645b727"],["p","9be21611a341426e9146257c54179e22d178bb7d4106e247ddf3e507b7985a6b"],["p","ca76c77833d474460290472cb72e0b82951a76b43d9de28e217eacb8e784c54b"],["p","e33fe65f1fde44c6dc17eeb38fdad0fceaf1cae8722084332ed1e32496291d42"],["t","blobstr"],["p","d61f3bc5b3eb4400efdae6169a5c17cabf3246b514361de939ce4a1a0da6ef4a"],["p","6e468422d
fb74a5738702a8823b9b28168abab8655faacb6853cd0ee15deee93"],["p","a4237e420cdb0b3231d171fe879bcae37a2db7abf2f12a337b975337618c3ac2"],["p","6dd9fdfbbcd92e4d3f7f647f9ddab3cba64af1d66e0bdbe216b57eceac8b0558"],["p","efa6abd09142caf23dfb70ed3b9bd549042901caa66f686259a1cc55a4970369"],["p","3c906042e889f081619588980bcf1ebca6a5443022ad6dd8205aba269577212b"],["p","c6b554646377f111ff7a9cf7e8f30ab488d7a7c2ee7ff85cc44b47fe357bc26a"],["p","619af6a60b3fe4c733aaca061c522cc9c7cf1d87ef4c908facc5ed936d3bdf23"],["p","88839dc9f47311d6dd04232b97e3a15eecdd5427b0a2a885203f929edc6e83f7"],["p","fecb85b4d3d5b6bcf7a989e4be969c6006fb2904c50a3b48561e0c09e3016ebc"],["p","b708f7392f588406212c3882e7b3bc0d9b08d62f95fa170d099127ece2770e5e"],["p","c8df6ae886c711b0e87adf24da0181f5081f2b653a61a23b1055a36022293a06"],["p","9cb3545c36940d9a2ef86d50d5c7a8fab90310cc898c4344bcfc4c822ff47bca"],["p","d43d930bbdaff253c77264dde927abe6df24980b0a6cae01b9fc715017dc00a7"],["p","3d2e51508699f98f0f2bdbe7a45b673c687fe6420f466dc296d90b908d51d594"],["p","70b05f8949abfa2f94f0bda5971a2123914cd3bf700c87972e56da4ce48fc286"],["p","52b4a076bcbbbdc3a1aefa3735816cf74993b1b8db202b01c883c58be7fad8bd"],["p","051acee30009506ea470900693ec6746dc740e9f61e43c95fc909556539edc78"],["p","b9cfebd0043778453f8cc5ec017f250c46d3056b460fc8c7f8a3b02e9312461f"],["p","c7d32972e398d4d20cd69b1a8451956cc14a2e9065ad1a8fda185c202698937b"],["p","055f25d9e536b6ddefee79403ee30b005101945f55f18f38493b38eec0109e1d"],["p","a68076b7b443bc16d094a0eff8061453503d75142b1852b5bec38c81a4e20257"],["p","37cdc3e7f5a7147752cc6cb348bd77e1b999e3e43a4b21b740812193ba81c298"],["p","1586fd57ac81b66177b0087bb0c0fa465f30b9895949c8936836ec5e6cd13132"],["p","43e87f5c42c5b13c193bf1e6f29740caa143ce01228ae4bd4b06254834580caa"],["p","ebdee92945ef05283be0ac3de25787c81a6a58a10f568f9c6b61d9dd513adbad"],["p","020f2d21ae09bf35fcdfb65decf1478b846f5f728ab30c5eaabcd6d081a81c3e"],["p","266815e0c9210dfa324c6cba3573b14bee49da4209a9456f9484e5106cd408a5"],["p","148d1366a5e4672b1321adf00321778f86a2371a4bdbe99133f28df0b3d32fa1"],["p","8eaf62bb9a34c62d56c9ef430623141a664f8aad97cd5745eed6afac12a765dd"],["p","a87d9b9bb89966bafbf304abd79aebbb2df8121c9ab9c49710484a4bd3a305be"],["p","90590edc247b100f23879a412b6616c65e874ac790610a15e6b3257a18d9ae43"],["p","e083d953e54a18a2121d8d7927425d612b32b344a895d2712c864ab3bd78cf89"],["p","9ab67932279abdb25e03dc752648b506e77f140971971fd973e01f5845a1b1ec"],["p","3f770d65d3a764a9c5cb503ae123e62ec7598ad035d836e2a810f3877a745b24"],["p","500ccc532c301711d88aa6d30b11dc477e6a32770853f8ab1c2be389b824e3f8"],["p","5cad82c898ee66013711945d687f7d9549f645a0118467dae2f5e274c598d6ff"],["p","a19aadee53bc71d25afef365067a3978ac30d5520d36ec8cc74d77a872af7359"],["p","3eeb3de14ec5c48c6c4c9ff80908c4186170eabb74b2a6705a7db9f9922cd61e"],["p","504ef89b3f60f09c09c20ed50cdfafa5342f7eba4a56b51228cceb91e20faa52"],["p","805b34f708837dfb3e7f05815ac5760564628b58d5a0ce839ccbb6ef3620fac3"],["p","bb90e7f0531d4abaa39ff85091577434fb6fd3aff9cfb8da275199e241eb4eff"],["p","91fc34496959bbc344cda4eebbbbacdc4aff11d3e1ccf13f847026b8329e6765"],["p","0095c837e8ed370de6505c2c631551af08c110853b519055d0cdf3d981da5ac3"],["p","f1989a96d75aa386b4c871543626cbb362c03248b220dc9ae53d7cefbcaaf2c1"],["p","9d065f84c0cba7b0ef86f5d2d155e6ce01178a8a33e194f9999b7497b1b2201b"],["p","6f35047caf7432fc0ab54a28fed6c82e7b58230bf98302bf18350ff71e10430a"],["p","69a0a0910b49a1dbfbc4e4f10df22b5806af5403a228267638f2e908c968228d"],["p","1c31ccda2709fc6cf5db0a0b0873613e25646c4a944779dfb5e8d6cbbcd2ee1c"],["p","641fd340a76239260fb2a6f41c33b4e697be4670221fb28f1598896bc777
c4fe"],["p","9c6fa6638f64ab2779fb386b4f32d1e02b5109f2c507a6bcb8baed0d18c4e3d0"],["p","da18e9860040f3bf493876fc16b1a912ae5a6f6fa8d5159c3de2b8233a0d9851"],["p","28c64522edc6f3555c8abc6df7992c354fac4894885900518307b2d4cfb90206"],["p","a762b5bb70506089e8af2b11f27ffae8d75191638ec6bd52f0c454280c393a86"],["p","8ea485266b2285463b13bf835907161c22bb3da1e652b443db14f9cee6720a43"],["p","0d6c8388dcb049b8dd4fc8d3d8c3bb93de3da90ba828e4f09c8ad0f346488a33"],["p","0f36d20bc04f1f0e776cacc7f7b308a7aad5fe9b7744ceea6491ae1762b8c18c"],["p","c88f94f0a391b9aaa1ffefd645253b1a968b0a422a876ea48920a95d45c33f47"],["p","aa116590cf23dc761a8a9e38ff224a3d07db45c66be3035b9f87144bda0eeaa5"],["p","b83a28b7e4e5d20bd960c5faeb6625f95529166b8bdb045d42634a2f35919450"],["p","8806372af51515bf4aef807291b96487ea1826c966a5596bca86697b5d8b23bc"],["p","d36e8083fa7b36daee646cb8b3f99feaa3d89e5a396508741f003e21ac0b6bec"],["p","11b9a89404dbf3034e7e1886ba9dc4c6d376f239a118271bd2ec567a889850ce"],["p","2389f44d7f8345a17ab6c5410bff6cf68b44943840723a4d2e1b158ebc62e3d9"],["p","3fceb943b89af7fc05c8cee47475b3db7a8b1b408376e6186b6b4e655841dcff"],["p","35123fe8c74b2403d28ff49792f22e72d82a01c6405de4aba6afb453daad76a7"],["p","880f967145ab66b53d9dc279d44a9722ba875d232c73f3df4707d1e79c4336ce"],["p","173ffe1c2fc7003c21f2af5075ccff1fb671d522f273e02756ab5763aebfdd13"],["p","90aaac10e033b4d1e607d426fc0eaabb9caa86720421dad8ae30182dd3efdb01"],["p","2658362c3137eaa801fae404be36ffc80e16a61c43a891a3a046bec4b72e498a"],["p","cc8d072efdcc676fcbac14f6cd6825edc3576e55eb786a2a975ee034a6a026cb"],["p","8dab107e8ecdd48a3eea02076b186c92fd873156da9b502002b4f4c8d5210fc9"],["p","c1fc7771f5fa418fd3ac49221a18f19b42ccb7a663da8f04cbbf6c08c80d20b1"],["p","d388af725538cea442c0a9f8c35b877fb57790a6c4d1040c9e95493d08db98a6"],["p","3c2a2b35ee90288b2ee2a87ae579f5592b08d2aec05561b0c47c32e1450512b1"],["p","b07d216f2f0422ec0252dd81a6513b8d0b0c7ef85291fbf5a85ef23f8df78fa7"],["p","e03cfe011d81424bb60a12e9eb0cb0c9c688c34712c3794c0752e0718b369ef2"],["p","86723e843e3d05bb9b677ca056643ad1df9b601c7ac3dc1540d50ac6a56e6039"],["p","ca9c32f2f2be48ee17211acdb6da1791c4d1955038a8b2bad326a8f1cc818d37"],["p","f1725586a402c06aec818d1478a45aaa0dc16c7a9c4869d97c350336d16f8e43"],["p","781a1527055f74c1f70230f10384609b34548f8ab6a0a6caa74025827f9fdae5"],["p","ad308b5baaf4638d9a67d5af285dc430b5dba2417e6c540629c2ade244399d80"],["p","b7996c183e036df27802945b80bbdc8b0bf5971b6621a86bf3569c332117f07d"],["p","47259076c85f9240e852420d7213c95e95102f1de929fb60f33a2c32570c98c4"],["p","f4e916d220f9619e1ba1a287ae62d6631259d9e14299ace032f08b39dce975d5"],["p","035f494df237c41f9a313496282d88cf2056d8b57d2ce027c5671efc09c4622e"],["p","56fff0a8bd6a54973f39edf70ce058e4495d2a8024e2caf1c965822fc2f3dca2"],["p","82550cfaa6f893e4fb1fdc5a851e61e99176934147041871717a641773a619b1"],["p","c4f5e7a75a8ce3683d529cff06368439c529e5243c6b125ba68789198856cac7"],["p","56172b53f730750b40e63c501b16068dd96a245e7f0551675c0fec9817ee96e0"],["p","ccbf87d871b541639daa16ea86162f9030b3c1b9dcdd307a50e9248421c5e960"],["p","b6494a74d18a2dfa3f80ced9fadae35807716fce1071e4de19e2d746b6d87606"],["p","0461fcbecc4c3374439932d6b8f11269ccdb7cc973ad7a50ae362db135a474dd"],["p","d8a6ecf0c396eaa8f79a4497fe9b77dc977633451f3ca5c634e208659116647b"],["p","dc57c8ff44a150a5efb8b0151651889d0bcf5f9ad20c839bb245c542ac1bd1a6"],["p","580f511af0a79d2cca20fb5bf6bb89abe6988593f8568177d2a513e87b2bbef3"],["p","2123cc7887a27a62530e01d34f548dec645ad3c1f0cb75137fb3be6d7c274d59"],["p","59ffbe1fc829decf90655438bd2df3a7b746ef4a04634d4ee9e280bb6ce5f14e"],["p","8685ebef665338dd6931e2ccdf3c19d9f0e5a1
067c918f22e7081c2558f8faf8"],["p","7a76f389f63e019d3d8ffb0f0631fb28b071757e5fe8c46818caac578756a043"],["p","29b454ea324901ec0ba335d316c5a136329738149ed18f89f86cdb822f94da95"],["p","8125b911ed0e94dbe3008a0be48cfe5cd0c0b05923cfff917ae7e87da8400883"],["p","f2aa7b81b671e478a0113c43cf513da86aac27a9f69a1a7224ce35f1ae7991fb"],["p","e2ccf7cf20403f3f2a4a55b328f0de3be38558a7d5f33632fdaaefc726c1c8eb"],["p","bd171ec3857a546a9c665e016931d7177dbd5a6887992276663980483b1acc15"],["p","b81f6b275ebd27a8f04ffd05dc16bc9fa329cb8d9c464bc7bdbf5068818e03c0"],["p","34104dedf3cc5936802c8308a3d0090f2857d4aba4e8b720accaf6b2ab049969"],["p","c4da3be8e10fa86128530885d18e455900cccff39d7a24c4a6ac12b0284f62b3"],["p","afc76740c1c289c77d49c8b68941f8fa170bc9f2f61e4fc3058811850259ff61"],["p","b299876ba85e33da57269247f7f91aee025f5bd2bc229aa85c7908f37c10c838"],["p","2d9873b25bf2dda6141684d44d5eb76af59f167788a58e363ab1671fefee87f2"],["p","850605096dbfb50b929e38a6c26c3d56c425325c85e05de29b759bc0e5d6cebc"],["p","4379e76bfa76a80b8db9ea759211d90bb3e67b2202f8880cc4f5ffe2065061ad"],["p","31b81ae311c0954d8ec5b9c4ef507452bbe7ba8a08891b5d9a6c50a544cb5b42"],["p","f2af450f423ab280be4a556a0d401cdc30613fedafe36b78dde3ef92f0d6e751"],["p","c161a5ba8ddb3bcca6cb59ee184305d9387fe91fd8fcc5efbbf600336858fb1d"],["p","bca41592bfe2ce09c3043e175be53e347237f9730d84cfceee4282284533a5c8"],["p","e2eea2910b795e9ceaaf85af83fe356961e37dd2aa46682b30c897e9e9baacdd"],["p","8ad8f1f78c8e11966242e28a7ca15c936b23a999d5fb91bfe4e4472e2d6eaf55"],["p","4faaa16714f6340b17b248cee24a6f8558348af5be604928800fd92a3869234d"],["p","5eca50a04afaefe55659fb74810b42654e2268c1acca6e53801b9862db74a83a"],["p","0114bb11dd8eb89bfb40669509b2a5a473d27126e27acae58257f2fd7cd95776"],["p","6b4a29bbd43d1d0eeead384f512dbb591ce9407d27dba48ad54b00d9d2e1972b"],["p","e0ef3b4e051aeb82eeda594d85cac2dfedd71acd08a53334d731d5b06fd74878"],["p","b7b51cc25216d4c10bc85ae27055c9a945fe77cafd463cf23b20917e39ce6816"],["p","220522c2c32b3bf29006b275e224b285d64bb19f79bda906991bcb3861e18cb4"],["p","0ffab7d9247132f14bcd38378b0acedc30d35e2b2670d89b7ae8c2d2c306af99"],["p","0371db7fd4437ce63e60dedc313cad6ee8b17fc691fe05231f9a140134fc0d08"],["p","056b5b5966f500defb3b790a14633e5ec4a0e8883ca29bc23d0030553edb084a"],["p","4b6f261ab67186e3a166103293e6b4f452b37cd442a849fe7579752817c5033e"],["p","220b2014218a71452dbed5a7812d00e61d37a7e2775c3dc8f1f19151a7281ca0"],["p","38a8f31c3e46f488768757e0b93a26ac28451259bb4295ea3847b38088333a6c"],["p","91a0d353baad72fab5a00907cc84fec78dbd4c6443423a8c8f68875b67766939"],["p","ff27d01cb1e56fb58580306c7ba76bb037bf211c5b573c56e4e70ca858755af0"],["p","7cc328a08ddb2afdf9f9be77beff4c83489ff979721827d628a542f32a247c0e"],["p","3d842afecd5e293f28b6627933704a3fb8ce153aa91d790ab11f6a752d44a42d"],["p","2e8840ae1820adb1f2c73d0b1ebb10df9a8b4c69f06e0e2827dda6bdcc46231c"],["p","de7623f81c44e52697dd7b9ffa6183b333c3ea9111306213db147a0f0d9b7c4e"],["p","1d797b4dac0354210037f62ce8f58e5ef3f5e176bd35fc6e97e0f4ddeadc998d"],["p","badbdda507572b397852048ea74f2ef3ad92b1aac07c3d4e1dec174e8cdc962a"],["p","d7df8b3e14166796a8ad8740b06f427aea9dd95b72e0276aa9179210e27f81f7"],["p","ccaa58e37c99c85bc5e754028a718bd46485e5d3cb3345691ecab83c755d48cc"],["p","f728d9e6e7048358e70930f5ca64b097770d989ccd86854fe618eda9c8a38106"],["p","dab6c6065c439b9bafb0b0f1ff5a0c68273bce5c1959a4158ad6a70851f507b6"],["p","98adf137d9da1b1b3654f29ede930f27c95847c9719bf4b38e1cef33c21f7e38"],["p","97a403640c83ac12bce556ded8db2f3ebe891801832fa1114abda73a6ae8598c"],["p","8bf629b3d519a0f8a8390137a445c0eb2f5f2b4a8ed71151de898051e8006f13"],["p","4523be58d395b1b1
96a9b8c82b038b6895cb02b683d0c253a955068dba1facd0"],["p","ce41c1698a8c042218bc586f0b9ec8d5bffa3dcbcea09bd59db9d0d92c3fc0b4"],["p","0f8ad350ada230bfffa3f47009f3004ace5406c9d91a69577bb2a52d161ebce3"],["p","00000003775402413595ac9e1612bed508815e98ec4aa9d68a2628ff6154856f"],["p","8480070f5054831f6ff8c70d1937d7df21782fa9df011c74e2f9843759ad6b5f"],["p","05e255f64ff2782d0eee59548ac19d7c6c28442040c5151235b165fa5da9eaf0"],["p","43f2116eb9424dadb7be0f1051d52e00402b135455452cb2255ce430eb48f3f2"],["p","72bdbc57bdd6dfc4e62685051de8041d148c3c68fe42bf301f71aa6cf53e52fb"],["p","62facb3db64c9a7d4b5e2942f2adb10f967535281ea4d2a0f395fb7c2219ddd7"],["p","875198796c3377b5f0a8eb5677b6ca909d0f6bdf8bc7a34d6089529b8c2f22ff"],["p","b88c7f007bbf3bc2fcaeff9e513f186bab33782c0baa6a6cc12add78b9110ba3"],["p","c18832b9ed75a4687a0cdba5f45599004bc139e063b867e1f4a57e013a9c5d4e"],["p","1ba6afdb8371a9b8a1abe1d403564add435ea43f780f123b14ca9a55ab4a805b"],["p","83f03ed3d1a40f8c3bc14b00b0d619cf97efd9237a1c34ded7827b6e1ac9a76f"],["p","f4db5270bd991b17bea1e6d035f45dee392919c29474bbac10342d223c74e0d0"]],"content":"{\"wss://nostr.wine\":{\"read\":true,\"write\":true},\"wss://relay.nostr.wirednet.jp\":{\"read\":true,\"write\":true},\"wss://relay.danieldaquino.me/private\":{\"write\":true,\"read\":true},\"wss://relay.snort.social\":{\"write\":true,\"read\":true},\"wss://nos.lol\":{\"write\":true,\"read\":true},\"wss://relay.mostr.pub\":{\"write\":true,\"read\":true},\"wss://relay.danieldaquino.me/inbox\":{\"write\":true,\"read\":true},\"wss://relay.danieldaquino.me\":{\"write\":true,\"read\":true},\"wss://wot.danieldaquino.me\":{\"read\":true,\"write\":true},\"wss://relay.damus.io\":{\"read\":true,\"write\":true},\"wss://yabu.me\":{\"read\":true,\"write\":true},\"wss://relay.danieldaquino.me/chat\":{\"write\":true,\"read\":true}}","sig":"351d52badb118f712373ca204c3186a6897a1963d31849298c9f48b682647039d5b0d2cba1d052179f74a3f1215ec77050b7418b801ad1ae0eb06181587282b8"} +{"kind":7,"id":"2845a460e81846127c4d8fb2a8a9c2ec46497e37ec5cf1a5d829e2d4e198e090","pubkey":"8b2be0a0ad34805d76679272c28a77dbede9adcbfdca48c681ec8b624a1208a6","created_at":1754675942,"tags":[["e","bf0d578922f8ac542b232af7589d8dd4da06bedb5c88b1a3ccc5b173b256757f","wss://pyramid.fiatjaf.com","root"],["e","4c599882d92a5b381a347a9ac0e26b5d74e316657d76b93ef39baca492e5f955","wss://nos.lol","reply"],["p","0d6c8388dcb049b8dd4fc8d3d8c3bb93de3da90ba828e4f09c8ad0f346488a33"],["p","958b754a1d3de5b5eca0fe31d2d555f451325f8498a83da1997b7fcd5c39e88c"],["e","ddb156020a4c760f2c9d77275249de04c257df2c117a75b6b51096ac7d5c4fa7","wss://relay.damus.io","17538dc2a62769d09443f18c37cbe358fab5bbf981173542aa7c5ff171ed77c4"],["p","17538dc2a62769d09443f18c37cbe358fab5bbf981173542aa7c5ff171ed77c4","wss://relay.damus.io"]],"content":"😂","sig":"f0b8d9e5d2554e852e37fc068bc68b5523a35506cd7f0f38696206c245db5e6253d49446105868d76926dc2cd187df01d46504348556002993aab8e5a94b80c7"} 
+{"kind":7,"id":"60a6e6c03135911c7dd7296501589810973a30da52a20e6958cd8f8308e9bac5","pubkey":"8b2be0a0ad34805d76679272c28a77dbede9adcbfdca48c681ec8b624a1208a6","created_at":1754332140,"tags":[["p","8b2be0a0ad34805d76679272c28a77dbede9adcbfdca48c681ec8b624a1208a6"],["e","cdfff002322e591ece6e054cecc111cfde10225803f64de3220554aa23d0296c","wss://relay.nostr.wirednet.jp","32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245"],["p","32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245","wss://relay.nostr.wirednet.jp"]],"content":"🤙","sig":"0adb758b1d0ae4d793f755726d90645e7b11284287e4ea520d30c5ceb899884d5d5670522b9d0be0396eb29999e07c903f7fdb8bd2b0bb1a1655948bca692feb"} +{"kind":6,"id":"42242b928cfcc2f7a5c7a4c68806f4af452a241bcaa8c3a0ade35b9605544f4c","pubkey":"8b2be0a0ad34805d76679272c28a77dbede9adcbfdca48c681ec8b624a1208a6","created_at":1754087878,"tags":[["e","9cb056081e92641563050fd067e831a5d5759e2db18de6ec31d65b51f0059e70","","root","83f03ed3d1a40f8c3bc14b00b0d619cf97efd9237a1c34ded7827b6e1ac9a76f"],["p","83f03ed3d1a40f8c3bc14b00b0d619cf97efd9237a1c34ded7827b6e1ac9a76f"]],"content":"{\"tags\":[],\"pubkey\":\"83f03ed3d1a40f8c3bc14b00b0d619cf97efd9237a1c34ded7827b6e1ac9a76f\",\"created_at\":1754032863,\"id\":\"9cb056081e92641563050fd067e831a5d5759e2db18de6ec31d65b51f0059e70\",\"content\":\"\\\"The secret of change is to focus all of your energy, not on fighting the old, but on building the new.\\\"\",\"kind\":1,\"sig\":\"fbefc7f5fd96d3fea542b3d5e245e6e4063dd86b300d26d5cd037e9759c768bc80e48575768e7f5e07907f1128c164f4a54b00ff4de737a6747452489126915a\"}","sig":"a2ba0337b8562a629e4809975f8d0ac3c3acf9b7fd1d5c16d432377965d926012009b555d14e6f06cd294bdabfe9ddc0adee6fdc44c96c1b5330b5ce205ef854"} +{"kind":7,"id":"c6b29038fd74b4511ebfcc2a62fe2ee0994cdabf96c57576242af69c20658f23","pubkey":"8b2be0a0ad34805d76679272c28a77dbede9adcbfdca48c681ec8b624a1208a6","created_at":1754081291,"tags":[["e","2eea2bf9e1ec3482719cf6fc99dd2e8352048afa8409470f1cac2bddbaf1f18a"],["p","32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245"]],"content":"🤙","sig":"b5c3ddd778532b3922b6e833e70fe037484875471dba03e66e4f5c60c75da8aab8e5c16c66227141ad06dcd8099832503b2a4893d38e542172eacffd94ccb263"} +{"kind":7,"id":"9da7d4758b0f6b45f8bee78f98f834b368d231bc716c1eb6a7bb696469112df6","pubkey":"8b2be0a0ad34805d76679272c28a77dbede9adcbfdca48c681ec8b624a1208a6","created_at":1753923872,"tags":[["e","a0fe13ee840171202df6cf5d8900656e98d1268c1e22bd23dc88c127edac6be3","","mention"],["p","8b2be0a0ad34805d76679272c28a77dbede9adcbfdca48c681ec8b624a1208a6","","mention"],["e","ff9d2c7b4bf25d9447bb0007d4ec5c1acc85bd6fd5975489ac05393d89470ffb","wss://nostr.wine","3f770d65d3a764a9c5cb503ae123e62ec7598ad035d836e2a810f3877a745b24"],["p","3f770d65d3a764a9c5cb503ae123e62ec7598ad035d836e2a810f3877a745b24","wss://nostr.wine"]],"content":"🤙","sig":"25a3c6af989effda95852ae96f28164bbad80ca370d5bdabf4bdce4a06e635f338a75336852ee4571bda19a29854c5e2c2799a9b9986c3eeabb35a3360f482f1"} 
+{"kind":7,"id":"ea32095010bfdef5a2fe9dfe13b0626ff37946201b03fd43e4a3801cc8ec4582","pubkey":"8b2be0a0ad34805d76679272c28a77dbede9adcbfdca48c681ec8b624a1208a6","created_at":1753923862,"tags":[["e","ff9d2c7b4bf25d9447bb0007d4ec5c1acc85bd6fd5975489ac05393d89470ffb","","root"],["p","8b2be0a0ad34805d76679272c28a77dbede9adcbfdca48c681ec8b624a1208a6"],["p","3f770d65d3a764a9c5cb503ae123e62ec7598ad035d836e2a810f3877a745b24"],["p","8b2be0a0ad34805d76679272c28a77dbede9adcbfdca48c681ec8b624a1208a6"],["e","67483ead425813c55d0494580d4acdbf65a53b8d2258fe77544193550b330932","wss://nos.lol","17538dc2a62769d09443f18c37cbe358fab5bbf981173542aa7c5ff171ed77c4"],["p","17538dc2a62769d09443f18c37cbe358fab5bbf981173542aa7c5ff171ed77c4","wss://nos.lol"]],"content":"🤙","sig":"3663ff94cdecc75534c42e13313a0d6248f32d95bd7bacbeaaf5c85dca39498df478b89bd007e897840f3d930f058bf5bfd9e72ff28c1d0f78ab169ae381baba"} +{"kind":10000,"id":"8c2d2aa82a6bcac20a45d7d35320ce17b000b2c96eea982ca8108679fef40397","pubkey":"8b2be0a0ad34805d76679272c28a77dbede9adcbfdca48c681ec8b624a1208a6","created_at":1753484319,"tags":[["p","bbdb3ed8569c578ae4b5834eb0a5bf797ecd5551b15a528fc7561cf2c2c747be"],["word","democrat"],["word","shitcoin"],["p","36af487454212c5c815f04f7ca8be3e6e78ed3154bdf9cad80bcdb4dc0a9f78a"],["p","67b83190ea3fcae410f91c07cee0672a3517629f1231a3191875c7dfae0a1d72"],["p","4fc232dd5fa7e68859166cb8f8fabf3b84fde93ac957d44d37a848fb756d5590"],["p","496d38f69865530028c7d212314d3ce6d605f3528a6c4020a067c9b5bc49fb13"],["p","30d3ffe11bae0bb89a45655c393dfd9024150f2bac8758a6b6b73f8e0f4ad543"],["e","a2b99ca7d6d4a2a4dd104fbcaade422c5ef69b45ca9c542a71e3f5eed7e42a71"],["word","vegetarian"],["word","carnivore"],["p","77ce56f89d1228f7ff3743ce1ad1b254857b9008564727ebd5a1f317362f6ca7"],["p","0689df5847a8d3376892da29622d7c0fdc1ef1958f4bc4471d90966aa1eca9f2"],["word","trump"],["p","e5a633cac6effc774f59b455dc4c6ae640b3b0d7bea4dd51ee567dc6a9512d1e"],["word","meat"],["p","f8e6c64342f1e052480630e27e1016dce35fc3a614e60434fef4aa2503328ca9"],["e","29b89ee4c4aa1e9b0602e2640a9f8df9149df0fd0bec3f5d53ad8f69841e7f34"],["p","d4a2f2a06ac25d8cd496ca7e04260bd645a2705ce0d5a30c245ab9fb4def3159"],["p","356875ffd729b06eeb4c1d7a70a1f750045d067774d21c0faffe4af2bf96a2e8"],["p","0d7bc97435d2ea65924a439175e6e8364c279870b7f15351846af13a59086167"],["p","fb7d9edb022881ac80da6369832f67e300f06d8524a2a55d1aa88aed51b481ba"],["p","1ae011cb34999af15602a2aa927f32bb92d65f6161ed71abdaa4f50c1257a556"],["p","5347d301dc729236160f3ccda4d10d81adf4085431293b02737c50cbe2dad739"],["p","0259247de23ef81418dd2bd2f13697451af2af4f873098af989051bbbd5014d3"],["p","16f1a0100d4cfffbcc4230e8e0e4290cc5849c1adc64d6653fda07c031b1074b"],["p","ab7d91705025289d6149ad9ea1abc4ce4c49555df71904f29136c223785ea6f7"],["p","12eaa5ca3d10c895bea4906ab5015a7f59b2fcc1b96a24426ca67d09c06ff3b6"],["p","a94e9a93b4ff4090d22c5bd5970277c96ab64fefcdbc89a97cd032108d411595"],["p","122a5dc0e22abc23fb0c0f2dcd374876686a6b9940ac34935e512a78fdc5f88b"],["word","republican"],["word","kamala"],["p","2d0853759fd1fe8257685c9af893122f0d32cea444003b2630c24f8b32cdf4c8"],["p","d5475b24841e54e51087a09b067c9639bea1c8a530256a8f5412589c8098e1c4"]],"content":"","sig":"5181bd86ca7790494962420ea8f2bb7f367879d184fa89170446a67c0736ee8271a9b473e533ab1840878ebd428873f6a298e0730224648df23b9aea023c4f9b"} 
+{"kind":6,"id":"e4b78743122765b7766d2e14244ffa91084e1c41bb4a626ebd0336c8ab602208","pubkey":"8b2be0a0ad34805d76679272c28a77dbede9adcbfdca48c681ec8b624a1208a6","created_at":1752874441,"tags":[["e","2b1c97955bab8a873ba367a73128bf83f9c0d65c9cd9ba9cb4355d36b17491dc","","root"],["p","32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245"]],"content":"{\"sig\":\"8f05c1471512c250bb964c13fe7a76f4dfdb23e7704e995219ba1c6adb54475029bc3f7d042f44716849e039380c8f413f6fedb71fda85c737465e63da3cbd0e\",\"id\":\"2b1c97955bab8a873ba367a73128bf83f9c0d65c9cd9ba9cb4355d36b17491dc\",\"tags\":[[\"client\",\"Damus Notedeck\"],[\"t\",\"notedeck\"]],\"content\":\"quick obsidian clone in damus #notedeck in a 100 lines of code:\\n\\nhttps:\\/\\/cdn.jb55.com\\/s\\/notedeck-notebook-demo.mp4\\n\\nhttps:\\/\\/cdn.jb55.com\\/s\\/f0e9f40d25a770ea.txt\",\"created_at\":1752871394,\"kind\":1,\"pubkey\":\"32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245\"}","sig":"f81a7164fd615fe3743932fc8498d282d1abfe986271c0e051ba4779b4f3c18b7ff3e2620df716cff580ee8501f89755aba4769bbfdddad29dfab3d94647ca2c"} +{"kind":7,"id":"7c0f11422af42ca9297d140d9ce03695634333214e9ad133457f3e7be07a7f09","pubkey":"8b2be0a0ad34805d76679272c28a77dbede9adcbfdca48c681ec8b624a1208a6","created_at":1752874419,"tags":[["e","2b1c97955bab8a873ba367a73128bf83f9c0d65c9cd9ba9cb4355d36b17491dc"],["p","32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245"]],"content":"🤙","sig":"fc63d687ab7320db2eb7d9cee58375698eacb31d247149fb6c42fa7db09fcb154362c6ae00cdac887a129542b2791b14dd4f2ab84560b436789a25a9c7a0be94"} +{"kind":10002,"id":"4d86fa420ddd84aefa4e30b9e62bcdd13bef227909269cf09079da7080d7dfe2","pubkey":"520830c334a3f79f88cac934580d26f91a7832c6b21fb9625690ea2ed81b5626","created_at":1755901575,"tags":[["r","wss://relay.nos.social"],["r","wss://relay.primal.net"],["r","wss://pyramid.fiatjaf.com"],["r","wss://nos.lol"],["r","wss://yabu.me"],["r","wss://relay.coinos.io"],["r","wss://relay.damus.io"],["r","wss://nostr.wine"],["r","wss://relay.mostr.pub"],["r","wss://relay.denver.space"],["r","wss://relay.goodmorningbitcoin.com"],["r","wss://relay.fountain.fm"]],"content":"","sig":"3aa55dbcd9956cdb5d4c04a0155113fc6bed3bf4872038e078c14ff4d96cf465184369d6c4098a82b4844ded1da6bb646c5439294214e8936bd4ffcffad9d092"} +{"kind":1,"id":"364f77d549e367b77995d9f9671296dd1233d019a5a5fb6087cb769e7180048b","pubkey":"520830c334a3f79f88cac934580d26f91a7832c6b21fb9625690ea2ed81b5626","created_at":1755899036,"tags":[],"content":"simultaneously playing runescape, while running damus on xcode, posting here, and spamming bitchat. 
It’s what the internet should’ve always been","sig":"38c9c0c56ad0e74120e5df6d89f5c6563fe8fc7174cea564b6dc6ac8b4bacafc5f55813d9c51827d5c65c4c6f24670992b859ecbcbd8a088938a548f68c08195"} +{"kind":7,"id":"ff794b29531a7367f1c23bc778aff9b53579673539d91cb899c6de975200f2fb","pubkey":"520830c334a3f79f88cac934580d26f91a7832c6b21fb9625690ea2ed81b5626","created_at":1755906275,"tags":[["e","e278ab9f097d74dd954315c621809afdf5b80944f8a5aa6b6c1a2b01ed9a300e","wss://nos.lol","50d94fc2d8580c682b071a542f8b1e31a200b0508bab95a33bef0855df281d63"],["p","50d94fc2d8580c682b071a542f8b1e31a200b0508bab95a33bef0855df281d63","wss://nos.lol"]],"content":"🤙","sig":"998cb2e367fbe745f21f821ba9263b708bbaf15a0f6a3a54bb1bec1903e6ffe50c72f65d04a43f07368be185daf3367964500fcbcc2375e6c48359ef6b463cce"} +{"kind":7,"id":"8cd0201e41b37ffd1cf02e77add0013876d0ddc71dd52d223e5c0d0e527a8abf","pubkey":"520830c334a3f79f88cac934580d26f91a7832c6b21fb9625690ea2ed81b5626","created_at":1755896884,"tags":[["p","3f770d65d3a764a9c5cb503ae123e62ec7598ad035d836e2a810f3877a745b24"],["p","520830c334a3f79f88cac934580d26f91a7832c6b21fb9625690ea2ed81b5626","wss://relay.primal.net"],["p","3f770d65d3a764a9c5cb503ae123e62ec7598ad035d836e2a810f3877a745b24","wss://relay.nostrplebs.com:"],["e","8fe79d59801269f6196f00d96ed892ebb000833b0898624409c95cf576e08d2a","wss://relay.primal.net","reply","520830c334a3f79f88cac934580d26f91a7832c6b21fb9625690ea2ed81b5626"],["e","bdbc7d31737d089217b850a41dd4f2c74cca616670002bc4b9922eaadb7e5fb5","wss://relay.nostrplebs.com:","root","3f770d65d3a764a9c5cb503ae123e62ec7598ad035d836e2a810f3877a745b24"],["e","4abcdc544c147fcdfe8bdc708ede04a4620458f26c43518b4e3de37e65996267","wss://relay.damus.io","2f52b19fe1408f69ee0e909ed8fcac138c5f370c05de24f0d8370ada79752323"],["p","2f52b19fe1408f69ee0e909ed8fcac138c5f370c05de24f0d8370ada79752323","wss://relay.damus.io"]],"content":"🤙","sig":"a18b457adbc1f6cf76abf01dc0a452d4c1047b74d98e018eb7aa4b4de409c37ebc053b45633891146f56c6db6dc93128fd9cd25bd63dd58dc2e091b44ee961fa"} +{"kind":7,"id":"37890b05cc0b9279075922bc1b00e247269d4c07fd32e2298e4fe00e6b67993f","pubkey":"520830c334a3f79f88cac934580d26f91a7832c6b21fb9625690ea2ed81b5626","created_at":1755894783,"tags":[["e","b4c9b8c731f57c1696e25dec179040362740c82925cee45174fd645ab416ad49","wss://relay.damus.io","ea57b25f7a57c61d7dd0bf62411244a580d6709e42a20428fd381f89ef8d63db"],["p","ea57b25f7a57c61d7dd0bf62411244a580d6709e42a20428fd381f89ef8d63db","wss://relay.damus.io"]],"content":"🤙","sig":"f1639feed320e308500648a960fea7163f6ef17099e6f9bff7843ca797c6540a96889b20b0c2017dc510ae57032695c7e6ddd598cab453b980a6cf5299357b48"} +{"kind":1,"id":"e3964330590661a9ac1733f04a9ff05a8142e7643683c3ef4c42b24fa2e2b9e6","pubkey":"520830c334a3f79f88cac934580d26f91a7832c6b21fb9625690ea2ed81b5626","created_at":1755894439,"tags":[["p","82341f882b6eabcd2ba7f1ef90aad961cf074af15b9ef44a09f9d2a8fbfbe6a2"]],"content":"nostr:npub1sg6plzptd64u62a878hep2kev88swjh3tw00gjsfl8f237lmu63q0uf63m how do I change my profile pic on bitchat?","sig":"585609cfba576e696fdf2ad4b43a6401a7b793fe3cb6a323a543e349bc5e169faf4960d3d40e812801e57ccb6f8d45a0952e87386b91f53ae0f502ea0ce01e99"} +{"kind":1,"id":"468321654a41a72cfa9bf945ce1fac3083ab2a988a0ddbc3ed77e6fcf59c67db","pubkey":"520830c334a3f79f88cac934580d26f91a7832c6b21fb9625690ea2ed81b5626","created_at":1755894392,"tags":[["e","93e25b235c01ce84b0f56613e901798415f587e65d45ad833136b132810af0f6","wss://nos.lol","root"],["p","d307643547703537dfdef811c3dea96f1f9e84c8249e200353425924a9908cf8"]],"content":"old school 
nostr","sig":"2a9c910a09cbed196d70388fd29470875c7c69aaa69ce912f6bd4693bd300aa3b286ef01524d699ceaba2b85c58373a3b45364cfb0a13444714c69ad334cb284"} +{"kind":1,"id":"11e62ee0fce5484159b32a1bcd24b5e90334d8d4c84ed2f2affbbfec9850bd16","pubkey":"520830c334a3f79f88cac934580d26f91a7832c6b21fb9625690ea2ed81b5626","created_at":1755894359,"tags":[["q","bc7d8fef98b7ccf7e4f0cec56ebe60825c3ec14ce9724fa29bbe594711646520","wss://relay.primal.net","50d94fc2d8580c682b071a542f8b1e31a200b0508bab95a33bef0855df281d63"],["p","50d94fc2d8580c682b071a542f8b1e31a200b0508bab95a33bef0855df281d63","wss://relay.primal.net"],["p","50d94fc2d8580c682b071a542f8b1e31a200b0508bab95a33bef0855df281d63"]],"content":"I don't believe we shall ever have free open speech again before we take the thing out of the hands of centralized entities, that is, we can't take them violently out of the hands of centralized entities, all we can do is by some sly roundabout way introduce something that they can't stop\n\nnostr:nevent1qqstclv0a7vt0n8huncva3twhesgyhp7c9xwjuj052dmuk28z9jx2gqpzemhxue69uhhyetvv9ujuurjd9kkzmpwdejhgqg5waehxw309aex2mrp0yhxgctdw4eju6t0qy2hwumn8ghj7un9d3shjtnrda5kummn9e5k7qfzwaehxw309aex2mrp0yhxwmm0v3kk7unwd9hxwcnfw33k76tw9e3k7mgzypgdjn7zmpvqc6ptqud9gtutrcc6yq9s2z96h9dr80hss4wl9qwkxqcyqqqqqqgway7wd","sig":"e46c3c7c43afe3db53a8d7a0c20268a1cc153032cf88d64f07531ca185cc42de5dde6860ffe5d9d8f573b08aca4a94e1333f8f6f5bb60147cdad9af514fab215"} +{"kind":1,"id":"8fe79d59801269f6196f00d96ed892ebb000833b0898624409c95cf576e08d2a","pubkey":"520830c334a3f79f88cac934580d26f91a7832c6b21fb9625690ea2ed81b5626","created_at":1755894028,"tags":[["e","bdbc7d31737d089217b850a41dd4f2c74cca616670002bc4b9922eaadb7e5fb5","wss://nostr.wine","root"],["p","3f770d65d3a764a9c5cb503ae123e62ec7598ad035d836e2a810f3877a745b24"]],"content":"is it chatting or bitching?","sig":"f520f01563f946a3d2ec7b71976fa2e2e0c9dc39bdb92a5b46c08b8b7d165d91dc47d7954f8f6ca49181a12e21c1f63820a046439948ed2e8fe64571ce57c5a7"} +{"kind":1,"id":"3b630c7e27edd3c4158018d8547537c442861c1e4abfbfb7eb1c49a6e23db01c","pubkey":"520830c334a3f79f88cac934580d26f91a7832c6b21fb9625690ea2ed81b5626","created_at":1755893906,"tags":[["e","bc7d8fef98b7ccf7e4f0cec56ebe60825c3ec14ce9724fa29bbe594711646520","wss://relay.primal.net","root"],["p","50d94fc2d8580c682b071a542f8b1e31a200b0508bab95a33bef0855df281d63"]],"content":"this is the way","sig":"77eaa6ab39f3112e1d836ba769ad3865adbfbbefe282a7e4ce4f82d7e3c3123f124c18d509fc70671146973822c2a72eab277f29bc8f07f86ba1f7fbcd17fa2d"} +{"kind":7,"id":"cee750e4f750dae6f5437775f8a1daeb5b256e885dbbe214af5a47565758cd5b","pubkey":"520830c334a3f79f88cac934580d26f91a7832c6b21fb9625690ea2ed81b5626","created_at":1755893895,"tags":[["e","39199d6e636c92b10941d0f9ab3a1ebed1348864f6813223e5ce5062aabaf389","","mention"],["e","bc7d8fef98b7ccf7e4f0cec56ebe60825c3ec14ce9724fa29bbe594711646520","wss://relay.primal.net","50d94fc2d8580c682b071a542f8b1e31a200b0508bab95a33bef0855df281d63"],["p","50d94fc2d8580c682b071a542f8b1e31a200b0508bab95a33bef0855df281d63","wss://relay.primal.net"]],"content":"🤙","sig":"d318cf5ac79593e1cbe40a56b483b4c8ac195dbcbf3c86c3b819bde6ff7fbd6e4e89d3478aec2d2e651d7eeb680ada9e44220cf291d707b690b61a3813d8cc91"} +{"kind":1,"id":"d9070372fe375987ad3f44c4551ff38965b5553a3e6e6fbd767da7c09d11f897","pubkey":"520830c334a3f79f88cac934580d26f91a7832c6b21fb9625690ea2ed81b5626","created_at":1755893868,"tags":[],"content":"interop the best 
op","sig":"6183764f9876c597a52c674e80fe7e899750f0ba93fb2eedb910fd20326dac531a2c408351fd0326946798275bc9303e29dcbf2fcf9c552b17113685dda911ff"} +{"kind":7,"id":"ec547734a1ff63d5531a84867d6e97b5e97f6104a30f38bc7af342c061257535","pubkey":"520830c334a3f79f88cac934580d26f91a7832c6b21fb9625690ea2ed81b5626","created_at":1755893674,"tags":[["e","2e6e7e9ce61ace1de9bfb12d9ef82e87cf106237d8f1b8b3c71220287701ba53","wss://relay.primal.net/","root","520830c334a3f79f88cac934580d26f91a7832c6b21fb9625690ea2ed81b5626"],["p","520830c334a3f79f88cac934580d26f91a7832c6b21fb9625690ea2ed81b5626","wss://relay.nos.social/"],["e","d9af90c17ce8efef29020b1da5ce238b10bfdf2e711e649509a14d6411264d36","wss://relay.mostr.pub","460c25e682fda7832b52d1f22d3d22b3176d972f60dcdc3212ed8c92ef85065c"],["p","460c25e682fda7832b52d1f22d3d22b3176d972f60dcdc3212ed8c92ef85065c","wss://relay.mostr.pub"]],"content":"🤙","sig":"73fee919cf93422e87740b6f03a90d8bc11b74487b79bc21470b13aa6565bcba046e34c8d406d399ce09fa1b2294afab1e3c68a39f3d52e51a18e066e31cbf1f"} +{"kind":1,"id":"2e6e7e9ce61ace1de9bfb12d9ef82e87cf106237d8f1b8b3c71220287701ba53","pubkey":"520830c334a3f79f88cac934580d26f91a7832c6b21fb9625690ea2ed81b5626","created_at":1755893538,"tags":[["imeta","url https://video.nostr.build/79fed7b39455fb0db6a6414da55875d4cbc5e321b5d3f99496038e1d9e0d3ed1.mp4","blurhash e14LOL_2%NIn9Y-p%3%MbIM{Rj9YM|tQ%MxtkC%MogRQ_3fPIoogxu","dim 886x1920"],["r","https://video.nostr.build/79fed7b39455fb0db6a6414da55875d4cbc5e321b5d3f99496038e1d9e0d3ed1.mp4"]],"content":"WIP\n\nhttps://video.nostr.build/79fed7b39455fb0db6a6414da55875d4cbc5e321b5d3f99496038e1d9e0d3ed1.mp4","sig":"9dfb20ea3744d279cea2681d465df94243c932529e422329be6ad528600473abd900735117349d0fd2f9e3e5a6d557a0c4a833abd612515abbc4724b5612e498"} +{"kind":1311,"id":"dc76dc201689e2c872781ff55e51750f12a0d9dba1a772b37a9c2eb6dafbc5b7","pubkey":"520830c334a3f79f88cac934580d26f91a7832c6b21fb9625690ea2ed81b5626","created_at":1755892447,"tags":[["a","30311:cf45a6ba1363ad7ed213a078e710d24115ae721c9b47bd1ebf4458eaefb4c2a5:a5b6f719-a6b6-41ec-9c06-869b6cd6e287"]],"content":"watching on Damus 🤙 ","sig":"127b74b74e39408beca8e84da9ddaac510aeb9e0b2eca41d37d44ce39f916dd4c85b71713e6ab1aca9cfa6015d665d342e364ac8cc1ce7fd5462ebbed33ec597"} +{"kind":1311,"id":"ffeef04e13bf12382df8bbb8a724da88fc6932bd61ab04decb5d9a22b814817e","pubkey":"520830c334a3f79f88cac934580d26f91a7832c6b21fb9625690ea2ed81b5626","created_at":1755882131,"tags":[["a","30311:cf45a6ba1363ad7ed213a078e710d24115ae721c9b47bd1ebf4458eaefb4c2a5:2891fc5e-2958-4faa-ab5e-8cb8db78d177"]],"content":"🐐","sig":"0c35be1fc6a8382a4e3a323b9910d236a19ca0443456061d3290f8ff918849b328c437a4b98fa56aec49a56c4ba87e5012c5e9c1c4ecfbe877f2f6d6116dedfd"} +{"kind":1311,"id":"54308674e3a2d83f0c6c881f70fe4066275e383d4a1d9ddbbc154e37cd647be4","pubkey":"520830c334a3f79f88cac934580d26f91a7832c6b21fb9625690ea2ed81b5626","created_at":1755881713,"tags":[["a","30311:cf45a6ba1363ad7ed213a078e710d24115ae721c9b47bd1ebf4458eaefb4c2a5:537a365c-f1ec-44ac-af10-22d14a7319fb"]],"content":"Hello from Damus ","sig":"2d09a0981e176d150866adf5f66abb8aa76a3d9ff7adb0d7faf6655832f26945f2a0a4b26980eda9afb43ee87301de20099239edd95934e45314d42af148659a"} 
+{"kind":6,"id":"f93d7fc7ceec0dad7bf9005b2f591dc8fde418a737a8df10f4e4c332545ad24c","pubkey":"520830c334a3f79f88cac934580d26f91a7832c6b21fb9625690ea2ed81b5626","created_at":1755878732,"tags":[["e","a26f5e404530201f8c289c216402affb8f6c403744ea4474e86e08c8e44d856f","wss://relay.primal.net","root","50d94fc2d8580c682b071a542f8b1e31a200b0508bab95a33bef0855df281d63"],["p","50d94fc2d8580c682b071a542f8b1e31a200b0508bab95a33bef0855df281d63","wss://relay.primal.net"]],"content":"{\"created_at\":1755877124,\"content\":\"I love nostr\",\"pubkey\":\"50d94fc2d8580c682b071a542f8b1e31a200b0508bab95a33bef0855df281d63\",\"sig\":\"3fc5db4c43ef435d0dace5619808c0a4d364ecb88fa4758d828909a1a0ad9ac650d7673b7eaad0b13ebb2625cce5b0991d703de83c6dd10334ef614b294ca2be\",\"tags\":[[\"r\",\"wss:\\/\\/hist.nostr.land\\/\"],[\"r\",\"wss:\\/\\/nos.lol\\/\"],[\"r\",\"wss:\\/\\/nostr.bitcoiner.social\\/\"],[\"r\",\"wss:\\/\\/nostr.mom\\/\"],[\"r\",\"wss:\\/\\/relay.damus.io\\/\"],[\"r\",\"wss:\\/\\/relay.nostr.band\\/\"],[\"r\",\"wss:\\/\\/relay.primal.net\\/\",\"read\"],[\"r\",\"wss:\\/\\/relay.snort.social\\/\"],[\"r\",\"wss:\\/\\/yabu.me\\/\"],[\"r\",\"wss:\\/\\/nostr-pub.wellorder.net\\/\"]],\"id\":\"a26f5e404530201f8c289c216402affb8f6c403744ea4474e86e08c8e44d856f\",\"kind\":1}","sig":"083fc35b658ff263c564810359db48d842e5836652da498f006e3bcd4854f262aa51c066ea861bb870d779708af0a7662c198b470173eaf9dee042af59f188b1"} +{"kind":1311,"id":"e40830af158a3e752f7912e119392842e2f35a5a2ac92dafb53399884fdba988","pubkey":"520830c334a3f79f88cac934580d26f91a7832c6b21fb9625690ea2ed81b5626","created_at":1755790396,"tags":[["a","30311:cf45a6ba1363ad7ed213a078e710d24115ae721c9b47bd1ebf4458eaefb4c2a5:77cb2589-e57e-4f3b-ae74-176d198e26d6"]],"content":"👏","sig":"ba1ac5f30df4e641a79c1b8f5aa364a4e0ead6ef2aa636cf5c452ecf7cfe581dee9491aacd8ef07fc37983b7fb358462ea8a60f87c0b9917ea4eb3f4f0df1fce"} +{"kind":1,"id":"875bf42c01dacf3afbe769efc88d1fd631de549064e45e168095632b61ea41a9","pubkey":"520830c334a3f79f88cac934580d26f91a7832c6b21fb9625690ea2ed81b5626","created_at":1755740509,"tags":[["t","9q"]],"content":"#9q is quiet","sig":"ed7e2cfc7f6b6abef4c4f5ce3f2b0a0908a1775279b4e32c45ed40503589872027ca6eaf458a2d510c61b4b6f6b906432c351f9367640003d39f680f1c968a53"} +{"kind":6,"id":"69908c4a547ccb813e695dc6f191ea2b9d170009c823ce9a5470a76982076052","pubkey":"0c9e5e17fbdf555ef41daf3c3a196749c848f9c791966f30fae5e4c0480e870b","created_at":1755191835,"tags":[["e","de4ca122d3c5d923945404a0a60bdb1b0ef5fd66539ab97e43991e2c060a5f7e"],["p","32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245"]],"content":"{\"kind\":1,\"tags\":[[\"q\",\"9cfb7c1917d40fd08367c432e39612369173c0c0ba603844f81e81132b8ad0e7\",\"wss:\\/\\/relay.damus.io\",\"4c96d763eb2fe01910f7e7220b7c7ecdbe1a70057f344b9f79c28af080c3ee30\"],[\"p\",\"4c96d763eb2fe01910f7e7220b7c7ecdbe1a70057f344b9f79c28af080c3ee30\",\"wss:\\/\\/relay.damus.io\"],[\"p\",\"4c96d763eb2fe01910f7e7220b7c7ecdbe1a70057f344b9f79c28af080c3ee30\"],[\"t\",\"nostrdb\"],[\"p\",\"8b2be0a0ad34805d76679272c28a77dbede9adcbfdca48c681ec8b624a1208a6\"]],\"content\":\"my 2 year old, 200+ commit PR finally landed to get damus ios updated to the latest version of #nostrdb.\\n\\nThis will allow us to have notedeck-level performance and offline-first capabilities\\n\\niOS now has a fully powered embedded relay like how android and notedeck do.\\n\\nThis will enable powerful new features like local profile search which i want to add soon.\\n\\nThanks nostr:npub13v47pg9dxjq96an8jfev9znhm0k7ntwtlh9y335paj9kyjsjpznqzzl3l8 for finally getting this 
over the finish line!\\n\\nnostr:nevent1qqsfe7murytagr7ssdnugvhrjcfrdytncrqt5cpcgnupaqgn9w9dpecpz3mhxue69uhhyetvv9ujuerpd46hxtnfduq3vamnwvaz7tmjv4kxz7fwdehhxarj9e3xzmnyqyxhwumn8ghj7mn0wvhxcmmvqgsye9khv04jlcqezrm7wgst03lvm0s6wqzh7dztnauu9zhssrp7uvqrqsqqqa28sfwuwx\",\"pubkey\":\"32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245\",\"created_at\":1755116910,\"id\":\"de4ca122d3c5d923945404a0a60bdb1b0ef5fd66539ab97e43991e2c060a5f7e\",\"sig\":\"3552dae609073bad57560b450aca996b3133f8bcf7bc9e39f993af8a76b9d929b3847d0c9eb803c60b17ff94648f4b534363786bd49e1439036136dc824f9cb6\"}","sig":"0745c5a1be1e43c306088ddf380c13aed12b79c177ec339f0b4390fca6286cae187ca86dac8ce888b80a2eecf1424c30869b380cd8977ee8c1e84d6963508355"} +{"kind":6,"id":"7dba785763cd22ccea88d8460e708fca8b64d1c36058d3d0f93d0263e29e3dde","pubkey":"3492dd43d496a237f4441fd801f5078b63542c3e158ffea903cb020a1af4ffdd","created_at":1755143513,"tags":[["e","de4ca122d3c5d923945404a0a60bdb1b0ef5fd66539ab97e43991e2c060a5f7e"],["p","32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245"]],"content":"{\"content\":\"my 2 year old, 200+ commit PR finally landed to get damus ios updated to the latest version of #nostrdb.\\n\\nThis will allow us to have notedeck-level performance and offline-first capabilities\\n\\niOS now has a fully powered embedded relay like how android and notedeck do.\\n\\nThis will enable powerful new features like local profile search which i want to add soon.\\n\\nThanks nostr:npub13v47pg9dxjq96an8jfev9znhm0k7ntwtlh9y335paj9kyjsjpznqzzl3l8 for finally getting this over the finish line!\\n\\nnostr:nevent1qqsfe7murytagr7ssdnugvhrjcfrdytncrqt5cpcgnupaqgn9w9dpecpz3mhxue69uhhyetvv9ujuerpd46hxtnfduq3vamnwvaz7tmjv4kxz7fwdehhxarj9e3xzmnyqyxhwumn8ghj7mn0wvhxcmmvqgsye9khv04jlcqezrm7wgst03lvm0s6wqzh7dztnauu9zhssrp7uvqrqsqqqa28sfwuwx\",\"kind\":1,\"pubkey\":\"32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245\",\"id\":\"de4ca122d3c5d923945404a0a60bdb1b0ef5fd66539ab97e43991e2c060a5f7e\",\"sig\":\"3552dae609073bad57560b450aca996b3133f8bcf7bc9e39f993af8a76b9d929b3847d0c9eb803c60b17ff94648f4b534363786bd49e1439036136dc824f9cb6\",\"created_at\":1755116910,\"tags\":[[\"q\",\"9cfb7c1917d40fd08367c432e39612369173c0c0ba603844f81e81132b8ad0e7\",\"wss:\\/\\/relay.damus.io\",\"4c96d763eb2fe01910f7e7220b7c7ecdbe1a70057f344b9f79c28af080c3ee30\"],[\"p\",\"4c96d763eb2fe01910f7e7220b7c7ecdbe1a70057f344b9f79c28af080c3ee30\",\"wss:\\/\\/relay.damus.io\"],[\"p\",\"4c96d763eb2fe01910f7e7220b7c7ecdbe1a70057f344b9f79c28af080c3ee30\"],[\"t\",\"nostrdb\"],[\"p\",\"8b2be0a0ad34805d76679272c28a77dbede9adcbfdca48c681ec8b624a1208a6\"]]}","sig":"edc66e637f7df8f47f67055dff158526b9e8567e4c475550f15b2e0b52579d251a4969958068b210806e6178b6ee63be58bfa81cf0a3d95ca76025b43dd6191c"} 
+{"kind":6,"id":"693aa430a52cc5aab7266c321d3a15cf90688c7ba2eca1515676252c6bc95ad1","pubkey":"709bd2be88c1f020b36d0b1414fd92e7306e8b91612bef0b3e3e202189d608e0","created_at":1755124614,"tags":[["e","de4ca122d3c5d923945404a0a60bdb1b0ef5fd66539ab97e43991e2c060a5f7e"],["p","32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245"]],"content":"{\"created_at\":1755116910,\"pubkey\":\"32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245\",\"sig\":\"3552dae609073bad57560b450aca996b3133f8bcf7bc9e39f993af8a76b9d929b3847d0c9eb803c60b17ff94648f4b534363786bd49e1439036136dc824f9cb6\",\"tags\":[[\"q\",\"9cfb7c1917d40fd08367c432e39612369173c0c0ba603844f81e81132b8ad0e7\",\"wss:\\/\\/relay.damus.io\",\"4c96d763eb2fe01910f7e7220b7c7ecdbe1a70057f344b9f79c28af080c3ee30\"],[\"p\",\"4c96d763eb2fe01910f7e7220b7c7ecdbe1a70057f344b9f79c28af080c3ee30\",\"wss:\\/\\/relay.damus.io\"],[\"p\",\"4c96d763eb2fe01910f7e7220b7c7ecdbe1a70057f344b9f79c28af080c3ee30\"],[\"t\",\"nostrdb\"],[\"p\",\"8b2be0a0ad34805d76679272c28a77dbede9adcbfdca48c681ec8b624a1208a6\"]],\"content\":\"my 2 year old, 200+ commit PR finally landed to get damus ios updated to the latest version of #nostrdb.\\n\\nThis will allow us to have notedeck-level performance and offline-first capabilities\\n\\niOS now has a fully powered embedded relay like how android and notedeck do.\\n\\nThis will enable powerful new features like local profile search which i want to add soon.\\n\\nThanks nostr:npub13v47pg9dxjq96an8jfev9znhm0k7ntwtlh9y335paj9kyjsjpznqzzl3l8 for finally getting this over the finish line!\\n\\nnostr:nevent1qqsfe7murytagr7ssdnugvhrjcfrdytncrqt5cpcgnupaqgn9w9dpecpz3mhxue69uhhyetvv9ujuerpd46hxtnfduq3vamnwvaz7tmjv4kxz7fwdehhxarj9e3xzmnyqyxhwumn8ghj7mn0wvhxcmmvqgsye9khv04jlcqezrm7wgst03lvm0s6wqzh7dztnauu9zhssrp7uvqrqsqqqa28sfwuwx\",\"kind\":1,\"id\":\"de4ca122d3c5d923945404a0a60bdb1b0ef5fd66539ab97e43991e2c060a5f7e\"}","sig":"360f5dc7ce49e1357712117d1481940cccc082c2ba2bb6b5de453379bc6e323b104fbd197d8360bb49a748660640e3cb7605fe0513dd48dd8176162bc9f820d4"} +{"kind":6,"id":"5898eed0f0dffefa64b49c048dadb57a2dd006cb61af579a38fcc7bac2323a68","pubkey":"7362fd2529fbd03aaf9158464306577a35b03c2e12f6487ca518476b71faa638","created_at":1755119089,"tags":[["p","4c96d763eb2fe01910f7e7220b7c7ecdbe1a70057f344b9f79c28af080c3ee30"],["p","4c96d763eb2fe01910f7e7220b7c7ecdbe1a70057f344b9f79c28af080c3ee30"],["p","8b2be0a0ad34805d76679272c28a77dbede9adcbfdca48c681ec8b624a1208a6"],["e","de4ca122d3c5d923945404a0a60bdb1b0ef5fd66539ab97e43991e2c060a5f7e","","root"],["p","32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245"]],"content":"{\"kind\":1,\"tags\":[[\"q\",\"9cfb7c1917d40fd08367c432e39612369173c0c0ba603844f81e81132b8ad0e7\",\"wss:\\/\\/relay.damus.io\",\"4c96d763eb2fe01910f7e7220b7c7ecdbe1a70057f344b9f79c28af080c3ee30\"],[\"p\",\"4c96d763eb2fe01910f7e7220b7c7ecdbe1a70057f344b9f79c28af080c3ee30\",\"wss:\\/\\/relay.damus.io\"],[\"p\",\"4c96d763eb2fe01910f7e7220b7c7ecdbe1a70057f344b9f79c28af080c3ee30\"],[\"t\",\"nostrdb\"],[\"p\",\"8b2be0a0ad34805d76679272c28a77dbede9adcbfdca48c681ec8b624a1208a6\"]],\"sig\":\"3552dae609073bad57560b450aca996b3133f8bcf7bc9e39f993af8a76b9d929b3847d0c9eb803c60b17ff94648f4b534363786bd49e1439036136dc824f9cb6\",\"created_at\":1755116910,\"pubkey\":\"32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245\",\"content\":\"my 2 year old, 200+ commit PR finally landed to get damus ios updated to the latest version of #nostrdb.\\n\\nThis will allow us to have notedeck-level performance and offline-first 
capabilities\\n\\niOS now has a fully powered embedded relay like how android and notedeck do.\\n\\nThis will enable powerful new features like local profile search which i want to add soon.\\n\\nThanks nostr:npub13v47pg9dxjq96an8jfev9znhm0k7ntwtlh9y335paj9kyjsjpznqzzl3l8 for finally getting this over the finish line!\\n\\nnostr:nevent1qqsfe7murytagr7ssdnugvhrjcfrdytncrqt5cpcgnupaqgn9w9dpecpz3mhxue69uhhyetvv9ujuerpd46hxtnfduq3vamnwvaz7tmjv4kxz7fwdehhxarj9e3xzmnyqyxhwumn8ghj7mn0wvhxcmmvqgsye9khv04jlcqezrm7wgst03lvm0s6wqzh7dztnauu9zhssrp7uvqrqsqqqa28sfwuwx\",\"id\":\"de4ca122d3c5d923945404a0a60bdb1b0ef5fd66539ab97e43991e2c060a5f7e\"}","sig":"1d7e422e190f703aa3cb2c6608819e524ae6f549b57d4825807ed384bd63c4ed14d74e9b8685e163066049cfae9ae3aadee0a697f93516728c082586ba510016"} +{"kind":6,"id":"84b074a56e26ae8066eba6e6d4f82a061a22e4a235949ad025b67216a5ccae55","pubkey":"78362df72df3ac28b712524e25a6826e2f6d611e05ed851541a09c9e821e7549","created_at":1755118306,"tags":[["e","de4ca122d3c5d923945404a0a60bdb1b0ef5fd66539ab97e43991e2c060a5f7e"],["p","32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245"],["alt","Repost event"]],"content":"{\"id\":\"de4ca122d3c5d923945404a0a60bdb1b0ef5fd66539ab97e43991e2c060a5f7e\",\"pubkey\":\"32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245\",\"created_at\":1755116910,\"kind\":1,\"tags\":[[\"q\",\"9cfb7c1917d40fd08367c432e39612369173c0c0ba603844f81e81132b8ad0e7\",\"wss://relay.damus.io\",\"4c96d763eb2fe01910f7e7220b7c7ecdbe1a70057f344b9f79c28af080c3ee30\"],[\"p\",\"4c96d763eb2fe01910f7e7220b7c7ecdbe1a70057f344b9f79c28af080c3ee30\",\"wss://relay.damus.io\"],[\"p\",\"4c96d763eb2fe01910f7e7220b7c7ecdbe1a70057f344b9f79c28af080c3ee30\"],[\"t\",\"nostrdb\"],[\"p\",\"8b2be0a0ad34805d76679272c28a77dbede9adcbfdca48c681ec8b624a1208a6\"]],\"content\":\"my 2 year old, 200+ commit PR finally landed to get damus ios updated to the latest version of #nostrdb.\\n\\nThis will allow us to have notedeck-level performance and offline-first capabilities\\n\\niOS now has a fully powered embedded relay like how android and notedeck do.\\n\\nThis will enable powerful new features like local profile search which i want to add soon.\\n\\nThanks nostr:npub13v47pg9dxjq96an8jfev9znhm0k7ntwtlh9y335paj9kyjsjpznqzzl3l8 for finally getting this over the finish line!\\n\\nnostr:nevent1qqsfe7murytagr7ssdnugvhrjcfrdytncrqt5cpcgnupaqgn9w9dpecpz3mhxue69uhhyetvv9ujuerpd46hxtnfduq3vamnwvaz7tmjv4kxz7fwdehhxarj9e3xzmnyqyxhwumn8ghj7mn0wvhxcmmvqgsye9khv04jlcqezrm7wgst03lvm0s6wqzh7dztnauu9zhssrp7uvqrqsqqqa28sfwuwx\",\"sig\":\"3552dae609073bad57560b450aca996b3133f8bcf7bc9e39f993af8a76b9d929b3847d0c9eb803c60b17ff94648f4b534363786bd49e1439036136dc824f9cb6\"}","sig":"e17674420d4b2847cc09e4286a73dbef029c6644f82db408935b3680351ba2d96557a7e1fc53feb3d2fe2fab1aadd48cc022a5a40c38737b5049a66d563cdab2"} 
+{"kind":6,"id":"ea2a83dbc872de084ca341400ce0dad86746a0b3fbc3a69b041bc81702fb4af5","pubkey":"2779f3d9f42c7dee17f0e6bcdcf89a8f9d592d19e3b1bbd27ef1cffd1a7f98d1","created_at":1755117962,"tags":[["p","4c96d763eb2fe01910f7e7220b7c7ecdbe1a70057f344b9f79c28af080c3ee30"],["p","4c96d763eb2fe01910f7e7220b7c7ecdbe1a70057f344b9f79c28af080c3ee30"],["p","8b2be0a0ad34805d76679272c28a77dbede9adcbfdca48c681ec8b624a1208a6"],["e","de4ca122d3c5d923945404a0a60bdb1b0ef5fd66539ab97e43991e2c060a5f7e","wss://relay.nostr.band","root","32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245"],["p","32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245","wss://relay.nostr.band"]],"content":"{\"pubkey\":\"32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245\",\"created_at\":1755116910,\"kind\":1,\"content\":\"my 2 year old, 200+ commit PR finally landed to get damus ios updated to the latest version of #nostrdb.\\n\\nThis will allow us to have notedeck-level performance and offline-first capabilities\\n\\niOS now has a fully powered embedded relay like how android and notedeck do.\\n\\nThis will enable powerful new features like local profile search which i want to add soon.\\n\\nThanks nostr:npub13v47pg9dxjq96an8jfev9znhm0k7ntwtlh9y335paj9kyjsjpznqzzl3l8 for finally getting this over the finish line!\\n\\nnostr:nevent1qqsfe7murytagr7ssdnugvhrjcfrdytncrqt5cpcgnupaqgn9w9dpecpz3mhxue69uhhyetvv9ujuerpd46hxtnfduq3vamnwvaz7tmjv4kxz7fwdehhxarj9e3xzmnyqyxhwumn8ghj7mn0wvhxcmmvqgsye9khv04jlcqezrm7wgst03lvm0s6wqzh7dztnauu9zhssrp7uvqrqsqqqa28sfwuwx\",\"tags\":[[\"q\",\"9cfb7c1917d40fd08367c432e39612369173c0c0ba603844f81e81132b8ad0e7\",\"wss:\\/\\/relay.damus.io\",\"4c96d763eb2fe01910f7e7220b7c7ecdbe1a70057f344b9f79c28af080c3ee30\"],[\"p\",\"4c96d763eb2fe01910f7e7220b7c7ecdbe1a70057f344b9f79c28af080c3ee30\",\"wss:\\/\\/relay.damus.io\"],[\"p\",\"4c96d763eb2fe01910f7e7220b7c7ecdbe1a70057f344b9f79c28af080c3ee30\"],[\"t\",\"nostrdb\"],[\"p\",\"8b2be0a0ad34805d76679272c28a77dbede9adcbfdca48c681ec8b624a1208a6\"]],\"id\":\"de4ca122d3c5d923945404a0a60bdb1b0ef5fd66539ab97e43991e2c060a5f7e\",\"sig\":\"3552dae609073bad57560b450aca996b3133f8bcf7bc9e39f993af8a76b9d929b3847d0c9eb803c60b17ff94648f4b534363786bd49e1439036136dc824f9cb6\"}","sig":"8ac697943f87a0b8d34a3f55a8cd80d9dffdec6d2e99c2b186d6d30ad400664dadc62060316e62a424d42b73e47e63c29cbdfafa05e65aa1d26f39ac45273db7"} 
+{"kind":6,"id":"0bd0d9ed97384bbfa6a5fb512d618d27a2ed5591aeb9f937bedd004870cc0e64","pubkey":"8b2be0a0ad34805d76679272c28a77dbede9adcbfdca48c681ec8b624a1208a6","created_at":1755117689,"tags":[["p","4c96d763eb2fe01910f7e7220b7c7ecdbe1a70057f344b9f79c28af080c3ee30"],["p","4c96d763eb2fe01910f7e7220b7c7ecdbe1a70057f344b9f79c28af080c3ee30"],["p","8b2be0a0ad34805d76679272c28a77dbede9adcbfdca48c681ec8b624a1208a6"],["e","de4ca122d3c5d923945404a0a60bdb1b0ef5fd66539ab97e43991e2c060a5f7e","wss://relay.danieldaquino.me/inbox","root","32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245"],["p","32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245","wss://relay.danieldaquino.me/inbox"]],"content":"{\"sig\":\"3552dae609073bad57560b450aca996b3133f8bcf7bc9e39f993af8a76b9d929b3847d0c9eb803c60b17ff94648f4b534363786bd49e1439036136dc824f9cb6\",\"created_at\":1755116910,\"kind\":1,\"id\":\"de4ca122d3c5d923945404a0a60bdb1b0ef5fd66539ab97e43991e2c060a5f7e\",\"tags\":[[\"q\",\"9cfb7c1917d40fd08367c432e39612369173c0c0ba603844f81e81132b8ad0e7\",\"wss:\\/\\/relay.damus.io\",\"4c96d763eb2fe01910f7e7220b7c7ecdbe1a70057f344b9f79c28af080c3ee30\"],[\"p\",\"4c96d763eb2fe01910f7e7220b7c7ecdbe1a70057f344b9f79c28af080c3ee30\",\"wss:\\/\\/relay.damus.io\"],[\"p\",\"4c96d763eb2fe01910f7e7220b7c7ecdbe1a70057f344b9f79c28af080c3ee30\"],[\"t\",\"nostrdb\"],[\"p\",\"8b2be0a0ad34805d76679272c28a77dbede9adcbfdca48c681ec8b624a1208a6\"]],\"pubkey\":\"32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245\",\"content\":\"my 2 year old, 200+ commit PR finally landed to get damus ios updated to the latest version of #nostrdb.\\n\\nThis will allow us to have notedeck-level performance and offline-first capabilities\\n\\niOS now has a fully powered embedded relay like how android and notedeck do.\\n\\nThis will enable powerful new features like local profile search which i want to add soon.\\n\\nThanks nostr:npub13v47pg9dxjq96an8jfev9znhm0k7ntwtlh9y335paj9kyjsjpznqzzl3l8 for finally getting this over the finish line!\\n\\nnostr:nevent1qqsfe7murytagr7ssdnugvhrjcfrdytncrqt5cpcgnupaqgn9w9dpecpz3mhxue69uhhyetvv9ujuerpd46hxtnfduq3vamnwvaz7tmjv4kxz7fwdehhxarj9e3xzmnyqyxhwumn8ghj7mn0wvhxcmmvqgsye9khv04jlcqezrm7wgst03lvm0s6wqzh7dztnauu9zhssrp7uvqrqsqqqa28sfwuwx\"}","sig":"0915ef843a7b9b632450ed1ad31933062091986e6ee671a4ed898b7f89fa74a470c86397e77dd6b5da833c06380701f1c17c8057cc99dec2f0eb920b33bdf01f"} +{"kind":6,"id":"8655d0514f116151ab6332a85d2dfd16227460e6b741d8bf2c50970f07752ed4","pubkey":"130dcd3a1963f7fa35b206c44be6bc6f4ea0f5ee531b26126cb989678d5cfff5","created_at":1755705052,"tags":[["e","64b26d0a587f5f894470e1e4783756b4d8ba971226de975ee30ac1b69970d5a1"],["p","71ecabd8b6b33548e075ff01b31568ffda19d0ac2788067d99328c6de4885975"],["alt","Repost event"]],"content":"{\"id\":\"64b26d0a587f5f894470e1e4783756b4d8ba971226de975ee30ac1b69970d5a1\",\"pubkey\":\"71ecabd8b6b33548e075ff01b31568ffda19d0ac2788067d99328c6de4885975\",\"created_at\":1755694800,\"kind\":1,\"tags\":[[\"t\",\"meme\"],[\"t\",\"memes\"],[\"t\",\"memestr\"],[\"t\",\"plebchain\"]],\"content\":\"https://smartflowsocial.s3.us-east-1.amazonaws.com/clients/cm7kdrwdk0000qyu6fwtd96ui/0cab65a9-0142-48e3-abd7-94d20e30d3b2.jpg\\n\\n\",\"sig\":\"c000794da8c4f7549b546630b16ed17f6edc0af0269b8c46ce14f5b1937431e7575b78351bc152007ebab5720028e5fe4b738f99e8887f273d35dd2217d1cc3d\"}","sig":"cad578a05b9a776ef1435918024052ca1b7ca44f25f2264da6a8381a12d21a3d56d42bc8a8a6fcd91b205c40d950c453e708c4a2fd7a01a9e6e022f2fdb947eb"} 
+{"kind":6,"id":"8672128e3f716fb7b54db7b6b2fcf4b6a5dd17842a0f984ac1029715de2ef735","pubkey":"e87c295ede33c2274e419dd63e9c117804c0dacaabcc8ed59c646cf058da788a","created_at":1755701211,"tags":[["e","64b26d0a587f5f894470e1e4783756b4d8ba971226de975ee30ac1b69970d5a1"],["p","71ecabd8b6b33548e075ff01b31568ffda19d0ac2788067d99328c6de4885975"],["alt","Repost event"]],"content":"{\"id\":\"64b26d0a587f5f894470e1e4783756b4d8ba971226de975ee30ac1b69970d5a1\",\"pubkey\":\"71ecabd8b6b33548e075ff01b31568ffda19d0ac2788067d99328c6de4885975\",\"created_at\":1755694800,\"kind\":1,\"tags\":[[\"t\",\"meme\"],[\"t\",\"memes\"],[\"t\",\"memestr\"],[\"t\",\"plebchain\"]],\"content\":\"https://smartflowsocial.s3.us-east-1.amazonaws.com/clients/cm7kdrwdk0000qyu6fwtd96ui/0cab65a9-0142-48e3-abd7-94d20e30d3b2.jpg\\n\\n\",\"sig\":\"c000794da8c4f7549b546630b16ed17f6edc0af0269b8c46ce14f5b1937431e7575b78351bc152007ebab5720028e5fe4b738f99e8887f273d35dd2217d1cc3d\"}","sig":"78e6b43f14dd6afe8df057295fbbf1dbbf84f3e5e46545e60c1b6614ec8a17beee7a1b546887aa5e5a2730f9ffab540685badb38e636da2075dfe40e1665c4b3"} +{"kind":6,"id":"2360c67ab44b75d11ed38bd92c5b1953bf80c1699a90dd814ed264b52e297330","pubkey":"17538dc2a62769d09443f18c37cbe358fab5bbf981173542aa7c5ff171ed77c4","created_at":1755698930,"tags":[["e","64b26d0a587f5f894470e1e4783756b4d8ba971226de975ee30ac1b69970d5a1","","root","71ecabd8b6b33548e075ff01b31568ffda19d0ac2788067d99328c6de4885975"],["p","71ecabd8b6b33548e075ff01b31568ffda19d0ac2788067d99328c6de4885975"]],"content":"{\"tags\":[[\"t\",\"meme\"],[\"t\",\"memes\"],[\"t\",\"memestr\"],[\"t\",\"plebchain\"]],\"content\":\"https:\\/\\/smartflowsocial.s3.us-east-1.amazonaws.com\\/clients\\/cm7kdrwdk0000qyu6fwtd96ui\\/0cab65a9-0142-48e3-abd7-94d20e30d3b2.jpg\\n\\n\",\"kind\":1,\"pubkey\":\"71ecabd8b6b33548e075ff01b31568ffda19d0ac2788067d99328c6de4885975\",\"created_at\":1755694800,\"id\":\"64b26d0a587f5f894470e1e4783756b4d8ba971226de975ee30ac1b69970d5a1\",\"sig\":\"c000794da8c4f7549b546630b16ed17f6edc0af0269b8c46ce14f5b1937431e7575b78351bc152007ebab5720028e5fe4b738f99e8887f273d35dd2217d1cc3d\"}","sig":"b998688ce3c71e56259cf8ce49c4e2eac6de3edcba4fbc5002fac6529ee4630574381c30c3a288b61bae750b8611ed196e4217f7992758a1d674c30f3b40c828"} +{"kind":6,"id":"ed8f3f956a467a70ec3fcfee3c15e57cc8b389d3df43302ecbb331e2c993647d","pubkey":"28853cacb62492c970f0d27a76962710c0ad97f56e0163693981ffabc0faec3c","created_at":1755696182,"tags":[["e","64b26d0a587f5f894470e1e4783756b4d8ba971226de975ee30ac1b69970d5a1","","root"],["p","71ecabd8b6b33548e075ff01b31568ffda19d0ac2788067d99328c6de4885975"]],"content":"{\"kind\":1,\"pubkey\":\"71ecabd8b6b33548e075ff01b31568ffda19d0ac2788067d99328c6de4885975\",\"tags\":[[\"t\",\"meme\"],[\"t\",\"memes\"],[\"t\",\"memestr\"],[\"t\",\"plebchain\"]],\"id\":\"64b26d0a587f5f894470e1e4783756b4d8ba971226de975ee30ac1b69970d5a1\",\"content\":\"https:\\/\\/smartflowsocial.s3.us-east-1.amazonaws.com\\/clients\\/cm7kdrwdk0000qyu6fwtd96ui\\/0cab65a9-0142-48e3-abd7-94d20e30d3b2.jpg\\n\\n\",\"created_at\":1755694800,\"sig\":\"c000794da8c4f7549b546630b16ed17f6edc0af0269b8c46ce14f5b1937431e7575b78351bc152007ebab5720028e5fe4b738f99e8887f273d35dd2217d1cc3d\"}","sig":"8969b1612ff95d2b72b7599f34695089cf44fd393d9ed02f50ec884f636553a7267c89abc4d186618cac9415ef8338e762be3899a5b28c515675bfee0f32aff0"} 
+{"kind":6,"id":"eb8b95d3d3e53ebfd25255a9ac03ffe6ac0747497ed9d6b274168d51068265b4","pubkey":"1c9dcd8fd2d2fb879d6f02d6cc56aeefd74a9678ae48434b0f0de7a21852f704","created_at":1755694810,"tags":[["e","64b26d0a587f5f894470e1e4783756b4d8ba971226de975ee30ac1b69970d5a1"],["p","71ecabd8b6b33548e075ff01b31568ffda19d0ac2788067d99328c6de4885975"],["alt","Repost event"]],"content":"{\"id\":\"64b26d0a587f5f894470e1e4783756b4d8ba971226de975ee30ac1b69970d5a1\",\"pubkey\":\"71ecabd8b6b33548e075ff01b31568ffda19d0ac2788067d99328c6de4885975\",\"created_at\":1755694800,\"kind\":1,\"tags\":[[\"t\",\"meme\"],[\"t\",\"memes\"],[\"t\",\"memestr\"],[\"t\",\"plebchain\"]],\"content\":\"https://smartflowsocial.s3.us-east-1.amazonaws.com/clients/cm7kdrwdk0000qyu6fwtd96ui/0cab65a9-0142-48e3-abd7-94d20e30d3b2.jpg\\n\\n\",\"sig\":\"c000794da8c4f7549b546630b16ed17f6edc0af0269b8c46ce14f5b1937431e7575b78351bc152007ebab5720028e5fe4b738f99e8887f273d35dd2217d1cc3d\"}","sig":"0fd8c994fe051a12fb4bae8917de2ac2dfdaa32bedc316a480b20bb3748712ca743b8428d9422f7c5c32c4495e5f05fa1b52169eeb28e9929552af00000dc2ba"} From 76b6d5c5451bbf1404dbc3758c41fc9c449fab2f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Wed, 27 Aug 2025 10:18:03 -0700 Subject: [PATCH 07/91] Update published items on the main actor MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Daniel D’Aquino --- damus/Features/Follows/Models/FollowersModel.swift | 3 ++- damus/Features/Profile/Models/ProfileModel.swift | 9 +++++++-- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/damus/Features/Follows/Models/FollowersModel.swift b/damus/Features/Follows/Models/FollowersModel.swift index 990cd0d12..df1d0ee8e 100644 --- a/damus/Features/Follows/Models/FollowersModel.swift +++ b/damus/Features/Follows/Models/FollowersModel.swift @@ -59,6 +59,7 @@ class FollowersModel: ObservableObject { self.profilesListener = nil } + @MainActor func handle_contact_event(_ ev: NostrEvent) { if has_contact.contains(ev.pubkey) { return @@ -93,7 +94,7 @@ class FollowersModel: ObservableObject { func handle_event(ev: NostrEvent) { if ev.known_kind == .contacts { - handle_contact_event(ev) + Task { await handle_contact_event(ev) } } } } diff --git a/damus/Features/Profile/Models/ProfileModel.swift b/damus/Features/Profile/Models/ProfileModel.swift index 70d52c864..a7f544533 100644 --- a/damus/Features/Profile/Models/ProfileModel.swift +++ b/damus/Features/Profile/Models/ProfileModel.swift @@ -87,7 +87,7 @@ class ProfileModel: ObservableObject, Equatable { } guard let txn = NdbTxn(ndb: damus.ndb) else { return } load_profiles(context: "profile", load: .from_events(events.events), damus_state: damus, txn: txn) - progress += 1 + await bumpUpProgress() } profileListener?.cancel() profileListener = Task { @@ -102,7 +102,7 @@ class ProfileModel: ObservableObject, Equatable { case .eose: break } } - progress += 1 + await bumpUpProgress() } conversationListener?.cancel() conversationListener = Task { @@ -110,6 +110,11 @@ class ProfileModel: ObservableObject, Equatable { } } + @MainActor + func bumpUpProgress() { + progress += 1 + } + func listenToConversations() async { // Only subscribe to conversation events if the profile is not us. 
guard pubkey != damus.pubkey else { From a5aff15491b1d569a1d55d3d3520f22209fb0578 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Wed, 27 Aug 2025 12:10:36 -0700 Subject: [PATCH 08/91] Improve task cancellation management in SubscriptionManager MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The widespread usage of the SubscriptionManager caused new crashes to occur when swapping apps. This was caused due to an access to Ndb memory after Ndb has been closed from the app background signal. The issue was fixed with improved task management logic and ensuring all subscription tasks are finished before closing Ndb. Signed-off-by: Daniel D’Aquino --- damus/ContentView.swift | 1 + .../NostrNetworkManager.swift | 3 +- .../SubscriptionManager.swift | 65 +++++++++++++++++-- damus/Core/Storage/DamusState.swift | 6 +- damus/Shared/Utilities/Log.swift | 1 + nostrdb/Ndb.swift | 6 ++ 6 files changed, 72 insertions(+), 10 deletions(-) diff --git a/damus/ContentView.swift b/damus/ContentView.swift index f9411c9ff..6d72fbdf6 100644 --- a/damus/ContentView.swift +++ b/damus/ContentView.swift @@ -512,6 +512,7 @@ struct ContentView: View { case .background: print("txn: 📙 DAMUS BACKGROUNDED") Task { @MainActor in + await damus_state.nostrNetwork.close() // Close ndb streaming tasks before closing ndb to avoid memory errors damus_state.ndb.close() } break diff --git a/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift b/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift index 0e435f6d7..744513b53 100644 --- a/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift +++ b/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift @@ -234,7 +234,8 @@ class NostrNetworkManager { // MARK: - App lifecycle functions - func close() { + func close() async { + await self.reader.cancelAllTasks() pool.close() } } diff --git a/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift b/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift index 4202c1162..08226f4c5 100644 --- a/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift +++ b/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift @@ -4,6 +4,7 @@ // // Created by Daniel D’Aquino on 2025-03-25. // +import Foundation extension NostrNetworkManager { /// Reads or fetches information from RelayPool and NostrDB, and provides an easier and unified higher-level interface. @@ -14,10 +15,12 @@ extension NostrNetworkManager { class SubscriptionManager { private let pool: RelayPool private var ndb: Ndb + private var taskManager: TaskManager init(pool: RelayPool, ndb: Ndb) { self.pool = pool self.ndb = ndb + self.taskManager = TaskManager() } // MARK: - Reading data from Nostr @@ -35,6 +38,7 @@ extension NostrNetworkManager { let ndbStreamTask = Task { do { for await item in try self.ndb.subscribe(filters: try filters.map({ try NdbFilter(from: $0) })) { + try Task.checkCancellation() switch item { case .eose: continuation.yield(.eose) @@ -48,24 +52,71 @@ extension NostrNetworkManager { } lend(unownedNote) } + try Task.checkCancellation() continuation.yield(.event(borrow: lender)) } } } catch { - Log.error("NDB streaming error: %s", for: .ndb, error.localizedDescription) + Log.error("NDB streaming error: %s", for: .subscription_manager, error.localizedDescription) } + continuation.finish() } let streamTask = Task { - for await _ in self.pool.subscribe(filters: filters, to: desiredRelays) { - // NO-OP. 
Notes will be automatically ingested by NostrDB - // TODO: Improve efficiency of subscriptions? + do { + for await _ in self.pool.subscribe(filters: filters, to: desiredRelays) { + // NO-OP. Notes will be automatically ingested by NostrDB + // TODO: Improve efficiency of subscriptions? + try Task.checkCancellation() + } + } + catch { + Log.error("Network streaming error: %s", for: .subscription_manager, error.localizedDescription) + } + continuation.finish() + } + + Task { + let ndbStreamTaskId = await self.taskManager.add(task: ndbStreamTask) + let streamTaskId = await self.taskManager.add(task: streamTask) + + continuation.onTermination = { @Sendable _ in + Task { + await self.taskManager.cancelAndCleanUp(taskId: ndbStreamTaskId) + await self.taskManager.cancelAndCleanUp(taskId: streamTaskId) + } } } - continuation.onTermination = { @Sendable _ in - streamTask.cancel() // Close the RelayPool stream when caller stops streaming - ndbStreamTask.cancel() + } + } + + func cancelAllTasks() async { + await self.taskManager.cancelAllTasks() + } + + actor TaskManager { + private var tasks: [UUID: Task] = [:] + + func add(task: Task) -> UUID { + let taskId = UUID() + self.tasks[taskId] = task + return taskId + } + + func cancelAndCleanUp(taskId: UUID) async { + self.tasks[taskId]?.cancel() + await self.tasks[taskId]?.value + self.tasks[taskId] = nil + return + } + + func cancelAllTasks() async { + Log.info("Cancelling all SubscriptionManager tasks", for: .subscription_manager) + for (taskId, _) in self.tasks { + Log.info("Cancelling SubscriptionManager task %s", for: .subscription_manager, taskId.uuidString) + await cancelAndCleanUp(taskId: taskId) } + Log.info("Cancelled all SubscriptionManager tasks", for: .subscription_manager) } } } diff --git a/damus/Core/Storage/DamusState.swift b/damus/Core/Storage/DamusState.swift index 1155799e4..a1821e6c2 100644 --- a/damus/Core/Storage/DamusState.swift +++ b/damus/Core/Storage/DamusState.swift @@ -164,8 +164,10 @@ class DamusState: HeadlessDamusState { try await self.push_notification_client.revoke_token() } wallet.disconnect() - nostrNetwork.close() - ndb.close() + Task { + await nostrNetwork.close() // Close ndb streaming tasks before closing ndb to avoid memory errors + ndb.close() + } } static var empty: DamusState { diff --git a/damus/Shared/Utilities/Log.swift b/damus/Shared/Utilities/Log.swift index f5fbc0e97..79b9955a4 100644 --- a/damus/Shared/Utilities/Log.swift +++ b/damus/Shared/Utilities/Log.swift @@ -14,6 +14,7 @@ enum LogCategory: String { case render case storage case networking + case subscription_manager case timeline /// Logs related to Nostr Wallet Connect components case nwc diff --git a/nostrdb/Ndb.swift b/nostrdb/Ndb.swift index d96684830..e1754d567 100644 --- a/nostrdb/Ndb.swift +++ b/nostrdb/Ndb.swift @@ -698,9 +698,13 @@ class Ndb { // Fetch initial results guard let txn = NdbTxn(ndb: self) else { throw .cannotOpenTransaction } + do { try Task.checkCancellation() } catch { throw .cancelled } + // Use our safe wrapper instead of direct C function call let noteIds = try query(with: txn, filters: filters, maxResults: maxSimultaneousResults) + do { try Task.checkCancellation() } catch { throw .cancelled } + // Create a subscription for new events let newEventsStream = ndbSubscribe(filters: filters) @@ -717,6 +721,7 @@ class Ndb { // Create a task to forward events from the subscription stream let forwardingTask = Task { for await item in newEventsStream { + try Task.checkCancellation() continuation.yield(item) } 
continuation.finish() @@ -876,6 +881,7 @@ extension Ndb { case cannotConvertFilter(any Error) case initialQueryFailed case timeout + case cancelled } /// An error that may happen when looking something up From 9620dcf6ef9aed720c4498c96a52451e27a32a78 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Wed, 27 Aug 2025 15:17:22 -0700 Subject: [PATCH 09/91] Fix crash when loading all follows MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This commit fixes a crash that caused the app to crash when getting all the follows from a profile. This issue was caused by a use-after-free memory error on inherited transactions after the original transaction is deinitialized. The issue was fixed by introducing a reference count on all transactions and only deallocating the C transaction when the ref count goes to zero. Signed-off-by: Daniel D’Aquino --- nostrdb/NdbTxn.swift | 50 +++++++++++++++++++++++++++++++------------- 1 file changed, 36 insertions(+), 14 deletions(-) diff --git a/nostrdb/NdbTxn.swift b/nostrdb/NdbTxn.swift index af94f9216..e4130fe6d 100644 --- a/nostrdb/NdbTxn.swift +++ b/nostrdb/NdbTxn.swift @@ -39,6 +39,9 @@ class NdbTxn: RawNdbTxnAccessible { self.txn = active_txn self.inherited = true self.generation = Thread.current.threadDictionary["txn_generation"] as! Int + let ref_count = Thread.current.threadDictionary["ndb_txn_ref_count"] as! Int + let new_ref_count = ref_count + 1 + Thread.current.threadDictionary["ndb_txn_ref_count"] = new_ref_count } else { self.txn = ndb_txn() guard !ndb.is_closed else { return nil } @@ -52,6 +55,7 @@ class NdbTxn: RawNdbTxnAccessible { } self.generation = ndb.generation Thread.current.threadDictionary["ndb_txn"] = self.txn + Thread.current.threadDictionary["ndb_txn_ref_count"] = 1 Thread.current.threadDictionary["txn_generation"] = ndb.generation self.inherited = false } @@ -84,6 +88,20 @@ class NdbTxn: RawNdbTxnAccessible { print("txn: OLD GENERATION (\(self.generation) != \(ndb.generation)), IGNORING") return } + if ndb.is_closed { + print("txn: not closing. db closed") + return + } + if let ref_count = Thread.current.threadDictionary["ndb_txn_ref_count"] as? Int { + let new_ref_count = ref_count - 1 + Thread.current.threadDictionary["ndb_txn_ref_count"] = new_ref_count + assert(new_ref_count >= 0, "NdbTxn reference count should never be below zero") + if new_ref_count <= 0 { + ndb_end_query(&self.txn) + Thread.current.threadDictionary.removeObject(forKey: "ndb_txn") + Thread.current.threadDictionary.removeObject(forKey: "ndb_txn_ref_count") + } + } if inherited { print("txn: not closing. inherited ") return @@ -92,18 +110,11 @@ class NdbTxn: RawNdbTxnAccessible { //print("txn: not closing. moved") return } - if ndb.is_closed { - print("txn: not closing. db closed") - return - } #if TXNDEBUG txn_count -= 1; print("txn: close gen\(generation) '\(name)' \(txn_count)") #endif - ndb_end_query(&self.txn) - //self.skip_close = true - Thread.current.threadDictionary.removeObject(forKey: "ndb_txn") } // functor @@ -159,6 +170,9 @@ class SafeNdbTxn { txn = active_txn inherited = true generation = Thread.current.threadDictionary["txn_generation"] as! Int + let ref_count = Thread.current.threadDictionary["ndb_txn_ref_count"] as! 
Int + let new_ref_count = ref_count + 1 + Thread.current.threadDictionary["ndb_txn_ref_count"] = new_ref_count } else { txn = ndb_txn() guard !ndb.is_closed else { return nil } @@ -172,6 +186,7 @@ class SafeNdbTxn { } generation = ndb.generation Thread.current.threadDictionary["ndb_txn"] = txn + Thread.current.threadDictionary["ndb_txn_ref_count"] = 1 Thread.current.threadDictionary["txn_generation"] = ndb.generation inherited = false } @@ -199,6 +214,20 @@ class SafeNdbTxn { print("txn: OLD GENERATION (\(self.generation) != \(ndb.generation)), IGNORING") return } + if ndb.is_closed { + print("txn: not closing. db closed") + return + } + if let ref_count = Thread.current.threadDictionary["ndb_txn_ref_count"] as? Int { + let new_ref_count = ref_count - 1 + Thread.current.threadDictionary["ndb_txn_ref_count"] = new_ref_count + assert(new_ref_count >= 0, "NdbTxn reference count should never be below zero") + if new_ref_count <= 0 { + ndb_end_query(&self.txn) + Thread.current.threadDictionary.removeObject(forKey: "ndb_txn") + Thread.current.threadDictionary.removeObject(forKey: "ndb_txn_ref_count") + } + } if inherited { print("txn: not closing. inherited ") return @@ -207,18 +236,11 @@ class SafeNdbTxn { //print("txn: not closing. moved") return } - if ndb.is_closed { - print("txn: not closing. db closed") - return - } #if TXNDEBUG txn_count -= 1; print("txn: close gen\(generation) '\(name)' \(txn_count)") #endif - ndb_end_query(&self.txn) - //self.skip_close = true - Thread.current.threadDictionary.removeObject(forKey: "ndb_txn") } // functor From ab6ea7a9c170a3c84fb8e353ae6c4ec995578ac0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Wed, 27 Aug 2025 17:06:54 -0700 Subject: [PATCH 10/91] Fix issue where repost and like counts would not appear MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Previously, HomeModel could listen to all subscriptions throughout the app, and it would handle reaction and repost counting. Once moved to the local relay model, HomeModel no longer had access to all subscriptions, causing those counts to disappear. The issue was fixed by doing the counting from ThreadModel itself, which better isolates concerns throughout the app. 
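
In isolation, the counting approach looks roughly like the sketch below. This is illustrative only, with placeholder types (NoteKind, Note, Tally, ThreadCounts); the actual change simply feeds damus_state.boosts and damus_state.likes from ThreadModel.handle_event, as shown in the diff that follows.

    import Foundation

    // Placeholder types for illustration; the app's real model is NostrEvent,
    // and the per-note stores behind damus_state.boosts / damus_state.likes.
    enum NoteKind { case text, boost, like }

    struct Note: Hashable {
        let id: String
        let kind: NoteKind
        let targetId: String?   // the note a boost/like refers to, if any
    }

    final class Tally {
        private var idsByTarget: [String: Set<String>] = [:]
        func add(_ note: Note, target: String) {
            idsByTarget[target, default: []].insert(note.id)
        }
        func count(for target: String) -> Int { idsByTarget[target]?.count ?? 0 }
    }

    final class ThreadCounts {
        let rootId: String
        let boosts = Tally()
        let likes = Tally()

        init(rootId: String) { self.rootId = rootId }

        // Route kinds the way ThreadModel.handle_event now does:
        // boosts and likes are counted where the thread is subscribed,
        // instead of depending on HomeModel seeing every subscription.
        func handle(_ note: Note) {
            guard let target = note.targetId, target == rootId else { return }
            switch note.kind {
            case .boost: boosts.add(note, target: target)
            case .like:  likes.add(note, target: target)
            case .text:  break
            }
        }
    }

The unit test added below exercises this path end to end: it ingests the repost fixtures from test_notes.jsonl and polls the action bar model until the boost count for the target note reaches 5.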
Signed-off-by: Daniel D’Aquino --- damus.xcodeproj/project.pbxproj | 4 ++ damus/Features/Chat/Models/ThreadModel.swift | 6 ++ .../ThreadModelTests.swift | 63 +++++++++++++++++++ 3 files changed, 73 insertions(+) create mode 100644 damusTests/NostrNetworkManagerTests/ThreadModelTests.swift diff --git a/damus.xcodeproj/project.pbxproj b/damus.xcodeproj/project.pbxproj index 84ecdfc84..c1ec36ec6 100644 --- a/damus.xcodeproj/project.pbxproj +++ b/damus.xcodeproj/project.pbxproj @@ -1756,6 +1756,7 @@ D7EB00B12CD59C8D00660C07 /* PresentFullScreenItemNotify.swift in Sources */ = {isa = PBXBuildFile; fileRef = D7EB00AF2CD59C8300660C07 /* PresentFullScreenItemNotify.swift */; }; D7EBF8BB2E59022A004EAE29 /* NostrNetworkManagerTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = D7EBF8BA2E5901F7004EAE29 /* NostrNetworkManagerTests.swift */; }; D7EBF8BE2E59470D004EAE29 /* test_notes.jsonl in Resources */ = {isa = PBXBuildFile; fileRef = D7EBF8BD2E594708004EAE29 /* test_notes.jsonl */; }; + D7EBF8C02E5D39DC004EAE29 /* ThreadModelTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = D7EBF8BF2E5D39D1004EAE29 /* ThreadModelTests.swift */; }; D7EDED152B11776B0018B19C /* LibreTranslateServer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3AE45AF5297BB2E700C1D842 /* LibreTranslateServer.swift */; }; D7EDED162B1177840018B19C /* LNUrls.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4CB883B5297730E400DC99E7 /* LNUrls.swift */; }; D7EDED172B1177960018B19C /* TranslationService.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3AAA95C9298DF87B00F3D526 /* TranslationService.swift */; }; @@ -2692,6 +2693,7 @@ D7EB00AF2CD59C8300660C07 /* PresentFullScreenItemNotify.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PresentFullScreenItemNotify.swift; sourceTree = ""; }; D7EBF8BA2E5901F7004EAE29 /* NostrNetworkManagerTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NostrNetworkManagerTests.swift; sourceTree = ""; }; D7EBF8BD2E594708004EAE29 /* test_notes.jsonl */ = {isa = PBXFileReference; lastKnownFileType = text; path = test_notes.jsonl; sourceTree = ""; }; + D7EBF8BF2E5D39D1004EAE29 /* ThreadModelTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ThreadModelTests.swift; sourceTree = ""; }; D7EDED1B2B1178FE0018B19C /* NoteContent.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NoteContent.swift; sourceTree = ""; }; D7EDED1D2B11797D0018B19C /* LongformEvent.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LongformEvent.swift; sourceTree = ""; }; D7EDED202B117DCA0018B19C /* SequenceUtils.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SequenceUtils.swift; sourceTree = ""; }; @@ -5001,6 +5003,7 @@ children = ( D7EBF8BD2E594708004EAE29 /* test_notes.jsonl */, D7EBF8BA2E5901F7004EAE29 /* NostrNetworkManagerTests.swift */, + D7EBF8BF2E5D39D1004EAE29 /* ThreadModelTests.swift */, ); path = NostrNetworkManagerTests; sourceTree = ""; @@ -5934,6 +5937,7 @@ 4C9B0DEE2A65A75F00CBDA21 /* AttrStringTestExtensions.swift in Sources */, 4C19AE552A5D977400C90DB7 /* HashtagTests.swift in Sources */, D72927AD2BAB515C00F93E90 /* RelayURLTests.swift in Sources */, + D7EBF8C02E5D39DC004EAE29 /* ThreadModelTests.swift in Sources */, 4C0ED07F2D7A1E260020D8A2 /* Benchmarking.swift in Sources */, 3A3040ED29A5CB86008A0F29 /* ReplyDescriptionTests.swift in Sources */, D71DC1EC2A9129C3006E207C /* PostViewTests.swift in 
Sources */, diff --git a/damus/Features/Chat/Models/ThreadModel.swift b/damus/Features/Chat/Models/ThreadModel.swift index 332b11de9..e9df21307 100644 --- a/damus/Features/Chat/Models/ThreadModel.swift +++ b/damus/Features/Chat/Models/ThreadModel.swift @@ -190,6 +190,12 @@ class ThreadModel: ObservableObject { self.add_event(ev, keypair: damus_state.keypair) } } + else if ev.known_kind == .boost { + damus_state.boosts.add_event(ev, target: original_event.id) + } + else if ev.known_kind == .like { + damus_state.likes.add_event(ev, target: original_event.id) + } } // MARK: External control interface diff --git a/damusTests/NostrNetworkManagerTests/ThreadModelTests.swift b/damusTests/NostrNetworkManagerTests/ThreadModelTests.swift new file mode 100644 index 000000000..ee6b582cb --- /dev/null +++ b/damusTests/NostrNetworkManagerTests/ThreadModelTests.swift @@ -0,0 +1,63 @@ +// +// ThreadModelTests.swift +// damus +// +// Created by Daniel D’Aquino on 2025-08-25. +// + + +import XCTest +@testable import damus + +final class ThreadModelTests: XCTestCase { + var damusState: DamusState? = nil + + override func setUpWithError() throws { + // Put setup code here. This method is called before the invocation of each test method in the class. + damusState = generate_test_damus_state(mock_profile_info: nil) + + let notesJSONL = getTestNotesJSONL() + + for noteText in notesJSONL.split(separator: "\n") { + let _ = damusState!.ndb.process_event("[\"EVENT\",\"subid\",\(String(noteText))]") + } + } + + override func tearDownWithError() throws { + // Put teardown code here. This method is called after the invocation of each test method in the class. + damusState = nil + } + + func getTestNotesJSONL() -> String { + // Get the path for the test_notes.jsonl file in the same folder as this test file + let testBundle = Bundle(for: type(of: self)) + let fileURL = testBundle.url(forResource: "test_notes", withExtension: "jsonl")! + + // Load the contents of the file + return try! String(contentsOf: fileURL, encoding: .utf8) + } + + /// Tests loading up a thread and checking if the repost count loads as expected. + func testActionBarModel() throws { + let testNoteJson = """ +{"content":"https://smartflowsocial.s3.us-east-1.amazonaws.com/clients/cm7kdrwdk0000qyu6fwtd96ui/0cab65a9-0142-48e3-abd7-94d20e30d3b2.jpg\n\n","pubkey":"71ecabd8b6b33548e075ff01b31568ffda19d0ac2788067d99328c6de4885975","tags":[["t","meme"],["t","memes"],["t","memestr"],["t","plebchain"]],"created_at":1755694800,"id":"64b26d0a587f5f894470e1e4783756b4d8ba971226de975ee30ac1b69970d5a1","kind":1,"sig":"c000794da8c4f7549b546630b16ed17f6edc0af0269b8c46ce14f5b1937431e7575b78351bc152007ebab5720028e5fe4b738f99e8887f273d35dd2217d1cc3d"} +""" + let testShouldComplete = XCTestExpectation(description: "Test should complete") + Task { + let note = NostrEvent.owned_from_json(json: testNoteJson)! + let threadModel = await ThreadModel(event: note, damus_state: damusState!) + await threadModel.subscribe() + let actionBarModel = make_actionbar_model(ev: note.id, damus: damusState!) 
+ while true { + try await Task.sleep(nanoseconds: 500_000_000) + actionBarModel.update(damus: damusState!, evid: note.id) + if actionBarModel.boosts >= 5 { + break + } + } + XCTAssertEqual(actionBarModel.boosts, 5) + testShouldComplete.fulfill() + } + wait(for: [testShouldComplete], timeout: 10.0) + } +} From 739a3a0b8c47a1530135b4b441dceea2a07d1c20 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Wed, 27 Aug 2025 17:45:52 -0700 Subject: [PATCH 11/91] Add more test cases to SubscriptionManager tests MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Daniel D’Aquino --- .../NostrNetworkManagerTests/NostrNetworkManagerTests.swift | 2 ++ 1 file changed, 2 insertions(+) diff --git a/damusTests/NostrNetworkManagerTests/NostrNetworkManagerTests.swift b/damusTests/NostrNetworkManagerTests/NostrNetworkManagerTests.swift index 91ff65dd9..cb92ccf2a 100644 --- a/damusTests/NostrNetworkManagerTests/NostrNetworkManagerTests.swift +++ b/damusTests/NostrNetworkManagerTests/NostrNetworkManagerTests.swift @@ -82,5 +82,7 @@ class NostrNetworkManagerTests: XCTestCase { ensureSubscribeGetsAllExpectedNotes(filter: NostrFilter(authors: [Pubkey(hex: "32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245")!]), expectedCount: 22) ensureSubscribeGetsAllExpectedNotes(filter: NostrFilter(kinds: [.boost], referenced_ids: [NoteId(hex: "64b26d0a587f5f894470e1e4783756b4d8ba971226de975ee30ac1b69970d5a1")!]), expectedCount: 5) ensureSubscribeGetsAllExpectedNotes(filter: NostrFilter(kinds: [.text, .boost, .zap], referenced_ids: [NoteId(hex: "64b26d0a587f5f894470e1e4783756b4d8ba971226de975ee30ac1b69970d5a1")!], limit: 500), expectedCount: 5) + ensureSubscribeGetsAllExpectedNotes(filter: NostrFilter(kinds: [.text], limit: 10), expectedCount: 10) + ensureSubscribeGetsAllExpectedNotes(filter: NostrFilter(kinds: [.text], until: UInt32(Date.now.timeIntervalSince1970), limit: 10), expectedCount: 10) } } From 46c3667ec36547ac8e27ad6c37d38c2cf8ae1194 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Wed, 27 Aug 2025 18:41:40 -0700 Subject: [PATCH 12/91] Update setting on main actor to avoid crashes MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Daniel D’Aquino --- damus/Features/Posting/Models/DraftsModel.swift | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/damus/Features/Posting/Models/DraftsModel.swift b/damus/Features/Posting/Models/DraftsModel.swift index ab478691e..14d0b71b3 100644 --- a/damus/Features/Posting/Models/DraftsModel.swift +++ b/damus/Features/Posting/Models/DraftsModel.swift @@ -257,7 +257,9 @@ class Drafts: ObservableObject { damus_state.nostrNetwork.sendToNostrDB(event: draft_event) } - damus_state.settings.draft_event_ids = draft_events.map({ $0.id.hex() }) + DispatchQueue.main.async { + damus_state.settings.draft_event_ids = draft_events.map({ $0.id.hex() }) + } } } From c4c3656f906190fdf1eede1b776b16757eed2978 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Wed, 27 Aug 2025 18:59:20 -0700 Subject: [PATCH 13/91] Multi-session subscriptions and RelayPool reopening MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This commit implements nostr network subscriptions that survive between sessions, as well as improved handling of RelayPool opening/closing with respect to the app lifecycle. This prevents stale data after users swap out and back into Damus. 
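
The session-restart behaviour, taken on its own, has roughly the following shape. This is a simplified, self-contained sketch with placeholder names (LifecycleChecks, multiSessionStream); the real loop lives in SubscriptionManager.subscribe in the diff below, which additionally logs per-subscription ids.

    import Foundation

    // Placeholder for the two readiness checks the real code performs
    // (ndb not closed, RelayPool marked open).
    struct LifecycleChecks: Sendable {
        var storageOpen: @Sendable () -> Bool
        var relayPoolOpen: @Sendable () -> Bool
    }

    /// Wraps a single-session stream in a loop that keeps re-subscribing
    /// until the caller cancels, so a stream handed out to a view survives
    /// the app being backgrounded and foregrounded again.
    func multiSessionStream<Item: Sendable>(
        checks: LifecycleChecks,
        retryDelay: UInt64 = 1_000_000_000,   // 1 second, in nanoseconds
        session: @escaping @Sendable () -> AsyncStream<Item>
    ) -> AsyncStream<Item> {
        AsyncStream { continuation in
            let task = Task {
                while !Task.isCancelled {
                    do {
                        // Do not touch storage or the network while they are closed.
                        guard checks.storageOpen(), checks.relayPoolOpen() else {
                            try await Task.sleep(nanoseconds: retryDelay)
                            continue
                        }
                        // Stream one session; when it finishes (e.g. the app was
                        // backgrounded and the pool closed), pause and start over.
                        for await item in session() {
                            try Task.checkCancellation()
                            continuation.yield(item)
                        }
                        try await Task.sleep(nanoseconds: retryDelay)
                    } catch {
                        break   // cancellation ends the retry loop
                    }
                }
                continuation.finish()
            }
            continuation.onTermination = { @Sendable _ in task.cancel() }
        }
    }

The other half of the change is re-opening the pool when the scene becomes active again (see the ContentView and RelayPool hunks below), which is what lets a sleeping loop like the one above resume streaming.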
Signed-off-by: Daniel D’Aquino --- damus/ContentView.swift | 1 + damus/Core/NIPs/NIP65/NIP65.swift | 4 ++ .../NostrNetworkManager.swift | 1 + .../SubscriptionManager.swift | 48 +++++++++++++++++++ damus/Core/Nostr/RelayPool.swift | 3 ++ .../NostrNetworkManagerTests.swift | 2 + .../ThreadModelTests.swift | 2 + nostrdb/Ndb.swift | 1 + 8 files changed, 62 insertions(+) diff --git a/damus/ContentView.swift b/damus/ContentView.swift index 6d72fbdf6..e87251fff 100644 --- a/damus/ContentView.swift +++ b/damus/ContentView.swift @@ -521,6 +521,7 @@ struct ContentView: View { break case .active: print("txn: 📙 DAMUS ACTIVE") + damus_state.nostrNetwork.connect() damus_state.nostrNetwork.ping() @unknown default: break diff --git a/damus/Core/NIPs/NIP65/NIP65.swift b/damus/Core/NIPs/NIP65/NIP65.swift index 13c9bcd61..21af546cf 100644 --- a/damus/Core/NIPs/NIP65/NIP65.swift +++ b/damus/Core/NIPs/NIP65/NIP65.swift @@ -42,6 +42,10 @@ extension NIP65 { self.relays = Self.relayOrderedDictionary(from: relays) } + init() { + self.relays = Self.relayOrderedDictionary(from: []) + } + init(relays: [RelayURL]) { let relayItemList = relays.map({ RelayItem(url: $0, rwConfiguration: .readWrite) }) self.relays = Self.relayOrderedDictionary(from: relayItemList) diff --git a/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift b/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift index 744513b53..daa601834 100644 --- a/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift +++ b/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift @@ -50,6 +50,7 @@ class NostrNetworkManager { /// Connects the app to the Nostr network func connect() { self.userRelayList.connect() + self.pool.open = true } func disconnect() { diff --git a/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift b/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift index 08226f4c5..0ee285e31 100644 --- a/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift +++ b/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift @@ -34,6 +34,54 @@ extension NostrNetworkManager { /// - Parameter filters: The nostr filters to specify what kind of data to subscribe to /// - Returns: An async stream of nostr data func subscribe(filters: [NostrFilter], to desiredRelays: [RelayURL]? = nil) -> AsyncStream { + return AsyncStream { continuation in + let subscriptionId = UUID() + Log.info("Starting subscription %s: %s", for: .subscription_manager, subscriptionId.uuidString, filters.debugDescription) + let multiSessionStreamingTask = Task { + while !Task.isCancelled { + do { + guard !self.ndb.is_closed else { + Log.info("%s: Ndb closed. Sleeping for 1 second before resuming.", for: .subscription_manager, subscriptionId.uuidString) + try await Task.sleep(nanoseconds: 1_000_000_000) + continue + } + guard self.pool.open else { + Log.info("%s: RelayPool closed. Sleeping for 1 second before resuming.", for: .subscription_manager, subscriptionId.uuidString) + try await Task.sleep(nanoseconds: 1_000_000_000) + continue + } + Log.info("%s: Streaming.", for: .subscription_manager, subscriptionId.uuidString) + for await item in self.sessionSubscribe(filters: filters, to: desiredRelays) { + try Task.checkCancellation() + continuation.yield(item) + } + Log.info("%s: Session subscription ended. 
Sleeping for 1 second before resuming.", for: .subscription_manager, subscriptionId.uuidString) + try await Task.sleep(nanoseconds: 1_000_000_000) + } + catch { + Log.error("%s: Error: %s", for: .subscription_manager, subscriptionId.uuidString, error.localizedDescription) + } + } + Log.info("%s: Terminated.", for: .subscription_manager, subscriptionId.uuidString) + } + continuation.onTermination = { @Sendable _ in + Log.info("%s: Cancelled.", for: .subscription_manager, subscriptionId.uuidString) + multiSessionStreamingTask.cancel() + } + } + } + + /// Subscribes to data from the user's relays + /// + /// Only survives for a single session. This exits after the app is backgrounded + /// + /// ## Implementation notes + /// + /// - When we migrate to the local relay model, we should modify this function to stream directly from NostrDB + /// + /// - Parameter filters: The nostr filters to specify what kind of data to subscribe to + /// - Returns: An async stream of nostr data + private func sessionSubscribe(filters: [NostrFilter], to desiredRelays: [RelayURL]? = nil) -> AsyncStream { return AsyncStream { continuation in let ndbStreamTask = Task { do { diff --git a/damus/Core/Nostr/RelayPool.swift b/damus/Core/Nostr/RelayPool.swift index 4026bebd5..6f3f7443a 100644 --- a/damus/Core/Nostr/RelayPool.swift +++ b/damus/Core/Nostr/RelayPool.swift @@ -27,6 +27,7 @@ struct SeenEvent: Hashable { /// Establishes and manages connections and subscriptions to a list of relays. class RelayPool { private(set) var relays: [Relay] = [] + var open: Bool = false var handlers: [RelayHandler] = [] var request_queue: [QueuedRequest] = [] var seen: [NoteId: Set] = [:] @@ -46,6 +47,7 @@ class RelayPool { func close() { disconnect() relays = [] + open = false handlers = [] request_queue = [] seen.removeAll() @@ -181,6 +183,7 @@ class RelayPool { } func connect(to: [RelayURL]? = nil) { + open = true let relays = to.map{ get_relays($0) } ?? self.relays for relay in relays { relay.connection.connect() diff --git a/damusTests/NostrNetworkManagerTests/NostrNetworkManagerTests.swift b/damusTests/NostrNetworkManagerTests/NostrNetworkManagerTests.swift index cb92ccf2a..244b4061e 100644 --- a/damusTests/NostrNetworkManagerTests/NostrNetworkManagerTests.swift +++ b/damusTests/NostrNetworkManagerTests/NostrNetworkManagerTests.swift @@ -15,6 +15,8 @@ class NostrNetworkManagerTests: XCTestCase { override func setUpWithError() throws { // Put setup code here. This method is called before the invocation of each test method in the class. damusState = generate_test_damus_state(mock_profile_info: nil) + try! damusState?.nostrNetwork.userRelayList.set(userRelayList: NIP65.RelayList()) + damusState?.nostrNetwork.connect() let notesJSONL = getTestNotesJSONL() diff --git a/damusTests/NostrNetworkManagerTests/ThreadModelTests.swift b/damusTests/NostrNetworkManagerTests/ThreadModelTests.swift index ee6b582cb..d0824b14c 100644 --- a/damusTests/NostrNetworkManagerTests/ThreadModelTests.swift +++ b/damusTests/NostrNetworkManagerTests/ThreadModelTests.swift @@ -15,6 +15,8 @@ final class ThreadModelTests: XCTestCase { override func setUpWithError() throws { // Put setup code here. This method is called before the invocation of each test method in the class. damusState = generate_test_damus_state(mock_profile_info: nil) + try! 
damusState?.nostrNetwork.userRelayList.set(userRelayList: NIP65.RelayList()) + damusState?.nostrNetwork.connect() let notesJSONL = getTestNotesJSONL() diff --git a/nostrdb/Ndb.swift b/nostrdb/Ndb.swift index e1754d567..f0de8f124 100644 --- a/nostrdb/Ndb.swift +++ b/nostrdb/Ndb.swift @@ -712,6 +712,7 @@ class Ndb { return AsyncStream { continuation in // Stream all results already present in the database for noteId in noteIds { + if Task.isCancelled { return } continuation.yield(.event(noteId)) } From 809c8c80ac70002a6982890b731f8de50c89265e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Fri, 29 Aug 2025 14:57:17 -0700 Subject: [PATCH 14/91] Fix missing relay list from profile MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Daniel D’Aquino --- damus/Features/Profile/Models/ProfileModel.swift | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/damus/Features/Profile/Models/ProfileModel.swift b/damus/Features/Profile/Models/ProfileModel.swift index a7f544533..5004b7258 100644 --- a/damus/Features/Profile/Models/ProfileModel.swift +++ b/damus/Features/Profile/Models/ProfileModel.swift @@ -92,8 +92,9 @@ class ProfileModel: ObservableObject, Equatable { profileListener?.cancel() profileListener = Task { var profile_filter = NostrFilter(kinds: [.contacts, .metadata, .boost]) + var relay_list_filter = NostrFilter(kinds: [.relay_list], authors: [pubkey]) profile_filter.authors = [pubkey] - for await item in damus.nostrNetwork.reader.subscribe(filters: [profile_filter]) { + for await item in damus.nostrNetwork.reader.subscribe(filters: [profile_filter, relay_list_filter]) { switch item { case .event(let borrow): try? borrow { event in From 9709e69dda2f6ab2352d61ff44703b03d4929271 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Fri, 29 Aug 2025 15:53:44 -0700 Subject: [PATCH 15/91] Fix forever loading Universe view MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Daniel D’Aquino --- damus/Features/Search/Models/SearchHomeModel.swift | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/damus/Features/Search/Models/SearchHomeModel.swift b/damus/Features/Search/Models/SearchHomeModel.swift index ae9505d8e..0dec9fc21 100644 --- a/damus/Features/Search/Models/SearchHomeModel.swift +++ b/damus/Features/Search/Models/SearchHomeModel.swift @@ -46,8 +46,7 @@ class SearchHomeModel: ObservableObject { let to_relays = damus_state.nostrNetwork.ourRelayDescriptors .map { $0.url } .filter { !damus_state.relay_filters.is_filtered(timeline: .search, relay_id: $0) } - - for await item in damus_state.nostrNetwork.reader.subscribe(filters: [get_base_filter()], to: to_relays) { + outerLoop: for await item in damus_state.nostrNetwork.reader.subscribe(filters: [get_base_filter()], to: to_relays) { switch item { case .event(let borrow): var event: NostrEvent? 
= nil @@ -56,7 +55,8 @@ class SearchHomeModel: ObservableObject { } guard let event else { return } await self.handleEvent(event) - case .eose: break + case .eose: + break outerLoop } } DispatchQueue.main.async { From 0f26d50e089e0cf9623ccba3280f65a15aa2cc11 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Fri, 29 Aug 2025 15:57:25 -0700 Subject: [PATCH 16/91] Prevent publishing changes to Observable outside the main thread MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Daniel D’Aquino --- damus/Features/Events/Models/EventsModel.swift | 7 ++++++- .../FollowPack/Models/FollowPackModel.swift | 7 +++++-- .../NIP05/Models/NIP05DomainEventsModel.swift | 15 ++++++++++----- damus/Features/Search/Models/SearchModel.swift | 3 ++- damus/Features/Timeline/Models/HomeModel.swift | 3 ++- damus/Shared/Utilities/EventHolder.swift | 6 +++++- 6 files changed, 30 insertions(+), 11 deletions(-) diff --git a/damus/Features/Events/Models/EventsModel.swift b/damus/Features/Events/Models/EventsModel.swift index 381a62700..4c586895c 100644 --- a/damus/Features/Events/Models/EventsModel.swift +++ b/damus/Features/Events/Models/EventsModel.swift @@ -78,7 +78,11 @@ class EventsModel: ObservableObject { event = ev.toOwned() } guard let event else { return } - if events.insert(event) { objectWillChange.send() } + Task { + if await events.insert(event) { + DispatchQueue.main.async { self.objectWillChange.send() } + } + } case .eose: break } @@ -93,6 +97,7 @@ class EventsModel: ObservableObject { loadingTask?.cancel() } + @MainActor private func handle_event(relay_id: RelayURL, ev: NostrEvent) { if events.insert(ev) { objectWillChange.send() diff --git a/damus/Features/FollowPack/Models/FollowPackModel.swift b/damus/Features/FollowPack/Models/FollowPackModel.swift index b542b50cd..5ead6f5b7 100644 --- a/damus/Features/FollowPack/Models/FollowPackModel.swift +++ b/damus/Features/FollowPack/Models/FollowPackModel.swift @@ -25,6 +25,7 @@ class FollowPackModel: ObservableObject { func subscribe(follow_pack_users: [Pubkey]) { loading = true + self.listener?.cancel() self.listener = Task { await self.listenForUpdates(follow_pack_users: follow_pack_users) } @@ -52,8 +53,10 @@ class FollowPackModel: ObservableObject { guard let event else { return } if event.is_textlike && should_show_event(state: damus_state, ev: event) && !event.is_reply() { - if self.events.insert(event) { - self.objectWillChange.send() + if await self.events.insert(event) { + DispatchQueue.main.async { + self.objectWillChange.send() + } } } case .eose: diff --git a/damus/Features/NIP05/Models/NIP05DomainEventsModel.swift b/damus/Features/NIP05/Models/NIP05DomainEventsModel.swift index 8c0d4cc5c..55a866757 100644 --- a/damus/Features/NIP05/Models/NIP05DomainEventsModel.swift +++ b/damus/Features/NIP05/Models/NIP05DomainEventsModel.swift @@ -67,18 +67,21 @@ class NIP05DomainEventsModel: ObservableObject { for await item in state.nostrNetwork.reader.subscribe(filters: [filter]) { switch item { case .event(borrow: let borrow): - try? borrow { event in - self.add_event(event.toOwned()) + var event: NostrEvent? = nil + try? 
borrow { ev in + event = ev.toOwned() guard let txn = NdbTxn(ndb: state.ndb) else { return } load_profiles(context: "search", load: .from_events(self.events.all_events), damus_state: state, txn: txn) } + guard let event else { return } + await self.add_event(event) case .eose: continue } } } - func add_event(_ ev: NostrEvent) { + func add_event(_ ev: NostrEvent) async { if !event_matches_filter(ev, filter: filter) { return } @@ -87,8 +90,10 @@ class NIP05DomainEventsModel: ObservableObject { return } - if self.events.insert(ev) { - objectWillChange.send() + if await self.events.insert(ev) { + DispatchQueue.main.async { + self.objectWillChange.send() + } } } } diff --git a/damus/Features/Search/Models/SearchModel.swift b/damus/Features/Search/Models/SearchModel.swift index 0de7e2939..f2a9229c4 100644 --- a/damus/Features/Search/Models/SearchModel.swift +++ b/damus/Features/Search/Models/SearchModel.swift @@ -49,7 +49,7 @@ class SearchModel: ObservableObject { try? borrow { ev in let event = ev.toOwned() if event.is_textlike && event.should_show_event { - self.add_event(event) + Task { await self.add_event(event) } } } case .eose: @@ -67,6 +67,7 @@ class SearchModel: ObservableObject { listener = nil } + @MainActor func add_event(_ ev: NostrEvent) { if !event_matches_filter(ev, filter: search) { return diff --git a/damus/Features/Timeline/Models/HomeModel.swift b/damus/Features/Timeline/Models/HomeModel.swift index d9566364e..ab0649e49 100644 --- a/damus/Features/Timeline/Models/HomeModel.swift +++ b/damus/Features/Timeline/Models/HomeModel.swift @@ -765,6 +765,7 @@ class HomeModel: ContactsDelegate { } } + @MainActor func insert_home_event(_ ev: NostrEvent) { if events.insert(ev) { handle_last_event(ev: ev, timeline: .home) @@ -798,7 +799,7 @@ class HomeModel: ContactsDelegate { switch context { case .home: - insert_home_event(ev) + Task { await insert_home_event(ev) } case .notifications: handle_notification(ev: ev) case .dms, .contacts, .initialRelayList, .initialContactList, .nwc: diff --git a/damus/Shared/Utilities/EventHolder.swift b/damus/Shared/Utilities/EventHolder.swift index 144339645..868144aee 100644 --- a/damus/Shared/Utilities/EventHolder.swift +++ b/damus/Shared/Utilities/EventHolder.swift @@ -38,6 +38,7 @@ class EventHolder: ObservableObject, ScrollQueue { self.incoming = self.incoming.filter(isIncluded) } + @MainActor func insert(_ ev: NostrEvent) -> Bool { if should_queue { return insert_queued(ev) @@ -46,6 +47,7 @@ class EventHolder: ObservableObject, ScrollQueue { } } + @MainActor private func insert_immediate(_ ev: NostrEvent) -> Bool { if has_event.contains(ev.id) { return false @@ -86,7 +88,9 @@ class EventHolder: ObservableObject, ScrollQueue { } if changed { - self.objectWillChange.send() + DispatchQueue.main.async { + self.objectWillChange.send() + } } self.incoming = [] From de70d191357b5f64f39044a3ecfc57703d1e81b2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Mon, 1 Sep 2025 12:13:52 -0700 Subject: [PATCH 17/91] Fix NIP-05 timeline crash MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Daniel D’Aquino --- .../NotificationService.swift | 62 +++++++++---------- damus/Features/Chat/ReplyQuoteView.swift | 11 +++- .../FollowPack/Models/FollowPackModel.swift | 3 +- .../Muting/Models/MutedThreadsManager.swift | 1 + .../Muting/Models/MutelistManager.swift | 3 +- .../NIP05/Models/NIP05DomainEventsModel.swift | 3 +- .../Models/NotificationsManager.swift | 8 +-- 
.../Search/Models/SearchHomeModel.swift | 1 + .../Features/Search/Models/SearchModel.swift | 1 + .../Timeline/Models/ContentFilters.swift | 3 + .../Features/Timeline/Models/HomeModel.swift | 34 +++++++--- 11 files changed, 81 insertions(+), 49 deletions(-) diff --git a/DamusNotificationService/NotificationService.swift b/DamusNotificationService/NotificationService.swift index 443170845..0eb395b1d 100644 --- a/DamusNotificationService/NotificationService.swift +++ b/DamusNotificationService/NotificationService.swift @@ -67,40 +67,40 @@ class NotificationService: UNNotificationServiceExtension { nip05: profile?.nip05) }() let sender_pubkey = nostr_event.pubkey + + Task { - // Don't show notification details that match mute list. - // TODO: Remove this code block once we get notification suppression entitlement from Apple. It will be covered by the `guard should_display_notification` block - if state.mutelist_manager.is_event_muted(nostr_event) { - // We cannot really suppress muted notifications until we have the notification supression entitlement. - // The best we can do if we ever get those muted notifications (which we generally won't due to server-side processing) is to obscure the details - let content = UNMutableNotificationContent() - content.title = NSLocalizedString("Muted event", comment: "Title for a push notification which has been muted") - content.body = NSLocalizedString("This is an event that has been muted according to your mute list rules. We cannot suppress this notification, but we obscured the details to respect your preferences", comment: "Description for a push notification which has been muted, and explanation that we cannot suppress it") - content.sound = UNNotificationSound.default - contentHandler(content) - return - } - - guard should_display_notification(state: state, event: nostr_event, mode: .push) else { - Log.debug("should_display_notification failed", for: .push_notifications) - // We should not display notification for this event. Suppress notification. - // contentHandler(UNNotificationContent()) - // TODO: We cannot really suppress until we have the notification supression entitlement. Show the raw notification - contentHandler(request.content) - return - } - - guard let notification_object = generate_local_notification_object(ndb: state.ndb, from: nostr_event, state: state) else { - Log.debug("generate_local_notification_object failed", for: .push_notifications) - // We could not process this notification. Probably an unsupported nostr event kind. Suppress. - // contentHandler(UNNotificationContent()) - // TODO: We cannot really suppress until we have the notification supression entitlement. Show the raw notification - contentHandler(request.content) - return - } + // Don't show notification details that match mute list. + // TODO: Remove this code block once we get notification suppression entitlement from Apple. It will be covered by the `guard should_display_notification` block + if await state.mutelist_manager.is_event_muted(nostr_event) { + // We cannot really suppress muted notifications until we have the notification supression entitlement. 
+ // The best we can do if we ever get those muted notifications (which we generally won't due to server-side processing) is to obscure the details + let content = UNMutableNotificationContent() + content.title = NSLocalizedString("Muted event", comment: "Title for a push notification which has been muted") + content.body = NSLocalizedString("This is an event that has been muted according to your mute list rules. We cannot suppress this notification, but we obscured the details to respect your preferences", comment: "Description for a push notification which has been muted, and explanation that we cannot suppress it") + content.sound = UNNotificationSound.default + contentHandler(content) + return + } + guard await should_display_notification(state: state, event: nostr_event, mode: .push) else { + Log.debug("should_display_notification failed", for: .push_notifications) + // We should not display notification for this event. Suppress notification. + // contentHandler(UNNotificationContent()) + // TODO: We cannot really suppress until we have the notification supression entitlement. Show the raw notification + contentHandler(request.content) + return + } - Task { + guard let notification_object = generate_local_notification_object(ndb: state.ndb, from: nostr_event, state: state) else { + Log.debug("generate_local_notification_object failed", for: .push_notifications) + // We could not process this notification. Probably an unsupported nostr event kind. Suppress. + // contentHandler(UNNotificationContent()) + // TODO: We cannot really suppress until we have the notification supression entitlement. Show the raw notification + contentHandler(request.content) + return + } + let sender_dn = DisplayName(name: sender_profile.name, display_name: sender_profile.display_name, pubkey: sender_pubkey) guard let (improvedContent, _) = await NotificationFormatter.shared.format_message(displayName: sender_dn.displayName, notify: notification_object, state: state) else { diff --git a/damus/Features/Chat/ReplyQuoteView.swift b/damus/Features/Chat/ReplyQuoteView.swift index 660318cb3..d69bd8fd1 100644 --- a/damus/Features/Chat/ReplyQuoteView.swift +++ b/damus/Features/Chat/ReplyQuoteView.swift @@ -15,11 +15,17 @@ struct ReplyQuoteView: View { @ObservedObject var thread: ThreadModel let options: EventViewOptions + @State var can_show_event = true + + func update_should_show_event(event: NdbNote) async { + self.can_show_event = await should_show_event(event: event, damus_state: state) + } + func content(event: NdbNote) -> some View { ZStack(alignment: .leading) { VStack(alignment: .leading) { HStack(alignment: .center) { - if should_show_event(event: event, damus_state: state) { + if can_show_event { ProfilePicView(pubkey: event.pubkey, size: 14, highlight: .reply, profiles: state.profiles, disable_animation: false) let blur_images = should_blur_images(settings: state.settings, contacts: state.contacts, ev: event, our_pubkey: state.pubkey) NoteContentView(damus_state: state, event: event, blur_images: blur_images, size: .small, options: options) @@ -56,6 +62,9 @@ struct ReplyQuoteView: View { Group { if let event = state.events.lookup(event_id) { self.content(event: event) + .onAppear { + Task { await self.update_should_show_event(event: event) } + } } } } diff --git a/damus/Features/FollowPack/Models/FollowPackModel.swift b/damus/Features/FollowPack/Models/FollowPackModel.swift index 5ead6f5b7..f938bd87a 100644 --- a/damus/Features/FollowPack/Models/FollowPackModel.swift +++ 
b/damus/Features/FollowPack/Models/FollowPackModel.swift @@ -51,7 +51,8 @@ class FollowPackModel: ObservableObject { event = ev.toOwned() } guard let event else { return } - if event.is_textlike && should_show_event(state: damus_state, ev: event) && !event.is_reply() + let should_show_event = await should_show_event(state: damus_state, ev: event) + if event.is_textlike && should_show_event && !event.is_reply() { if await self.events.insert(event) { DispatchQueue.main.async { diff --git a/damus/Features/Muting/Models/MutedThreadsManager.swift b/damus/Features/Muting/Models/MutedThreadsManager.swift index e4c93c757..7b463971a 100644 --- a/damus/Features/Muting/Models/MutedThreadsManager.swift +++ b/damus/Features/Muting/Models/MutedThreadsManager.swift @@ -23,6 +23,7 @@ func loadOldMutedThreads(pubkey: Pubkey) -> [NoteId] { // We need to still use it since existing users might have their muted threads stored in UserDefaults // So now all it's doing is moving a users muted threads to the new kind:10000 system // It should not be used for any purpose beyond that +@MainActor func migrate_old_muted_threads_to_new_mutelist(keypair: Keypair, damus_state: DamusState) { // Ensure that keypair is fullkeypair guard let fullKeypair = keypair.to_full() else { return } diff --git a/damus/Features/Muting/Models/MutelistManager.swift b/damus/Features/Muting/Models/MutelistManager.swift index 69864a970..c2a748b95 100644 --- a/damus/Features/Muting/Models/MutelistManager.swift +++ b/damus/Features/Muting/Models/MutelistManager.swift @@ -7,6 +7,7 @@ import Foundation +@MainActor class MutelistManager { let user_keypair: Keypair private(set) var event: NostrEvent? = nil @@ -26,7 +27,7 @@ class MutelistManager { var muted_notes_cache: [NoteId: EventMuteStatus] = [:] - init(user_keypair: Keypair) { + nonisolated init(user_keypair: Keypair) { self.user_keypair = user_keypair } diff --git a/damus/Features/NIP05/Models/NIP05DomainEventsModel.swift b/damus/Features/NIP05/Models/NIP05DomainEventsModel.swift index 55a866757..2037b9ba9 100644 --- a/damus/Features/NIP05/Models/NIP05DomainEventsModel.swift +++ b/damus/Features/NIP05/Models/NIP05DomainEventsModel.swift @@ -76,6 +76,7 @@ class NIP05DomainEventsModel: ObservableObject { guard let event else { return } await self.add_event(event) case .eose: + DispatchQueue.main.async { self.loading = false } continue } } @@ -86,7 +87,7 @@ class NIP05DomainEventsModel: ObservableObject { return } - guard should_show_event(state: state, ev: ev) else { + guard await should_show_event(state: state, ev: ev) else { return } diff --git a/damus/Features/Notifications/Models/NotificationsManager.swift b/damus/Features/Notifications/Models/NotificationsManager.swift index 35c848feb..b266189bf 100644 --- a/damus/Features/Notifications/Models/NotificationsManager.swift +++ b/damus/Features/Notifications/Models/NotificationsManager.swift @@ -12,8 +12,8 @@ import UIKit let EVENT_MAX_AGE_FOR_NOTIFICATION: TimeInterval = 12 * 60 * 60 -func process_local_notification(state: HeadlessDamusState, event ev: NostrEvent) { - guard should_display_notification(state: state, event: ev, mode: .local) else { +func process_local_notification(state: HeadlessDamusState, event ev: NostrEvent) async { + guard await should_display_notification(state: state, event: ev, mode: .local) else { // We should not display notification. Exit. 
return } @@ -25,7 +25,7 @@ func process_local_notification(state: HeadlessDamusState, event ev: NostrEvent) create_local_notification(profiles: state.profiles, notify: local_notification) } -func should_display_notification(state: HeadlessDamusState, event ev: NostrEvent, mode: UserSettingsStore.NotificationsMode) -> Bool { +func should_display_notification(state: HeadlessDamusState, event ev: NostrEvent, mode: UserSettingsStore.NotificationsMode) async -> Bool { // Do not show notification if it's coming from a mode different from the one selected by our user guard state.settings.notification_mode == mode else { return false @@ -46,7 +46,7 @@ func should_display_notification(state: HeadlessDamusState, event ev: NostrEvent } // Don't show notifications that match mute list. - if state.mutelist_manager.is_event_muted(ev) { + if await state.mutelist_manager.is_event_muted(ev) { return false } diff --git a/damus/Features/Search/Models/SearchHomeModel.swift b/damus/Features/Search/Models/SearchHomeModel.swift index 0dec9fc21..9530ca811 100644 --- a/damus/Features/Search/Models/SearchHomeModel.swift +++ b/damus/Features/Search/Models/SearchHomeModel.swift @@ -34,6 +34,7 @@ class SearchHomeModel: ObservableObject { return filter } + @MainActor func filter_muted() { events.filter { should_show_event(state: damus_state, ev: $0) } self.objectWillChange.send() diff --git a/damus/Features/Search/Models/SearchModel.swift b/damus/Features/Search/Models/SearchModel.swift index f2a9229c4..36577e6aa 100644 --- a/damus/Features/Search/Models/SearchModel.swift +++ b/damus/Features/Search/Models/SearchModel.swift @@ -26,6 +26,7 @@ class SearchModel: ObservableObject { }) } + @MainActor func filter_muted() { self.events.filter { should_show_event(state: state, ev: $0) diff --git a/damus/Features/Timeline/Models/ContentFilters.swift b/damus/Features/Timeline/Models/ContentFilters.swift index a950b0dbd..238f15d24 100644 --- a/damus/Features/Timeline/Models/ContentFilters.swift +++ b/damus/Features/Timeline/Models/ContentFilters.swift @@ -34,6 +34,7 @@ func nsfw_tag_filter(ev: NostrEvent) -> Bool { return ev.referenced_hashtags.first(where: { t in t.hashtag.caseInsensitiveCompare("nsfw") == .orderedSame }) == nil } +@MainActor func get_repost_of_muted_user_filter(damus_state: DamusState) -> ((_ ev: NostrEvent) -> Bool) { return { ev in guard ev.known_kind == .boost else { return true } @@ -65,10 +66,12 @@ struct ContentFilters { } extension ContentFilters { + @MainActor static func default_filters(damus_state: DamusState) -> ContentFilters { return ContentFilters(filters: ContentFilters.defaults(damus_state: damus_state)) } + @MainActor static func defaults(damus_state: DamusState) -> [(NostrEvent) -> Bool] { var filters = Array<(NostrEvent) -> Bool>() if damus_state.settings.hide_nsfw_tagged_content { diff --git a/damus/Features/Timeline/Models/HomeModel.swift b/damus/Features/Timeline/Models/HomeModel.swift index ab0649e49..ed18f3c44 100644 --- a/damus/Features/Timeline/Models/HomeModel.swift +++ b/damus/Features/Timeline/Models/HomeModel.swift @@ -84,7 +84,9 @@ class HomeModel: ContactsDelegate { init() { self.damus_state = DamusState.empty self.setup_debouncer() - filter_events() + DispatchQueue.main.async { + self.filter_events() + } events.on_queue = preloader //self.events = EventHolder(on_queue: preloader) } @@ -353,6 +355,7 @@ class HomeModel: ContactsDelegate { } } + @MainActor func filter_events() { events.filter { ev in !damus_state.mutelist_manager.is_muted(.user(ev.pubkey, nil)) @@ -422,6 +425,7 @@ 
class HomeModel: ContactsDelegate { } } + @MainActor func handle_like_event(_ ev: NostrEvent) { guard let e = ev.last_refid() else { // no id ref? invalid like event @@ -682,6 +686,7 @@ class HomeModel: ContactsDelegate { case nwc } + @MainActor func handle_mute_list_event(_ ev: NostrEvent) { // we only care about our mutelist guard ev.pubkey == damus_state.pubkey else { @@ -700,6 +705,7 @@ class HomeModel: ContactsDelegate { migrate_old_muted_threads_to_new_mutelist(keypair: damus_state.keypair, damus_state: damus_state) } + @MainActor func handle_old_list_event(_ ev: NostrEvent) { // we only care about our lists guard ev.pubkey == damus_state.pubkey else { @@ -731,6 +737,7 @@ class HomeModel: ContactsDelegate { return m[kind] } + @MainActor func handle_notification(ev: NostrEvent) { // don't show notifications from ourselves guard ev.pubkey != damus_state.pubkey, @@ -750,7 +757,7 @@ class HomeModel: ContactsDelegate { } if handle_last_event(ev: ev, timeline: .notifications) { - process_local_notification(state: damus_state, event: ev) + Task { await process_local_notification(state: damus_state, event: ev) } } } @@ -773,6 +780,7 @@ class HomeModel: ContactsDelegate { } + @MainActor func handle_text_event(_ ev: NostrEvent, context: SubscriptionContext) { guard should_show_event(state: damus_state, ev: ev) else { return @@ -808,17 +816,21 @@ class HomeModel: ContactsDelegate { } func got_new_dm(notifs: NewEventsBits, ev: NostrEvent) { - notification_status.new_events = notifs - - guard should_display_notification(state: damus_state, event: ev, mode: .local), - let notification_object = generate_local_notification_object(ndb: self.damus_state.ndb, from: ev, state: damus_state) - else { - return + Task { + notification_status.new_events = notifs + + + guard await should_display_notification(state: damus_state, event: ev, mode: .local), + let notification_object = generate_local_notification_object(ndb: self.damus_state.ndb, from: ev, state: damus_state) + else { + return + } + + create_local_notification(profiles: damus_state.profiles, notify: notification_object) } - - create_local_notification(profiles: damus_state.profiles, notify: notification_object) } + @MainActor func handle_dm(_ ev: NostrEvent) { guard should_show_event(state: damus_state, ev: ev) else { return @@ -1150,6 +1162,7 @@ func event_has_our_pubkey(_ ev: NostrEvent, our_pubkey: Pubkey) -> Bool { return ev.referenced_pubkeys.contains(our_pubkey) } +@MainActor func should_show_event(event: NostrEvent, damus_state: DamusState) -> Bool { return should_show_event( state: damus_state, @@ -1157,6 +1170,7 @@ func should_show_event(event: NostrEvent, damus_state: DamusState) -> Bool { ) } +@MainActor func should_show_event(state: DamusState, ev: NostrEvent) -> Bool { let event_muted = state.mutelist_manager.is_event_muted(ev) if event_muted { From ab22206093ed3c283d56778f4b011c8fa2b757e5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Mon, 1 Sep 2025 15:04:29 -0700 Subject: [PATCH 18/91] Fix broken Follow Pack timeline MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Daniel D’Aquino --- damus/Features/Search/Models/SearchHomeModel.swift | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/damus/Features/Search/Models/SearchHomeModel.swift b/damus/Features/Search/Models/SearchHomeModel.swift index 9530ca811..acf1c0856 100644 --- a/damus/Features/Search/Models/SearchHomeModel.swift +++ 
b/damus/Features/Search/Models/SearchHomeModel.swift @@ -47,7 +47,11 @@ class SearchHomeModel: ObservableObject { let to_relays = damus_state.nostrNetwork.ourRelayDescriptors .map { $0.url } .filter { !damus_state.relay_filters.is_filtered(timeline: .search, relay_id: $0) } - outerLoop: for await item in damus_state.nostrNetwork.reader.subscribe(filters: [get_base_filter()], to: to_relays) { + + var follow_list_filter = NostrFilter(kinds: [.follow_list]) + follow_list_filter.until = UInt32(Date.now.timeIntervalSince1970) + + outerLoop: for await item in damus_state.nostrNetwork.reader.subscribe(filters: [get_base_filter(), follow_list_filter], to: to_relays) { switch item { case .event(let borrow): var event: NostrEvent? = nil From c43a37d2d343786768e635afc5121a65e73d9b14 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Mon, 1 Sep 2025 15:13:05 -0700 Subject: [PATCH 19/91] Fix forever-loading quote repost view MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Daniel D’Aquino --- damus/Features/Events/Models/EventsModel.swift | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/damus/Features/Events/Models/EventsModel.swift b/damus/Features/Events/Models/EventsModel.swift index 4c586895c..f9f47739c 100644 --- a/damus/Features/Events/Models/EventsModel.swift +++ b/damus/Features/Events/Models/EventsModel.swift @@ -70,7 +70,8 @@ class EventsModel: ObservableObject { func subscribe() { loadingTask?.cancel() loadingTask = Task { - for await item in state.nostrNetwork.reader.subscribe(filters: [get_filter()]) { + DispatchQueue.main.async { self.loading = true } + outerLoop: for await item in state.nostrNetwork.reader.subscribe(filters: [get_filter()]) { switch item { case .event(let borrow): var event: NostrEvent? 
= nil @@ -84,10 +85,11 @@ class EventsModel: ObservableObject { } } case .eose: - break + DispatchQueue.main.async { self.loading = false } + break outerLoop } } - self.loading = false + DispatchQueue.main.async { self.loading = false } guard let txn = NdbTxn(ndb: self.state.ndb) else { return } load_profiles(context: "events_model", load: .from_events(events.all_events), damus_state: state, txn: txn) } From 4478672c109d2a1e7227996067723447c20ee835 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Wed, 3 Sep 2025 11:37:43 -0700 Subject: [PATCH 20/91] Fix occasional stale timeline issue MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Changelog-Changed: Added UX hint to make it easier to load new notes Signed-off-by: Daniel D’Aquino --- .../FollowPack/Views/FollowPackTimeline.swift | 2 +- .../Timeline/Views/InnerTimelineView.swift | 20 +++++++++++++++++++ .../Timeline/Views/TimelineView.swift | 11 ++++------ damus/Shared/Utilities/EventHolder.swift | 4 ++-- 4 files changed, 27 insertions(+), 10 deletions(-) diff --git a/damus/Features/FollowPack/Views/FollowPackTimeline.swift b/damus/Features/FollowPack/Views/FollowPackTimeline.swift index 294c2321d..ce4dfe82d 100644 --- a/damus/Features/FollowPack/Views/FollowPackTimeline.swift +++ b/damus/Features/FollowPack/Views/FollowPackTimeline.swift @@ -66,7 +66,7 @@ struct FollowPackTimelineView: View { .coordinateSpace(name: "scroll") .onReceive(handle_notify(.scroll_to_top)) { () in events.flush() - self.events.should_queue = false + self.events.set_should_queue(false) scroll_to_event(scroller: scroller, id: "startblock", delay: 0.0, animate: true, anchor: .top) } } diff --git a/damus/Features/Timeline/Views/InnerTimelineView.swift b/damus/Features/Timeline/Views/InnerTimelineView.swift index 0e572f8fd..49e832968 100644 --- a/damus/Features/Timeline/Views/InnerTimelineView.swift +++ b/damus/Features/Timeline/Views/InnerTimelineView.swift @@ -29,6 +29,26 @@ struct InnerTimelineView: View { var body: some View { LazyVStack(spacing: 0) { + let incomingEvents = events.incoming.filter({ filter($0) }) + if incomingEvents.count > 0 { + Button( + action: { + notify(.scroll_to_top) + }, + label: { + HStack(spacing: 6) { + CondensedProfilePicturesView(state: state, pubkeys: incomingEvents.map({ $0.pubkey }), maxPictures: 3) + Text("Load new content", comment: "Button to load new notes in the timeline") + .bold() + } + .padding(.horizontal, 20) + .padding(.vertical, 10) + } + ) + .buttonStyle(NeutralButtonStyle(cornerRadius: 50)) + .padding(.vertical, 10) + } + let events = self.events.events if events.isEmpty { EmptyTimelineView() diff --git a/damus/Features/Timeline/Views/TimelineView.swift b/damus/Features/Timeline/Views/TimelineView.swift index 8edff6549..5ad84008d 100644 --- a/damus/Features/Timeline/Views/TimelineView.swift +++ b/damus/Features/Timeline/Views/TimelineView.swift @@ -97,7 +97,7 @@ struct TimelineView: View { .coordinateSpace(name: "scroll") .onReceive(handle_notify(.scroll_to_top)) { () in events.flush() - self.events.should_queue = false + self.events.set_should_queue(false) scroll_to_event(scroller: scroller, id: "startblock", delay: 0.0, animate: true, anchor: .top) } } @@ -122,11 +122,8 @@ protocol ScrollQueue { func handle_scroll_queue(_ proxy: GeometryProxy, queue: ScrollQueue) { let offset = -proxy.frame(in: .named("scroll")).origin.y - guard offset >= 0 else { - return - } - let val = offset > 0 - if queue.should_queue != val { - queue.set_should_queue(val) + let 
new_should_queue = offset > 0 + if queue.should_queue != new_should_queue { + queue.set_should_queue(new_should_queue) } } diff --git a/damus/Shared/Utilities/EventHolder.swift b/damus/Shared/Utilities/EventHolder.swift index 868144aee..cd615f33b 100644 --- a/damus/Shared/Utilities/EventHolder.swift +++ b/damus/Shared/Utilities/EventHolder.swift @@ -11,8 +11,8 @@ import Foundation class EventHolder: ObservableObject, ScrollQueue { private var has_event = Set() @Published var events: [NostrEvent] - var incoming: [NostrEvent] - var should_queue = false + @Published var incoming: [NostrEvent] + private(set) var should_queue = false var on_queue: ((NostrEvent) -> Void)? func set_should_queue(_ val: Bool) { From d766029f2be8d76a8c564af15d818eef53ef6214 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Wed, 3 Sep 2025 15:08:15 -0700 Subject: [PATCH 21/91] Improve loading UX in the home timeline MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Changelog-Changed: Improved loading UX in the home timeline Signed-off-by: Daniel D’Aquino --- damus/Features/Timeline/Models/HomeModel.swift | 10 ++++++++-- .../Timeline/Views/PostingTimelineView.swift | 12 ++++++++++-- damus/Features/Timeline/Views/TimelineView.swift | 1 + 3 files changed, 19 insertions(+), 4 deletions(-) diff --git a/damus/Features/Timeline/Models/HomeModel.swift b/damus/Features/Timeline/Models/HomeModel.swift index ed18f3c44..4a5fa062b 100644 --- a/damus/Features/Timeline/Models/HomeModel.swift +++ b/damus/Features/Timeline/Models/HomeModel.swift @@ -41,7 +41,7 @@ enum HomeResubFilter { } } -class HomeModel: ContactsDelegate { +class HomeModel: ContactsDelegate, ObservableObject { // The maximum amount of contacts placed on a home feed subscription filter. // If the user has more contacts, chunking or other techniques will be used to avoid sending huge filters let MAX_CONTACTS_ON_FILTER = 500 @@ -71,7 +71,7 @@ class HomeModel: ContactsDelegate { var dmsHandlerTask: Task? var nwcHandlerTask: Task? - var loading: Bool = false + @Published var loading: Bool = true var signal = SignalModel() @@ -658,6 +658,9 @@ class HomeModel: ContactsDelegate { self.homeHandlerTask?.cancel() self.homeHandlerTask = Task { + DispatchQueue.main.async { + self.loading = true + } for await item in damus_state.nostrNetwork.reader.subscribe(filters: home_filters) { switch item { case .event(let borrow): @@ -669,6 +672,9 @@ class HomeModel: ContactsDelegate { await self.process_event(ev: event, context: .home) case .eose: guard let txn = NdbTxn(ndb: damus_state.ndb) else { return } + DispatchQueue.main.async { + self.loading = false + } load_profiles(context: "home", load: .from_events(events.events), damus_state: damus_state, txn: txn) } } diff --git a/damus/Features/Timeline/Views/PostingTimelineView.swift b/damus/Features/Timeline/Views/PostingTimelineView.swift index 8a37b5be3..06848ba74 100644 --- a/damus/Features/Timeline/Views/PostingTimelineView.swift +++ b/damus/Features/Timeline/Views/PostingTimelineView.swift @@ -10,7 +10,7 @@ import SwiftUI struct PostingTimelineView: View { let damus_state: DamusState - var home: HomeModel + @ObservedObject var home: HomeModel @State var search: String = "" @State var results: [NostrEvent] = [] @State var initialOffset: CGFloat? 
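For readers following the loading-state changes in this patch: HomeModel now publishes a `loading` flag that is flipped on the main queue around its home subscription, and PostingTimelineView observes the model and bridges that flag into a Binding for TimelineView in the hunk below. A minimal standalone sketch of the pattern, with illustrative names that are not part of this patch:

import SwiftUI

// Illustrative sketch only — LoadingDemoModel/LoadingDemoView are not part of this patch.
@MainActor
final class LoadingDemoModel: ObservableObject {
    @Published var loading: Bool = true

    func consume(_ stream: AsyncStream<Int>) {
        Task {
            loading = true
            for await _ in stream {
                // handle each incoming item here
            }
            loading = false // cleared when the stream ends; HomeModel clears it when EOSE arrives
        }
    }
}

struct LoadingDemoView: View {
    @ObservedObject var model: LoadingDemoModel

    var body: some View {
        if model.loading {
            ProgressView()
        } else {
            Text("Loaded")
        }
    }
}
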
@@ -25,6 +25,14 @@ struct PostingTimelineView: View { @State var headerHeight: CGFloat = 0 @Binding var headerOffset: CGFloat @SceneStorage("PostingTimelineView.filter_state") var filter_state : FilterState = .posts_and_replies + + var loading: Binding { + Binding(get: { + return home.loading + }, set: { + home.loading = $0 + }) + } func content_filter(_ fstate: FilterState) -> ((NostrEvent) -> Bool) { var filters = ContentFilters.defaults(damus_state: damus_state) @@ -33,7 +41,7 @@ struct PostingTimelineView: View { } func contentTimelineView(filter: (@escaping (NostrEvent) -> Bool)) -> some View { - TimelineView(events: home.events, loading: .constant(false), headerHeight: $headerHeight, headerOffset: $headerOffset, damus: damus_state, show_friend_icon: false, filter: filter) + TimelineView(events: home.events, loading: self.loading, headerHeight: $headerHeight, headerOffset: $headerOffset, damus: damus_state, show_friend_icon: false, filter: filter) } func HeaderView()->some View { diff --git a/damus/Features/Timeline/Views/TimelineView.swift b/damus/Features/Timeline/Views/TimelineView.swift index 5ad84008d..96c06179a 100644 --- a/damus/Features/Timeline/Views/TimelineView.swift +++ b/damus/Features/Timeline/Views/TimelineView.swift @@ -95,6 +95,7 @@ struct TimelineView: View { } } .coordinateSpace(name: "scroll") + .disabled(self.loading) .onReceive(handle_notify(.scroll_to_top)) { () in events.flush() self.events.set_should_queue(false) From 9fb7ed741e6cd5915ed0d4fe571d1effd7528a86 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Wed, 3 Sep 2025 15:32:09 -0700 Subject: [PATCH 22/91] Fix race condition on app swap that would cause ndb to remain closed MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Daniel D’Aquino --- damus/ContentView.swift | 53 ++++++++++++++++++++++------------------- 1 file changed, 29 insertions(+), 24 deletions(-) diff --git a/damus/ContentView.swift b/damus/ContentView.swift index e87251fff..e7e1e5026 100644 --- a/damus/ContentView.swift +++ b/damus/ContentView.swift @@ -135,6 +135,7 @@ struct ContentView: View { @StateObject var navigationCoordinator: NavigationCoordinator = NavigationCoordinator() @AppStorage("has_seen_suggested_users") private var hasSeenOnboardingSuggestions = false let sub_id = UUID().description + @State var damusClosingTask: Task? = nil // connect retry timer let timer = Timer.publish(every: 1, on: .main, in: .common).autoconnect() @@ -478,31 +479,32 @@ struct ContentView: View { } .onReceive(NotificationCenter.default.publisher(for: UIApplication.willEnterForegroundNotification)) { obj in print("txn: 📙 DAMUS ACTIVE NOTIFY") - if damus_state.ndb.reopen() { - print("txn: NOSTRDB REOPENED") - } else { - print("txn: NOSTRDB FAILED TO REOPEN closed:\(damus_state.ndb.is_closed)") - } - if damus_state.purple.checkout_ids_in_progress.count > 0 { - // For extra assurance, run this after one second, to avoid race conditions if the app is also handling a damus purple welcome url. - DispatchQueue.main.asyncAfter(deadline: .now() + 1) { - Task { - let freshly_completed_checkout_ids = try? await damus_state.purple.check_status_of_checkouts_in_progress() - let there_is_a_completed_checkout: Bool = (freshly_completed_checkout_ids?.count ?? 
0) > 0 - let account_info = try await damus_state.purple.fetch_account(pubkey: self.keypair.pubkey) - if there_is_a_completed_checkout == true && account_info?.active == true { - if damus_state.purple.onboarding_status.user_has_never_seen_the_onboarding_before() { - // Show welcome sheet - self.active_sheet = .purple_onboarding - } - else { - self.active_sheet = .purple(DamusPurpleURL.init(is_staging: damus_state.purple.environment == .staging, variant: .landing)) + Task { + await damusClosingTask?.value // Wait for the closing task to finish before reopening things, to avoid race conditions + if damus_state.ndb.reopen() { + print("txn: NOSTRDB REOPENED") + } else { + print("txn: NOSTRDB FAILED TO REOPEN closed:\(damus_state.ndb.is_closed)") + } + if damus_state.purple.checkout_ids_in_progress.count > 0 { + // For extra assurance, run this after one second, to avoid race conditions if the app is also handling a damus purple welcome url. + DispatchQueue.main.asyncAfter(deadline: .now() + 1) { + Task { + let freshly_completed_checkout_ids = try? await damus_state.purple.check_status_of_checkouts_in_progress() + let there_is_a_completed_checkout: Bool = (freshly_completed_checkout_ids?.count ?? 0) > 0 + let account_info = try await damus_state.purple.fetch_account(pubkey: self.keypair.pubkey) + if there_is_a_completed_checkout == true && account_info?.active == true { + if damus_state.purple.onboarding_status.user_has_never_seen_the_onboarding_before() { + // Show welcome sheet + self.active_sheet = .purple_onboarding + } + else { + self.active_sheet = .purple(DamusPurpleURL.init(is_staging: damus_state.purple.environment == .staging, variant: .landing)) + } } } } } - } - Task { await damus_state.purple.check_and_send_app_notifications_if_needed(handler: home.handle_damus_app_notification) } } @@ -511,7 +513,7 @@ struct ContentView: View { switch phase { case .background: print("txn: 📙 DAMUS BACKGROUNDED") - Task { @MainActor in + damusClosingTask = Task { @MainActor in await damus_state.nostrNetwork.close() // Close ndb streaming tasks before closing ndb to avoid memory errors damus_state.ndb.close() } @@ -521,8 +523,11 @@ struct ContentView: View { break case .active: print("txn: 📙 DAMUS ACTIVE") - damus_state.nostrNetwork.connect() - damus_state.nostrNetwork.ping() + Task { + await damusClosingTask?.value // Wait for the closing task to finish before reopening things, to avoid race conditions + damus_state.nostrNetwork.connect() + damus_state.nostrNetwork.ping() + } @unknown default: break } From 2550d613b2fe7e6c7e527dbbf563094c6093483f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Wed, 3 Sep 2025 15:54:00 -0700 Subject: [PATCH 23/91] Fix test compilation issues MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Daniel D’Aquino --- damusTests/LargeEventTests.swift | 5 +++-- damusTests/MutingTests.swift | 12 +++++++----- 2 files changed, 10 insertions(+), 7 deletions(-) diff --git a/damusTests/LargeEventTests.swift b/damusTests/LargeEventTests.swift index 382c2ad82..16ab63e1f 100644 --- a/damusTests/LargeEventTests.swift +++ b/damusTests/LargeEventTests.swift @@ -10,7 +10,7 @@ import XCTest final class LargeEventTests: XCTestCase { - func testLongPost() throws { + func testLongPost() async throws { let json = "[\"EVENT\",\"subid\",\(test_failing_nostr_report)]" let resp = NostrResponse.owned_from_json(json: json) @@ -25,7 +25,8 @@ final class LargeEventTests: XCTestCase { XCTAssertEqual(subid, "subid") 
XCTAssertTrue(ev.should_show_event) XCTAssertTrue(!ev.too_big) - XCTAssertTrue(should_show_event(state: test_damus_state, ev: ev)) + let shouldShowEvent = await should_show_event(state: test_damus_state, ev: ev) + XCTAssertTrue(shouldShowEvent) XCTAssertTrue(validate_event(ev: ev) == .ok) } diff --git a/damusTests/MutingTests.swift b/damusTests/MutingTests.swift index 804d5986c..530c0d489 100644 --- a/damusTests/MutingTests.swift +++ b/damusTests/MutingTests.swift @@ -10,7 +10,7 @@ import XCTest @testable import damus final class MutingTests: XCTestCase { - func testWordMuting() { + func testWordMuting() async { // Setup some test data let test_note = NostrEvent( content: "Nostr is the super app. Because it’s actually an ecosystem of apps, all of which make each other better. People haven’t grasped that yet. They will when it’s more accessible and onboarding is more straightforward and intuitive.", @@ -25,7 +25,7 @@ final class MutingTests: XCTestCase { )! let mute_item: MuteItem = .word("airdrop", nil) - let existing_mutelist = test_damus_state.mutelist_manager.event + let existing_mutelist = await test_damus_state.mutelist_manager.event guard let full_keypair = test_damus_state.keypair.to_full(), @@ -34,10 +34,12 @@ final class MutingTests: XCTestCase { return } - test_damus_state.mutelist_manager.set_mutelist(mutelist) + await test_damus_state.mutelist_manager.set_mutelist(mutelist) test_damus_state.nostrNetwork.postbox.send(mutelist) - XCTAssert(test_damus_state.mutelist_manager.is_event_muted(spammy_test_note)) - XCTAssertFalse(test_damus_state.mutelist_manager.is_event_muted(test_note)) + let spammy_note_muted = await test_damus_state.mutelist_manager.is_event_muted(spammy_test_note) + XCTAssert(spammy_note_muted) + let test_note_muted = await test_damus_state.mutelist_manager.is_event_muted(test_note) + XCTAssertFalse(test_note_muted) } } From 7eb759a8a0310b487f7d887a0498dc949d1edc00 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Fri, 5 Sep 2025 13:10:02 -0700 Subject: [PATCH 24/91] Fix issue with wallet loading MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Changelog-Changed: Increased transaction list limit to 50 transactions Signed-off-by: Daniel D’Aquino --- .../NostrNetworkManager.swift | 12 +- .../SubscriptionManager.swift | 30 ++++- damus/Core/Nostr/RelayPool.swift | 5 +- damus/Core/Storage/DamusState.swift | 6 +- .../Models/WalletConnect/WalletConnect+.swift | 74 ----------- .../Features/Wallet/Models/WalletModel.swift | 117 ++++++++++++++++++ .../Wallet/Views/SendPaymentView.swift | 4 +- damus/Features/Wallet/Views/WalletView.swift | 22 +++- damus/Shared/ErrorHandling/ErrorView.swift | 5 + 9 files changed, 180 insertions(+), 95 deletions(-) diff --git a/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift b/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift index daa601834..50f51f518 100644 --- a/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift +++ b/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift @@ -85,8 +85,8 @@ class NostrNetworkManager { self.pool.send_raw_to_local_ndb(.typical(.event(event))) } - func send(event: NostrEvent) { - self.pool.send(.event(event)) + func send(event: NostrEvent, to targetRelays: [RelayURL]? = nil, skipEphemeralRelays: Bool = true) { + self.pool.send(.event(event), to: targetRelays, skip_ephemeral: skipEphemeralRelays) } func query(filters: [NostrFilter], to: [RelayURL]? 
= nil) async -> [NostrEvent] { @@ -208,14 +208,6 @@ class NostrNetworkManager { WalletConnect.pay(url: url, pool: self.pool, post: post, invoice: invoice, zap_request: nil) } - func requestTransactionList(url: WalletConnectURL, delay: TimeInterval? = 0.0, on_flush: OnFlush? = nil) { - WalletConnect.request_transaction_list(url: url, pool: self.pool, post: self.postbox, delay: delay, on_flush: on_flush) - } - - func requestBalanceInformation(url: WalletConnectURL, delay: TimeInterval? = 0.0, on_flush: OnFlush? = nil) { - WalletConnect.request_balance_information(url: url, pool: self.pool, post: self.postbox, delay: delay, on_flush: on_flush) - } - /// Send a donation zap to the Damus team func send_donation_zap(nwc: WalletConnectURL, percent: Int, base_msats: Int64) async { let percent_f = Double(percent) / 100.0 diff --git a/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift b/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift index 0ee285e31..476a05fe7 100644 --- a/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift +++ b/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift @@ -25,6 +25,28 @@ extension NostrNetworkManager { // MARK: - Reading data from Nostr + /// Subscribes to data from user's relays, for a maximum period of time — after which the stream will end. + /// + /// This is useful when waiting for some specific data from Nostr, but not indefinitely. + func subscribe(filters: [NostrFilter], to desiredRelays: [RelayURL]? = nil, timeout: Duration) -> AsyncStream { + return AsyncStream { continuation in + let streamingTask = Task { + for await item in self.subscribe(filters: filters, to: desiredRelays) { + try Task.checkCancellation() + continuation.yield(item) + } + } + let timeoutTask = Task { + try await Task.sleep(for: timeout) + continuation.finish() // End the stream due to timeout. + } + continuation.onTermination = { @Sendable _ in + timeoutTask.cancel() + streamingTask.cancel() + } + } + } + /// Subscribes to data from the user's relays /// /// ## Implementation notes @@ -112,10 +134,16 @@ extension NostrNetworkManager { } let streamTask = Task { do { - for await _ in self.pool.subscribe(filters: filters, to: desiredRelays) { + for await item in self.pool.subscribe(filters: filters, to: desiredRelays) { // NO-OP. Notes will be automatically ingested by NostrDB // TODO: Improve efficiency of subscriptions? try Task.checkCancellation() + switch item { + case .event(let event): + Log.debug("Session subscribe: Received kind %d event with id %s from the network", for: .subscription_manager, event.kind, event.id.hex()) + case .eose: + Log.debug("Session subscribe: Received EOSE from the network", for: .subscription_manager) + } } } catch { diff --git a/damus/Core/Nostr/RelayPool.swift b/damus/Core/Nostr/RelayPool.swift index 6f3f7443a..a85fe6b9c 100644 --- a/damus/Core/Nostr/RelayPool.swift +++ b/damus/Core/Nostr/RelayPool.swift @@ -207,7 +207,10 @@ class RelayPool { func subscribe(sub_id: String, filters: [NostrFilter], handler: @escaping (RelayURL, NostrConnectionEvent) -> (), to: [RelayURL]? = nil) { Task { await register_handler(sub_id: sub_id, handler: handler) - send(.subscribe(.init(filters: filters, sub_id: sub_id)), to: to) + // When the caller specifies no relays, it is implied that the user wants to use the ones in the user relay list. Skip ephemeral relays in that case. + // When the caller specifies specific relays, do not skip ephemeral relays to respect the exact list given by the caller. 
+ let shouldSkipEphemeralRelays = to == nil ? true : false + send(.subscribe(.init(filters: filters, sub_id: sub_id)), to: to, skip_ephemeral: shouldSkipEphemeralRelays) } } diff --git a/damus/Core/Storage/DamusState.swift b/damus/Core/Storage/DamusState.swift index a1821e6c2..f663064af 100644 --- a/damus/Core/Storage/DamusState.swift +++ b/damus/Core/Storage/DamusState.swift @@ -72,7 +72,9 @@ class DamusState: HeadlessDamusState { self.favicon_cache = FaviconCache() let networkManagerDelegate = NostrNetworkManagerDelegate(settings: settings, contacts: contacts, ndb: ndb, keypair: keypair, relayModelCache: relay_model_cache, relayFilters: relay_filters) - self.nostrNetwork = NostrNetworkManager(delegate: networkManagerDelegate) + let nostrNetwork = NostrNetworkManager(delegate: networkManagerDelegate) + self.nostrNetwork = nostrNetwork + self.wallet.nostrNetwork = nostrNetwork } @MainActor @@ -122,7 +124,7 @@ class DamusState: HeadlessDamusState { events: EventCache(ndb: ndb), bookmarks: BookmarksManager(pubkey: pubkey), replies: ReplyCounter(our_pubkey: pubkey), - wallet: WalletModel(settings: settings), + wallet: WalletModel(settings: settings), // nostrNetwork is connected after initialization nav: navigationCoordinator, music: MusicController(onChange: { _ in }), video: DamusVideoCoordinator(), diff --git a/damus/Features/Wallet/Models/WalletConnect/WalletConnect+.swift b/damus/Features/Wallet/Models/WalletConnect/WalletConnect+.swift index 1ed038bfc..b7f3a2c1e 100644 --- a/damus/Features/Wallet/Models/WalletConnect/WalletConnect+.swift +++ b/damus/Features/Wallet/Models/WalletConnect/WalletConnect+.swift @@ -54,80 +54,6 @@ extension WalletConnect { return ev } - /// Sends out a wallet balance request to the NWC relay, and ensures that: - /// 1. the NWC relay is connected and we are listening to NWC events - /// 2. the NWC relay is connected and we are listening to NWC - /// - /// Note: This does not return the actual balance information. The actual balance is handled elsewhere around `HomeModel` and `WalletModel` - /// - /// - Parameters: - /// - url: The NWC wallet connection URL - /// - pool: The relay pool to connect to - /// - post: The postbox to send events in - /// - delay: The delay before actually sending the request to the network - /// - on_flush: A callback to call after the event has been flushed to the network - /// - Returns: The Nostr Event that was sent to the network, representing the request that was made - @discardableResult - static func request_balance_information(url: WalletConnectURL, pool: RelayPool, post: PostBox, delay: TimeInterval? = 0.0, on_flush: OnFlush? = nil) -> NostrEvent? { - let req = WalletConnect.Request.getBalance - guard let ev = req.to_nostr_event(to_pk: url.pubkey, keypair: url.keypair) else { - return nil - } - - try? pool.add_relay(.nwc(url: url.relay)) // Ensure the NWC relay is connected - WalletConnect.subscribe(url: url, pool: pool) // Ensure we are listening to NWC updates from the relay - post.send(ev, to: [url.relay], skip_ephemeral: false, delay: delay, on_flush: on_flush) - return ev - } - - /// Sends out a wallet transaction list request to the NWC relay, and ensures that: - /// 1. the NWC relay is connected and we are listening to NWC events - /// 2. the NWC relay is connected and we are listening to NWC - /// - /// Note: This does not return the actual transaction list. 
The actual transaction list is handled elsewhere around `HomeModel` and `WalletModel` - /// - /// - Parameters: - /// - url: The NWC wallet connection URL - /// - pool: The relay pool to connect to - /// - post: The postbox to send events in - /// - delay: The delay before actually sending the request to the network - /// - on_flush: A callback to call after the event has been flushed to the network - /// - Returns: The Nostr Event that was sent to the network, representing the request that was made - @discardableResult - static func request_transaction_list(url: WalletConnectURL, pool: RelayPool, post: PostBox, delay: TimeInterval? = 0.0, on_flush: OnFlush? = nil) -> NostrEvent? { - let req = WalletConnect.Request.getTransactionList(from: nil, until: nil, limit: 10, offset: 0, unpaid: false, type: "") - guard let ev = req.to_nostr_event(to_pk: url.pubkey, keypair: url.keypair) else { - return nil - } - - try? pool.add_relay(.nwc(url: url.relay)) // Ensure the NWC relay is connected - WalletConnect.subscribe(url: url, pool: pool) // Ensure we are listening to NWC updates from the relay - post.send(ev, to: [url.relay], skip_ephemeral: false, delay: delay, on_flush: on_flush) - return ev - } - - @MainActor - static func refresh_wallet_information(damus_state: DamusState) async { - damus_state.wallet.resetWalletStateInformation() - await Self.update_wallet_information(damus_state: damus_state) - } - - @MainActor - static func update_wallet_information(damus_state: DamusState) async { - guard let url = damus_state.settings.nostr_wallet_connect, - let nwc = WalletConnectURL(str: url) else { - return - } - - let flusher: OnFlush? = nil - - let delay = 0.0 // We don't need a delay when fetching a transaction list or balance - - damus_state.nostrNetwork.requestTransactionList(url: nwc, delay: delay, on_flush: flusher) - damus_state.nostrNetwork.requestBalanceInformation(url: nwc, delay: delay, on_flush: flusher) - return - } - static func handle_zap_success(state: DamusState, resp: WalletConnect.FullWalletResponse) { // find the pending zap and mark it as pending-confirmed for kv in state.zaps.our_zaps { diff --git a/damus/Features/Wallet/Models/WalletModel.swift b/damus/Features/Wallet/Models/WalletModel.swift index ccf71dcb0..34d916ac4 100644 --- a/damus/Features/Wallet/Models/WalletModel.swift +++ b/damus/Features/Wallet/Models/WalletModel.swift @@ -11,11 +11,24 @@ enum WalletConnectState { case new(WalletConnectURL) case existing(WalletConnectURL) case none + + /// Gets the currently connected NWC URL + func currentNwcUrl() -> WalletConnectURL? { + switch self { + case .new: + return nil // User has not confirmed they want to use this yet, so we cannot call it "current" + case .existing(let nwcUrl): + return nwcUrl + case .none: + return nil + } + } } /// Models and manages the user's NWC wallet based on the app's settings class WalletModel: ObservableObject { var settings: UserSettingsStore + var nostrNetwork: NostrNetworkManager? = nil private(set) var previous_state: WalletConnectState var initial_percent: Int /// The wallet's balance, in sats. 
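The hunks that follow replace the removed static WalletConnect balance and transaction-list helpers with async methods on WalletModel itself. An illustrative usage sketch, assuming an already-configured `walletModel`; the helper name `refreshIfConnected` is made up for this example and is not part of the patch:

// Illustrative only — refreshIfConnected is not part of this patch.
func refreshIfConnected(_ walletModel: WalletModel) async {
    // .new and .none yield nil: only a confirmed, existing connection counts as "current"
    guard let nwcUrl = walletModel.connect_state.currentNwcUrl() else { return }
    print("Refreshing wallet connected via \(nwcUrl.relay)")
    do {
        try await walletModel.refreshWalletInformation()
        // balance (in sats) and the transaction list are published on the main queue afterwards
    } catch {
        print("Wallet refresh failed: \(error)")
    }
}
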
@@ -37,6 +50,7 @@ class WalletModel: ObservableObject { self.previous_state = .none self.settings = settings self.initial_percent = settings.donation_percent + self.nostrNetwork = nil } init(settings: UserSettingsStore) { @@ -50,6 +64,7 @@ class WalletModel: ObservableObject { self.connect_state = .none } self.initial_percent = settings.donation_percent + self.nostrNetwork = nil } func cancel() { @@ -96,12 +111,114 @@ class WalletModel: ObservableObject { } } + + // MARK: - Wallet internal state lifecycle functions + + @MainActor func resetWalletStateInformation() { self.transactions = nil self.balance = nil } + func refreshWalletInformation() async throws { + await self.resetWalletStateInformation() + try await loadWalletInformation() + } + + func loadWalletInformation() async throws { + try await loadBalance() + try await loadTransactionList() + } + + func loadBalance() async throws { + let balance = try await fetchBalance() + DispatchQueue.main.async { + self.balance = balance + } + } + + func loadTransactionList() async throws { + let transactions = try await fetchTransactions(from: nil, until: nil, limit: 50, offset: 0, unpaid: false, type: "") + DispatchQueue.main.async { + self.transactions = transactions + } + } + + // MARK: - Easy wallet info fetching interface + + func fetchTransactions(from: UInt64?, until: UInt64?, limit: Int?, offset: Int?, unpaid: Bool?, type: String?) async throws -> [WalletConnect.Transaction] { + let response = try await self.request(.getTransactionList(from: from, until: until, limit: limit, offset: offset, unpaid: unpaid, type: type)) + guard case .list_transactions(let transactionResponse) = response else { throw FetchError.responseMismatch } + return transactionResponse.transactions + } + + + /// Fetches the balance amount from the network and returns the amount in sats + func fetchBalance() async throws -> Int64 { + let response = try await self.request(.getBalance) + guard case .get_balance(let balanceResponse) = response else { throw FetchError.responseMismatch } + return balanceResponse.balance / 1000 + } + + enum FetchError: Error { + case responseMismatch + } + + // MARK: - Easy request/response interface + + func request(_ request: WalletConnect.Request, timeout: Duration = .seconds(10)) async throws(WalletRequestError) -> WalletConnect.Response.Result { + guard let nostrNetwork else { throw .notConnectedToTheNostrNetwork } + guard let currentNwcUrl = self.connect_state.currentNwcUrl() else { throw .noConnectedWallet } + guard let requestEvent = request.to_nostr_event(to_pk: currentNwcUrl.pubkey, keypair: currentNwcUrl.keypair) else { throw .errorFormattingRequest } + + let responseFilters = [ + NostrFilter( + kinds: [.nwc_response], + referenced_ids: [requestEvent.id], + pubkeys: [currentNwcUrl.keypair.pubkey], + authors: [currentNwcUrl.pubkey] + ) + ] + + nostrNetwork.send(event: requestEvent, to: [currentNwcUrl.relay], skipEphemeralRelays: false) + for await item in nostrNetwork.reader.subscribe(filters: responseFilters, to: [currentNwcUrl.relay], timeout: timeout) { + switch item { + case .event(borrow: let borrow): + var responseEvent: NostrEvent? = nil + try? 
borrow { ev in responseEvent = ev.toOwned() } + guard let responseEvent else { throw .internalError } + + let fullWalletResponse: WalletConnect.FullWalletResponse + do { fullWalletResponse = try WalletConnect.FullWalletResponse(from: responseEvent, nwc: currentNwcUrl) } + catch { throw WalletRequestError.walletResponseDecodingError(error) } + + guard fullWalletResponse.req_id == requestEvent.id else { continue } // Our filters may match other responses + if let responseError = fullWalletResponse.response.error { throw .walletResponseError(responseError) } + + guard let result = fullWalletResponse.response.result else { throw .walletEmptyResponse } + return result + case .eose: + continue + } + } + do { try Task.checkCancellation() } catch { throw .cancelled } + throw .responseTimeout + } + + enum WalletRequestError: Error { + case notConnectedToTheNostrNetwork + case noConnectedWallet + case errorFormattingRequest + case internalError + case walletResponseDecodingError(WalletConnect.FullWalletResponse.InitializationError) + case walletResponseMismatch + case walletResponseError(WalletConnect.WalletResponseErr) + case walletEmptyResponse + case responseTimeout + case cancelled + } + // MARK: - Async wallet response waiting mechanism func waitForResponse(for requestId: NoteId, timeout: Duration = .seconds(10)) async throws -> WalletConnect.Response.Result { diff --git a/damus/Features/Wallet/Views/SendPaymentView.swift b/damus/Features/Wallet/Views/SendPaymentView.swift index 2f0e94eda..19f3d93ae 100644 --- a/damus/Features/Wallet/Views/SendPaymentView.swift +++ b/damus/Features/Wallet/Views/SendPaymentView.swift @@ -45,11 +45,11 @@ struct SendPaymentView: View { break case .completed: // Refresh wallet to reflect new balance after payment - Task { await WalletConnect.refresh_wallet_information(damus_state: damus_state) } + Task { try await model.refreshWalletInformation() } case .failed: // Even when a wallet says it has failed, update balance just in case it is a false negative, // This might prevent the user from accidentally sending a payment twice in case of a bug. - Task { await WalletConnect.refresh_wallet_information(damus_state: damus_state) } + Task { try await model.refreshWalletInformation() } } } } diff --git a/damus/Features/Wallet/Views/WalletView.swift b/damus/Features/Wallet/Views/WalletView.swift index 6aa40a680..6064b66ee 100644 --- a/damus/Features/Wallet/Views/WalletView.swift +++ b/damus/Features/Wallet/Views/WalletView.swift @@ -16,6 +16,7 @@ struct WalletView: View { @ObservedObject var model: WalletModel @ObservedObject var settings: UserSettingsStore @State private var showBalance: Bool = false + @State private var walletRefreshTask: Task? = nil init(damus_state: DamusState, model: WalletModel? = nil) { self.damus_state = damus_state @@ -104,11 +105,10 @@ struct WalletView: View { } } .onAppear() { - Task { await self.updateWalletInformation() } + self.refreshWalletInformation() } .refreshable { - model.resetWalletStateInformation() - await self.updateWalletInformation() + self.refreshWalletInformation() } .sheet(isPresented: $show_settings, onDismiss: { self.show_settings = false }) { ScrollView { @@ -127,8 +127,20 @@ struct WalletView: View { } @MainActor - func updateWalletInformation() async { - await WalletConnect.update_wallet_information(damus_state: damus_state) + func refreshWalletInformation() { + walletRefreshTask?.cancel() + walletRefreshTask = Task { + do { + try await self.model.refreshWalletInformation() + } + catch { + guard let error = error as? 
ErrorView.UserPresentableErrorProtocol else { + Log.error("Error while refreshing wallet: %s", for: .nwc, error.localizedDescription) + return + } + present_sheet(.error(error.userPresentableError)) + } + } } } diff --git a/damus/Shared/ErrorHandling/ErrorView.swift b/damus/Shared/ErrorHandling/ErrorView.swift index 0b46e18b4..a93e4c60d 100644 --- a/damus/Shared/ErrorHandling/ErrorView.swift +++ b/damus/Shared/ErrorHandling/ErrorView.swift @@ -140,6 +140,11 @@ struct ErrorView: View { let technical_info: String? } + + /// An error that can be displayed to the user, and can be sent to the Developers as well. + protocol UserPresentableErrorProtocol: Error { + var userPresentableError: UserPresentableError { get } + } } From 9bcee298d4be72813e1bd3faacd910ccd5ceaff5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Fri, 5 Sep 2025 16:39:34 -0700 Subject: [PATCH 25/91] Fix forever-loading hashtag view MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Daniel D’Aquino --- .../Features/Search/Models/SearchModel.swift | 21 ++++++++++++------- 1 file changed, 14 insertions(+), 7 deletions(-) diff --git a/damus/Features/Search/Models/SearchModel.swift b/damus/Features/Search/Models/SearchModel.swift index 36577e6aa..af0168db2 100644 --- a/damus/Features/Search/Models/SearchModel.swift +++ b/damus/Features/Search/Models/SearchModel.swift @@ -15,7 +15,7 @@ class SearchModel: ObservableObject { var search: NostrFilter let profiles_subid = UUID().description - var listener: Task? = nil + var listener: Task? = nil let limit: UInt32 = 500 init(state: DamusState, search: NostrFilter) { @@ -42,9 +42,13 @@ class SearchModel: ObservableObject { //likes_filter.ids = ref_events.referenced_ids! listener?.cancel() listener = Task { - self.loading = true + DispatchQueue.main.async { + self.loading = true + } print("subscribing to search") - for await item in await state.nostrNetwork.reader.subscribe(filters: [search]) { + try Task.checkCancellation() + outerLoop: for await item in await state.nostrNetwork.reader.subscribe(filters: [search]) { + try Task.checkCancellation() switch item { case .event(let borrow): try? 
borrow { ev in @@ -54,12 +58,15 @@ class SearchModel: ObservableObject { } } case .eose: - break + break outerLoop } - guard let txn = NdbTxn(ndb: state.ndb) else { return } - load_profiles(context: "search", load: .from_events(self.events.all_events), damus_state: state, txn: txn) } - self.loading = false + guard let txn = NdbTxn(ndb: state.ndb) else { return } + try Task.checkCancellation() + load_profiles(context: "search", load: .from_events(self.events.all_events), damus_state: state, txn: txn) + DispatchQueue.main.async { + self.loading = false + } } } From 2bea2faf3f3559977bacaea30c2106d8fdf80edc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Fri, 5 Sep 2025 17:55:08 -0700 Subject: [PATCH 26/91] Add load more content button to the top bar MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Daniel D’Aquino --- damus/ContentView.swift | 2 +- .../Timeline/Views/InnerTimelineView.swift | 20 ---- .../Timeline/Views/PostingTimelineView.swift | 102 +++++++++++------- 3 files changed, 64 insertions(+), 60 deletions(-) diff --git a/damus/ContentView.swift b/damus/ContentView.swift index e7e1e5026..c024145d9 100644 --- a/damus/ContentView.swift +++ b/damus/ContentView.swift @@ -174,7 +174,7 @@ struct ContentView: View { } case .home: - PostingTimelineView(damus_state: damus_state!, home: home, isSideBarOpened: $isSideBarOpened, active_sheet: $active_sheet, headerOffset: $headerOffset) + PostingTimelineView(damus_state: damus_state!, home: home, homeEvents: home.events, isSideBarOpened: $isSideBarOpened, active_sheet: $active_sheet, headerOffset: $headerOffset) case .notifications: NotificationsView(state: damus, notifications: home.notifications, subtitle: $menu_subtitle) diff --git a/damus/Features/Timeline/Views/InnerTimelineView.swift b/damus/Features/Timeline/Views/InnerTimelineView.swift index 49e832968..0e572f8fd 100644 --- a/damus/Features/Timeline/Views/InnerTimelineView.swift +++ b/damus/Features/Timeline/Views/InnerTimelineView.swift @@ -29,26 +29,6 @@ struct InnerTimelineView: View { var body: some View { LazyVStack(spacing: 0) { - let incomingEvents = events.incoming.filter({ filter($0) }) - if incomingEvents.count > 0 { - Button( - action: { - notify(.scroll_to_top) - }, - label: { - HStack(spacing: 6) { - CondensedProfilePicturesView(state: state, pubkeys: incomingEvents.map({ $0.pubkey }), maxPictures: 3) - Text("Load new content", comment: "Button to load new notes in the timeline") - .bold() - } - .padding(.horizontal, 20) - .padding(.vertical, 10) - } - ) - .buttonStyle(NeutralButtonStyle(cornerRadius: 50)) - .padding(.vertical, 10) - } - let events = self.events.events if events.isEmpty { EmptyTimelineView() diff --git a/damus/Features/Timeline/Views/PostingTimelineView.swift b/damus/Features/Timeline/Views/PostingTimelineView.swift index 06848ba74..42985e611 100644 --- a/damus/Features/Timeline/Views/PostingTimelineView.swift +++ b/damus/Features/Timeline/Views/PostingTimelineView.swift @@ -11,6 +11,8 @@ struct PostingTimelineView: View { let damus_state: DamusState @ObservedObject var home: HomeModel + /// Set this to `home.events`. This is separate from `home` because we need the events object to be directly observed so that we get instant view updates + @ObservedObject var homeEvents: EventHolder @State var search: String = "" @State var results: [NostrEvent] = [] @State var initialOffset: CGFloat? 
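The SearchModel and EventsModel fixes above converge on the same streaming pattern: consume the subscription with a labeled loop, check for cancellation on every iteration, and `break outerLoop` at EOSE (a plain `break` inside the `switch` only exits the switch, so the loop never ended and the views kept loading forever). A minimal standalone sketch with illustrative names that are not part of this patch:

// Illustrative only — DemoStreamItem/consumeUntilEOSE are not part of this patch.
enum DemoStreamItem {
    case event(String)
    case eose
}

func consumeUntilEOSE(_ stream: AsyncStream<DemoStreamItem>) async throws {
    outerLoop: for await item in stream {
        try Task.checkCancellation() // stop promptly if the owning Task was cancelled
        switch item {
        case .event(let payload):
            print("got event: \(payload)")
        case .eose:
            break outerLoop // end of stored events: stop consuming the live stream
        }
    }
    // post-EOSE work (load profiles, clear the loading flag, etc.) goes here
}
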
@@ -44,54 +46,76 @@ struct PostingTimelineView: View { TimelineView(events: home.events, loading: self.loading, headerHeight: $headerHeight, headerOffset: $headerOffset, damus: damus_state, show_friend_icon: false, filter: filter) } - func HeaderView()->some View { + func HeaderView() -> some View { VStack { - VStack(spacing: 0) { - // This is needed for the Dynamic Island - HStack {} - .frame(height: getSafeAreaTop()) - - HStack(alignment: .top) { - TopbarSideMenuButton(damus_state: damus_state, isSideBarOpened: $isSideBarOpened) - - Spacer() + VStack { + VStack(spacing: 0) { + // This is needed for the Dynamic Island + HStack {} + .frame(height: getSafeAreaTop()) - Image("damus-home") - .resizable() - .frame(width:30,height:30) - .shadow(color: DamusColors.purple, radius: 2) - .opacity(isSideBarOpened ? 0 : 1) - .animation(isSideBarOpened ? .none : .default, value: isSideBarOpened) - .onTapGesture { - isSideBarOpened.toggle() + HStack(alignment: .top) { + TopbarSideMenuButton(damus_state: damus_state, isSideBarOpened: $isSideBarOpened) + + Spacer() + + Image("damus-home") + .resizable() + .frame(width:30,height:30) + .shadow(color: DamusColors.purple, radius: 2) + .opacity(isSideBarOpened ? 0 : 1) + .animation(isSideBarOpened ? .none : .default, value: isSideBarOpened) + .onTapGesture { + isSideBarOpened.toggle() + } + .padding(.leading) + + Spacer() + + HStack(alignment: .center) { + SignalView(state: damus_state, signal: home.signal) } - .padding(.leading) - - Spacer() - - HStack(alignment: .center) { - SignalView(state: damus_state, signal: home.signal) } + .frame(maxWidth: .infinity, alignment: .trailing) + } + .padding(.horizontal, 20) + + VStack(spacing: 0) { + CustomPicker(tabs: [ + (NSLocalizedString("Notes", comment: "Label for filter for seeing only notes (instead of notes and replies)."), FilterState.posts), + (NSLocalizedString("Notes & Replies", comment: "Label for filter for seeing notes and replies (instead of only notes)."), FilterState.posts_and_replies) + ], + selection: $filter_state) + + Divider() + .frame(height: 1) } - .frame(maxWidth: .infinity, alignment: .trailing) } - .padding(.horizontal, 20) + .background { + DamusColors.adaptableWhite + .ignoresSafeArea() + } - VStack(spacing: 0) { - CustomPicker(tabs: [ - (NSLocalizedString("Notes", comment: "Label for filter for seeing only notes (instead of notes and replies)."), FilterState.posts), - (NSLocalizedString("Notes & Replies", comment: "Label for filter for seeing notes and replies (instead of only notes)."), FilterState.posts_and_replies) - ], - selection: $filter_state) - - Divider() - .frame(height: 1) + if homeEvents.incoming.count > 0 { + Button( + action: { + notify(.scroll_to_top) + }, + label: { + HStack(spacing: 6) { + CondensedProfilePicturesView(state: damus_state, pubkeys: homeEvents.incoming.map({ $0.pubkey }), maxPictures: 3) + .scaleEffect(0.75) + Text("Load new content", comment: "Button to load new notes in the timeline") + .bold() + } + .padding(.horizontal, 10) + .padding(.vertical, 5) + } + ) + .buttonStyle(NeutralButtonStyle(cornerRadius: 50)) + .padding(.vertical, 10) } } - .background { - DamusColors.adaptableWhite - .ignoresSafeArea() - } } var body: some View { From 3290e1f9d2649161f70d54e2773280c22ad1a5ce Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Wed, 10 Sep 2025 13:52:39 -0700 Subject: [PATCH 27/91] Improve NostrNetworkManager interfaces MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This commit improves 
NostrNetworkManager interfaces to be easier to use, and with more options on how to read data from the Nostr network This reduces the amount of duplicate logic in handling streams, and also prevents possible common mistakes when using the standard subscribe method. This fixes an issue with the mute list manager (which prompted for this interface improvement, as the root cause is similar to other similar issues). Closes: https://github.com/damus-io/damus/issues/3221 Signed-off-by: Daniel D’Aquino --- .../NostrNetworkManager.swift | 89 ----------- .../SubscriptionManager.swift | 138 ++++++++++++++++-- .../UserRelayListManager.swift | 6 +- damus/Core/Nostr/RelayPool.swift | 7 +- damus/Features/Chat/Models/ThreadModel.swift | 6 +- damus/Features/Events/EventLoaderView.swift | 19 +-- .../Features/Events/Models/EventsModel.swift | 15 +- .../Models/LoadableNostrEventView.swift | 4 +- .../FollowPack/Models/FollowPackModel.swift | 23 ++- .../Follows/Models/FollowersModel.swift | 14 +- .../NIP05/Models/NIP05DomainEventsModel.swift | 13 +- .../Onboarding/SuggestedUsersViewModel.swift | 8 +- .../Profile/Models/ProfileModel.swift | 20 +-- .../Search/Models/SearchHomeModel.swift | 24 +-- .../Features/Search/Models/SearchModel.swift | 17 +-- .../Search/Views/SearchingEventView.swift | 6 +- .../Features/Timeline/Models/HomeModel.swift | 63 ++------ .../Features/Wallet/Models/WalletModel.swift | 6 +- damus/Features/Zaps/Models/ZapsModel.swift | 11 +- .../NostrNetworkManagerTests.swift | 4 +- nostrdb/UnownedNdbNote.swift | 115 ++++++++++++--- 21 files changed, 312 insertions(+), 296 deletions(-) diff --git a/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift b/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift index 50f51f518..ac11b97af 100644 --- a/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift +++ b/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift @@ -89,95 +89,6 @@ class NostrNetworkManager { self.pool.send(.event(event), to: targetRelays, skip_ephemeral: skipEphemeralRelays) } - func query(filters: [NostrFilter], to: [RelayURL]? = nil) async -> [NostrEvent] { - var events: [NostrEvent] = [] - for await item in self.reader.subscribe(filters: filters, to: to) { - switch item { - case .event(let borrow): - try? borrow { event in - events.append(event.toOwned()) - } - case .eose: - break - } - } - return events - } - - /// Finds a replaceable event based on an `naddr` address. - /// - /// - Parameters: - /// - naddr: the `naddr` address - func lookup(naddr: NAddr) async -> NostrEvent? { - var nostrKinds: [NostrKind]? = NostrKind(rawValue: naddr.kind).map { [$0] } - - let filter = NostrFilter(kinds: nostrKinds, authors: [naddr.author]) - - for await item in self.reader.subscribe(filters: [filter]) { - switch item { - case .event(let borrow): - var event: NostrEvent? = nil - try? borrow { ev in - event = ev.toOwned() - } - if event?.referenced_params.first?.param.string() == naddr.identifier { - return event - } - case .eose: - break - } - } - return nil - } - - // TODO: Improve this. This is mostly intact to keep compatibility with its predecessor, but we can do better - func findEvent(query: FindEvent) async -> FoundEvent? { - var filter: NostrFilter? 
= nil - let find_from = query.find_from - let query = query.type - - switch query { - case .profile(let pubkey): - if let profile_txn = delegate.ndb.lookup_profile(pubkey), - let record = profile_txn.unsafeUnownedValue, - record.profile != nil - { - return .profile(pubkey) - } - filter = NostrFilter(kinds: [.metadata], limit: 1, authors: [pubkey]) - case .event(let evid): - if let event = delegate.ndb.lookup_note(evid)?.unsafeUnownedValue?.to_owned() { - return .event(event) - } - filter = NostrFilter(ids: [evid], limit: 1) - } - - var attempts: Int = 0 - var has_event = false - guard let filter else { return nil } - - for await item in self.reader.subscribe(filters: [filter], to: find_from) { - switch item { - case .event(let borrow): - var result: FoundEvent? = nil - try? borrow { event in - switch query { - case .profile: - if event.known_kind == .metadata { - result = .profile(event.pubkey) - } - case .event: - result = .event(event.toOwned()) - } - } - return result - case .eose: - return nil - } - } - return nil - } - func getRelay(_ id: RelayURL) -> RelayPool.Relay? { pool.get_relay(id) } diff --git a/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift b/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift index 476a05fe7..e5ce9ab38 100644 --- a/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift +++ b/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift @@ -23,7 +23,29 @@ extension NostrNetworkManager { self.taskManager = TaskManager() } - // MARK: - Reading data from Nostr + // MARK: - Subscribing and Streaming data from Nostr + + /// Streams notes until the EOSE signal + func streamNotesUntilEndOfStoredEvents(filters: [NostrFilter], to desiredRelays: [RelayURL]? = nil, timeout: Duration? = nil) -> AsyncStream { + let timeout = timeout ?? .seconds(10) + return AsyncStream { continuation in + let streamingTask = Task { + outerLoop: for await item in self.subscribe(filters: filters, to: desiredRelays, timeout: timeout) { + try Task.checkCancellation() + switch item { + case .event(let lender): + continuation.yield(lender) + case .eose: + break outerLoop + } + } + continuation.finish() + } + continuation.onTermination = { @Sendable _ in + streamingTask.cancel() + } + } + } /// Subscribes to data from user's relays, for a maximum period of time — after which the stream will end. /// @@ -113,17 +135,9 @@ extension NostrNetworkManager { case .eose: continuation.yield(.eose) case .event(let noteKey): - let lender: NdbNoteLender = { lend in - guard let ndbNoteTxn = self.ndb.lookup_note_by_key(noteKey) else { - throw NdbNoteLenderError.errorLoadingNote - } - guard let unownedNote = UnownedNdbNote(ndbNoteTxn) else { - throw NdbNoteLenderError.errorLoadingNote - } - lend(unownedNote) - } + let lender = NdbNoteLender(ndb: self.ndb, noteKey: noteKey) try Task.checkCancellation() - continuation.yield(.event(borrow: lender)) + continuation.yield(.event(lender: lender)) } } } @@ -166,6 +180,106 @@ extension NostrNetworkManager { } } + // MARK: - Finding specific data from Nostr + + /// Finds a non-replaceable event based on a note ID + func lookup(noteId: NoteId, to targetRelays: [RelayURL]? = nil, timeout: Duration? = nil) async throws -> NdbNoteLender? 
{ + let filter = NostrFilter(ids: [noteId], limit: 1) + + // Since note ids point to immutable objects, we can do a simple ndb lookup first + if let noteKey = self.ndb.lookup_note_key(noteId) { + return NdbNoteLender(ndb: self.ndb, noteKey: noteKey) + } + + // Not available in local ndb, stream from network + outerLoop: for await item in self.pool.subscribe(filters: [NostrFilter(ids: [noteId], limit: 1)], to: targetRelays, eoseTimeout: timeout) { + switch item { + case .event(let event): + return NdbNoteLender(ownedNdbNote: event) + case .eose: + break outerLoop + } + } + + return nil + } + + func query(filters: [NostrFilter], to: [RelayURL]? = nil, timeout: Duration? = nil) async -> [NostrEvent] { + var events: [NostrEvent] = [] + for await noteLender in self.streamNotesUntilEndOfStoredEvents(filters: filters, to: to, timeout: timeout) { + noteLender.justUseACopy({ events.append($0) }) + } + return events + } + + /// Finds a replaceable event based on an `naddr` address. + /// + /// - Parameters: + /// - naddr: the `naddr` address + func lookup(naddr: NAddr, to targetRelays: [RelayURL]? = nil, timeout: Duration? = nil) async -> NostrEvent? { + var nostrKinds: [NostrKind]? = NostrKind(rawValue: naddr.kind).map { [$0] } + + let filter = NostrFilter(kinds: nostrKinds, authors: [naddr.author]) + + for await noteLender in self.streamNotesUntilEndOfStoredEvents(filters: [filter], to: targetRelays, timeout: timeout) { + // TODO: This can be refactored to borrow the note instead of copying it. But we need to implement `referenced_params` on `UnownedNdbNote` to do so + guard let event = noteLender.justGetACopy() else { continue } + if event.referenced_params.first?.param.string() == naddr.identifier { + return event + } + } + + return nil + } + + // TODO: Improve this. This is mostly intact to keep compatibility with its predecessor, but we can do better + func findEvent(query: FindEvent) async -> FoundEvent? { + var filter: NostrFilter? = nil + let find_from = query.find_from + let query = query.type + + switch query { + case .profile(let pubkey): + if let profile_txn = self.ndb.lookup_profile(pubkey), + let record = profile_txn.unsafeUnownedValue, + record.profile != nil + { + return .profile(pubkey) + } + filter = NostrFilter(kinds: [.metadata], limit: 1, authors: [pubkey]) + case .event(let evid): + if let event = self.ndb.lookup_note(evid)?.unsafeUnownedValue?.to_owned() { + return .event(event) + } + filter = NostrFilter(ids: [evid], limit: 1) + } + + var attempts: Int = 0 + var has_event = false + guard let filter else { return nil } + + for await noteLender in self.streamNotesUntilEndOfStoredEvents(filters: [filter], to: find_from) { + let foundEvent: FoundEvent? = try? 
noteLender.borrow({ event in + switch query { + case .profile: + if event.known_kind == .metadata { + return .profile(event.pubkey) + } + case .event: + return .event(event.toOwned()) + } + return nil + }) + if let foundEvent { + return foundEvent + } + } + + return nil + } + + // MARK: - Task management + func cancelAllTasks() async { await self.taskManager.cancelAllTasks() } @@ -199,7 +313,7 @@ extension NostrNetworkManager { enum StreamItem { /// An event which can be borrowed from NostrDB - case event(borrow: NdbNoteLender) + case event(lender: NdbNoteLender) /// The end of stored events case eose } diff --git a/damus/Core/Networking/NostrNetworkManager/UserRelayListManager.swift b/damus/Core/Networking/NostrNetworkManager/UserRelayListManager.swift index 8f7733786..01225fc53 100644 --- a/damus/Core/Networking/NostrNetworkManager/UserRelayListManager.swift +++ b/damus/Core/Networking/NostrNetworkManager/UserRelayListManager.swift @@ -135,15 +135,15 @@ extension NostrNetworkManager { let filter = NostrFilter(kinds: [.relay_list], authors: [delegate.keypair.pubkey]) for await item in self.reader.subscribe(filters: [filter]) { switch item { - case .event(borrow: let borrow): // Signature validity already ensured at this point + case .event(let lender): // Signature validity already ensured at this point let currentRelayListCreationDate = self.getUserCurrentRelayListCreationDate() - try? borrow { note in + try? lender.borrow({ note in guard note.pubkey == self.delegate.keypair.pubkey else { return } // Ensure this new list was ours guard note.createdAt > (currentRelayListCreationDate ?? 0) else { return } // Ensure this is a newer list guard let relayList = try? NIP65.RelayList(event: note) else { return } // Ensure it is a valid NIP-65 list try? self.set(userRelayList: relayList) // Set the validated list - } + }) case .eose: continue } } diff --git a/damus/Core/Nostr/RelayPool.swift b/damus/Core/Nostr/RelayPool.swift index a85fe6b9c..d38ce7be3 100644 --- a/damus/Core/Nostr/RelayPool.swift +++ b/damus/Core/Nostr/RelayPool.swift @@ -219,9 +219,10 @@ class RelayPool { /// - Parameters: /// - filters: The filters specifying the desired content. /// - desiredRelays: The desired relays which to subsctibe to. If `nil`, it defaults to the `RelayPool`'s default list - /// - eoseTimeout: The maximum timeout which to give up waiting for the eoseSignal, in seconds + /// - eoseTimeout: The maximum timeout which to give up waiting for the eoseSignal /// - Returns: Returns an async stream that callers can easily consume via a for-loop - func subscribe(filters: [NostrFilter], to desiredRelays: [RelayURL]? = nil, eoseTimeout: TimeInterval = 10) -> AsyncStream { + func subscribe(filters: [NostrFilter], to desiredRelays: [RelayURL]? = nil, eoseTimeout: Duration? = nil) -> AsyncStream { + let eoseTimeout = eoseTimeout ?? .seconds(10) let desiredRelays = desiredRelays ?? self.relays.map({ $0.descriptor.url }) return AsyncStream { continuation in let sub_id = UUID().uuidString @@ -255,7 +256,7 @@ class RelayPool { } }, to: desiredRelays) Task { - try? await Task.sleep(nanoseconds: 1_000_000_000 * UInt64(eoseTimeout)) + try? 
await Task.sleep(for: eoseTimeout) if !eoseSent { continuation.yield(with: .success(.eose)) } } continuation.onTermination = { @Sendable _ in diff --git a/damus/Features/Chat/Models/ThreadModel.swift b/damus/Features/Chat/Models/ThreadModel.swift index e9df21307..e22ca6379 100644 --- a/damus/Features/Chat/Models/ThreadModel.swift +++ b/damus/Features/Chat/Models/ThreadModel.swift @@ -117,10 +117,8 @@ class ThreadModel: ObservableObject { Log.info("subscribing to thread %s ", for: .render, original_event.id.hex()) for await item in damus_state.nostrNetwork.reader.subscribe(filters: base_filters + meta_filters) { switch item { - case .event(let borrow): - try? borrow { event in - handle_event(ev: event.toOwned()) - } + case .event(let lender): + lender.justUseACopy({ handle_event(ev: $0) }) case .eose: guard let txn = NdbTxn(ndb: damus_state.ndb) else { return } load_profiles(context: "thread", load: .from_events(Array(event_map.events)), damus_state: damus_state, txn: txn) diff --git a/damus/Features/Events/EventLoaderView.swift b/damus/Features/Events/EventLoaderView.swift index 511314ca2..82b02c168 100644 --- a/damus/Features/Events/EventLoaderView.swift +++ b/damus/Features/Events/EventLoaderView.swift @@ -28,27 +28,16 @@ struct EventLoaderView: View { self.loadingTask?.cancel() } - func subscribe(filters: [NostrFilter]) { + func subscribe() { self.loadingTask?.cancel() self.loadingTask = Task { - for await item in await damus_state.nostrNetwork.reader.subscribe(filters: filters) { - switch item { - case .event(let borrow): - try? borrow { ev in - event = ev.toOwned() - } - break - case .eose: - break - } - } + let lender = try? await damus_state.nostrNetwork.reader.lookup(noteId: self.event_id) + lender?.justUseACopy({ event = $0 }) } } func load() { - subscribe(filters: [ - NostrFilter(ids: [self.event_id], limit: 1) - ]) + subscribe() } var body: some View { diff --git a/damus/Features/Events/Models/EventsModel.swift b/damus/Features/Events/Models/EventsModel.swift index f9f47739c..049ecfe76 100644 --- a/damus/Features/Events/Models/EventsModel.swift +++ b/damus/Features/Events/Models/EventsModel.swift @@ -73,16 +73,13 @@ class EventsModel: ObservableObject { DispatchQueue.main.async { self.loading = true } outerLoop: for await item in state.nostrNetwork.reader.subscribe(filters: [get_filter()]) { switch item { - case .event(let borrow): - var event: NostrEvent? = nil - try? borrow { ev in - event = ev.toOwned() - } - guard let event else { return } + case .event(let lender): Task { - if await events.insert(event) { - DispatchQueue.main.async { self.objectWillChange.send() } - } + await lender.justUseACopy({ event in + if await events.insert(event) { + DispatchQueue.main.async { self.objectWillChange.send() } + } + }) } case .eose: DispatchQueue.main.async { self.loading = false } diff --git a/damus/Features/Events/Models/LoadableNostrEventView.swift b/damus/Features/Events/Models/LoadableNostrEventView.swift index af9bf3487..a886b821b 100644 --- a/damus/Features/Events/Models/LoadableNostrEventView.swift +++ b/damus/Features/Events/Models/LoadableNostrEventView.swift @@ -50,7 +50,7 @@ class LoadableNostrEventViewModel: ObservableObject { /// Asynchronously find an event from NostrDB or from the network (if not available on NostrDB) private func loadEvent(noteId: NoteId) async -> NostrEvent? 
{ - let res = await damus_state.nostrNetwork.findEvent(query: .event(evid: noteId)) + let res = await damus_state.nostrNetwork.reader.findEvent(query: .event(evid: noteId)) guard let res, case .event(let ev) = res else { return nil } return ev } @@ -78,7 +78,7 @@ class LoadableNostrEventViewModel: ObservableObject { return .unknown_or_unsupported_kind } case .naddr(let naddr): - guard let event = await damus_state.nostrNetwork.lookup(naddr: naddr) else { return .not_found } + guard let event = await damus_state.nostrNetwork.reader.lookup(naddr: naddr) else { return .not_found } return .loaded(route: Route.Thread(thread: ThreadModel(event: event, damus_state: damus_state))) } } diff --git a/damus/Features/FollowPack/Models/FollowPackModel.swift b/damus/Features/FollowPack/Models/FollowPackModel.swift index f938bd87a..7049efa99 100644 --- a/damus/Features/FollowPack/Models/FollowPackModel.swift +++ b/damus/Features/FollowPack/Models/FollowPackModel.swift @@ -45,21 +45,18 @@ class FollowPackModel: ObservableObject { for await item in damus_state.nostrNetwork.reader.subscribe(filters: [filter], to: to_relays) { switch item { - case .event(borrow: let borrow): - var event: NostrEvent? = nil - try? borrow { ev in - event = ev.toOwned() - } - guard let event else { return } - let should_show_event = await should_show_event(state: damus_state, ev: event) - if event.is_textlike && should_show_event && !event.is_reply() - { - if await self.events.insert(event) { - DispatchQueue.main.async { - self.objectWillChange.send() + case .event(lender: let lender): + await lender.justUseACopy({ event in + let should_show_event = await should_show_event(state: damus_state, ev: event) + if event.is_textlike && should_show_event && !event.is_reply() + { + if await self.events.insert(event) { + DispatchQueue.main.async { + self.objectWillChange.send() + } } } - } + }) case .eose: continue } diff --git a/damus/Features/Follows/Models/FollowersModel.swift b/damus/Features/Follows/Models/FollowersModel.swift index df1d0ee8e..e696525ff 100644 --- a/damus/Features/Follows/Models/FollowersModel.swift +++ b/damus/Features/Follows/Models/FollowersModel.swift @@ -38,12 +38,10 @@ class FollowersModel: ObservableObject { let filters = [filter] self.listener?.cancel() self.listener = Task { - for await item in await damus_state.nostrNetwork.reader.subscribe(filters: filters) { + for await item in damus_state.nostrNetwork.reader.subscribe(filters: filters) { switch item { - case .event(let borrow): - try? borrow { event in - self.handle_event(ev: event.toOwned()) - } + case .event(let lender): + lender.justUseACopy({ self.handle_event(ev: $0) }) case .eose: guard let txn = NdbTxn(ndb: self.damus_state.ndb) else { return } load_profiles(txn: txn) @@ -82,10 +80,8 @@ class FollowersModel: ObservableObject { self.profilesListener = Task { for await item in await damus_state.nostrNetwork.reader.subscribe(filters: [filter]) { switch item { - case .event(let borrow): - try? 
borrow { event in - self.handle_event(ev: event.toOwned()) - } + case .event(let lender): + lender.justUseACopy({ self.handle_event(ev: $0) }) case .eose: break } } diff --git a/damus/Features/NIP05/Models/NIP05DomainEventsModel.swift b/damus/Features/NIP05/Models/NIP05DomainEventsModel.swift index 2037b9ba9..545c0927f 100644 --- a/damus/Features/NIP05/Models/NIP05DomainEventsModel.swift +++ b/damus/Features/NIP05/Models/NIP05DomainEventsModel.swift @@ -66,16 +66,11 @@ class NIP05DomainEventsModel: ObservableObject { for await item in state.nostrNetwork.reader.subscribe(filters: [filter]) { switch item { - case .event(borrow: let borrow): - var event: NostrEvent? = nil - try? borrow { ev in - event = ev.toOwned() - guard let txn = NdbTxn(ndb: state.ndb) else { return } - load_profiles(context: "search", load: .from_events(self.events.all_events), damus_state: state, txn: txn) - } - guard let event else { return } - await self.add_event(event) + case .event(let lender): + await lender.justUseACopy({ await self.add_event($0) }) case .eose: + guard let txn = NdbTxn(ndb: state.ndb) else { return } + load_profiles(context: "search", load: .from_events(self.events.all_events), damus_state: state, txn: txn) DispatchQueue.main.async { self.loading = false } continue } diff --git a/damus/Features/Onboarding/SuggestedUsersViewModel.swift b/damus/Features/Onboarding/SuggestedUsersViewModel.swift index 1d3fc5832..d53f878b8 100644 --- a/damus/Features/Onboarding/SuggestedUsersViewModel.swift +++ b/damus/Features/Onboarding/SuggestedUsersViewModel.swift @@ -194,9 +194,9 @@ class SuggestedUsersViewModel: ObservableObject { guard !Task.isCancelled else { break } switch item { - case .event(let borrow): - try? borrow { event in - let followPack = FollowPackEvent.parse(from: event.toOwned()) + case .event(let lender): + lender.justUseACopy({ event in + let followPack = FollowPackEvent.parse(from: event) guard let id = followPack.uuid else { return } @@ -209,7 +209,7 @@ class SuggestedUsersViewModel: ObservableObject { } packsById[id] = latestPackForThisId - } + }) case .eose: break } diff --git a/damus/Features/Profile/Models/ProfileModel.swift b/damus/Features/Profile/Models/ProfileModel.swift index 5004b7258..a50e252a8 100644 --- a/damus/Features/Profile/Models/ProfileModel.swift +++ b/damus/Features/Profile/Models/ProfileModel.swift @@ -78,10 +78,8 @@ class ProfileModel: ObservableObject, Equatable { text_filter.limit = 500 for await item in damus.nostrNetwork.reader.subscribe(filters: [text_filter]) { switch item { - case .event(let borrow): - try? borrow { event in - handleNostrEvent(event.toOwned()) - } + case .event(let lender): + lender.justUseACopy({ handleNostrEvent($0) }) case .eose: break } } @@ -96,10 +94,8 @@ class ProfileModel: ObservableObject, Equatable { profile_filter.authors = [pubkey] for await item in damus.nostrNetwork.reader.subscribe(filters: [profile_filter, relay_list_filter]) { switch item { - case .event(let borrow): - try? borrow { event in - handleNostrEvent(event.toOwned()) - } + case .event(let lender): + lender.justUseACopy({ handleNostrEvent($0) }) case .eose: break } } @@ -129,8 +125,8 @@ class ProfileModel: ObservableObject, Equatable { print("subscribing to conversation events from and to profile \(pubkey)") for await item in self.damus.nostrNetwork.reader.subscribe(filters: [conversations_filter_them, conversations_filter_us]) { switch item { - case .event(borrow: let borrow): - try? borrow { ev in + case .event(let lender): + try? 
lender.borrow { ev in if !seen_event.contains(ev.id) { let event = ev.toOwned() Task { await self.add_event(event) } @@ -210,8 +206,8 @@ class ProfileModel: ObservableObject, Equatable { self.findRelaysListener = Task { for await item in await damus.nostrNetwork.reader.subscribe(filters: [profile_filter]) { switch item { - case .event(let borrow): - try? borrow { event in + case .event(let lender): + try? lender.borrow { event in if case .contacts = event.known_kind { // TODO: Is this correct? self.legacy_relay_list = decode_json_relays(event.content) diff --git a/damus/Features/Search/Models/SearchHomeModel.swift b/damus/Features/Search/Models/SearchHomeModel.swift index acf1c0856..723b5b56c 100644 --- a/damus/Features/Search/Models/SearchHomeModel.swift +++ b/damus/Features/Search/Models/SearchHomeModel.swift @@ -53,13 +53,8 @@ class SearchHomeModel: ObservableObject { outerLoop: for await item in damus_state.nostrNetwork.reader.subscribe(filters: [get_base_filter(), follow_list_filter], to: to_relays) { switch item { - case .event(let borrow): - var event: NostrEvent? = nil - try? borrow { ev in - event = ev.toOwned() - } - guard let event else { return } - await self.handleEvent(event) + case .event(let lender): + await lender.justUseACopy({ await self.handleEvent($0) }) case .eose: break outerLoop } @@ -136,15 +131,12 @@ func load_profiles(context: String, load: PubkeysToLoad, damus_state: DamusSt for await item in damus_state.nostrNetwork.reader.subscribe(filters: [filter]) { let now = UInt64(Date.now.timeIntervalSince1970) switch item { - case .event(let borrow): - var event: NostrEvent? = nil - try? borrow { ev in - event = ev.toOwned() - } - guard let event else { return } - if event.known_kind == .metadata { - damus_state.ndb.write_profile_last_fetched(pubkey: event.pubkey, fetched_at: now) - } + case .event(let lender): + lender.justUseACopy({ event in + if event.known_kind == .metadata { + damus_state.ndb.write_profile_last_fetched(pubkey: event.pubkey, fetched_at: now) + } + }) case .eose: break } diff --git a/damus/Features/Search/Models/SearchModel.swift b/damus/Features/Search/Models/SearchModel.swift index af0168db2..3547f4630 100644 --- a/damus/Features/Search/Models/SearchModel.swift +++ b/damus/Features/Search/Models/SearchModel.swift @@ -47,20 +47,13 @@ class SearchModel: ObservableObject { } print("subscribing to search") try Task.checkCancellation() - outerLoop: for await item in await state.nostrNetwork.reader.subscribe(filters: [search]) { - try Task.checkCancellation() - switch item { - case .event(let borrow): - try? 
borrow { ev in - let event = ev.toOwned() - if event.is_textlike && event.should_show_event { - Task { await self.add_event(event) } - } - } - case .eose: - break outerLoop + let events = await state.nostrNetwork.reader.query(filters: [search]) + for event in events { + if event.is_textlike && event.should_show_event { + await self.add_event(event) } } + guard let txn = NdbTxn(ndb: state.ndb) else { return } try Task.checkCancellation() load_profiles(context: "search", load: .from_events(self.events.all_events), damus_state: state, txn: txn) diff --git a/damus/Features/Search/Views/SearchingEventView.swift b/damus/Features/Search/Views/SearchingEventView.swift index 7f132bc32..b39a92bbf 100644 --- a/damus/Features/Search/Views/SearchingEventView.swift +++ b/damus/Features/Search/Views/SearchingEventView.swift @@ -78,7 +78,7 @@ struct SearchingEventView: View { case .event(let note_id): Task { - let res = await state.nostrNetwork.findEvent(query: .event(evid: note_id)) + let res = await state.nostrNetwork.reader.findEvent(query: .event(evid: note_id)) guard case .event(let ev) = res else { self.search_state = .not_found return @@ -87,7 +87,7 @@ struct SearchingEventView: View { } case .profile(let pubkey): Task { - let res = await state.nostrNetwork.findEvent(query: .profile(pubkey: pubkey)) + let res = await state.nostrNetwork.reader.findEvent(query: .profile(pubkey: pubkey)) guard case .profile(let pubkey) = res else { self.search_state = .not_found return @@ -96,7 +96,7 @@ struct SearchingEventView: View { } case .naddr(let naddr): Task { - let res = await state.nostrNetwork.lookup(naddr: naddr) + let res = await state.nostrNetwork.reader.lookup(naddr: naddr) guard let res = res else { self.search_state = .not_found return diff --git a/damus/Features/Timeline/Models/HomeModel.swift b/damus/Features/Timeline/Models/HomeModel.swift index 4a5fa062b..37f64d887 100644 --- a/damus/Features/Timeline/Models/HomeModel.swift +++ b/damus/Features/Timeline/Models/HomeModel.swift @@ -453,13 +453,8 @@ class HomeModel: ContactsDelegate, ObservableObject { let filter = NostrFilter(kinds: [.contacts], limit: 1, authors: [damus_state.pubkey]) for await item in damus_state.nostrNetwork.reader.subscribe(filters: [filter]) { switch item { - case .event(let borrow): - var event: NostrEvent? = nil - try? borrow { ev in - event = ev.toOwned() - } - guard let event else { return } - await process_event(ev: event, context: .initialContactList) + case .event(let lender): + await lender.justUseACopy({ await process_event(ev: $0, context: .initialContactList) }) continue case .eose: if !done_init { @@ -476,13 +471,8 @@ class HomeModel: ContactsDelegate, ObservableObject { let relayListFilter = NostrFilter(kinds: [.relay_list], limit: 1, authors: [damus_state.pubkey]) for await item in damus_state.nostrNetwork.reader.subscribe(filters: [relayListFilter]) { switch item { - case .event(let borrow): - var event: NostrEvent? = nil - try? borrow { ev in - event = ev.toOwned() - } - guard let event else { return } - await process_event(ev: event, context: .initialRelayList) + case .event(let lender): + await lender.justUseACopy({ await process_event(ev: $0, context: .initialRelayList) }) case .eose: break } } @@ -545,13 +535,8 @@ class HomeModel: ContactsDelegate, ObservableObject { self.contactsHandlerTask = Task { for await item in damus_state.nostrNetwork.reader.subscribe(filters: contacts_filters) { switch item { - case .event(let borrow): - var event: NostrEvent? = nil - try? 
borrow { ev in - var event = ev.toOwned() - } - guard let event else { return } - await self.process_event(ev: event, context: .contacts) + case .event(let lender): + await lender.justUseACopy({ await process_event(ev: $0, context: .contacts) }) case .eose: continue } } @@ -560,13 +545,8 @@ class HomeModel: ContactsDelegate, ObservableObject { self.notificationsHandlerTask = Task { for await item in damus_state.nostrNetwork.reader.subscribe(filters: notifications_filters) { switch item { - case .event(let borrow): - var event: NostrEvent? = nil - try? borrow { ev in - event = ev.toOwned() - } - guard let theEvent = event else { return } - await self.process_event(ev: theEvent, context: .notifications) + case .event(let lender): + await lender.justUseACopy({ await process_event(ev: $0, context: .notifications) }) case .eose: guard let txn = NdbTxn(ndb: damus_state.ndb) else { return } load_profiles(context: "notifications", load: .from_keys(notifications.uniq_pubkeys()), damus_state: damus_state, txn: txn) @@ -577,13 +557,8 @@ class HomeModel: ContactsDelegate, ObservableObject { self.dmsHandlerTask = Task { for await item in damus_state.nostrNetwork.reader.subscribe(filters: dms_filters) { switch item { - case .event(let borrow): - var event: NostrEvent? = nil - try? borrow { ev in - event = ev.toOwned() - } - guard let event else { return } - await self.process_event(ev: event, context: .dms) + case .event(let lender): + await lender.justUseACopy({ await process_event(ev: $0, context: .dms) }) case .eose: guard let txn = NdbTxn(ndb: damus_state.ndb) else { return } var dms = dms.dms.flatMap { $0.events } @@ -602,13 +577,8 @@ class HomeModel: ContactsDelegate, ObservableObject { filter.limit = 0 for await item in damus_state.nostrNetwork.reader.subscribe(filters: [filter], to: [nwc.relay]) { switch item { - case .event(let borrow): - var event: NostrEvent? = nil - try? borrow { ev in - event = ev.toOwned() - } - guard let event else { return } - await self.process_event(ev: event, context: .nwc) + case .event(let lender): + await lender.justUseACopy({ await process_event(ev: $0, context: .nwc) }) case .eose: continue } } @@ -663,13 +633,8 @@ class HomeModel: ContactsDelegate, ObservableObject { } for await item in damus_state.nostrNetwork.reader.subscribe(filters: home_filters) { switch item { - case .event(let borrow): - var event: NostrEvent? = nil - try? borrow { ev in - event = ev.toOwned() - } - guard let event else { return } - await self.process_event(ev: event, context: .home) + case .event(let lender): + await lender.justUseACopy({ await process_event(ev: $0, context: .home) }) case .eose: guard let txn = NdbTxn(ndb: damus_state.ndb) else { return } DispatchQueue.main.async { diff --git a/damus/Features/Wallet/Models/WalletModel.swift b/damus/Features/Wallet/Models/WalletModel.swift index 34d916ac4..4c7d90367 100644 --- a/damus/Features/Wallet/Models/WalletModel.swift +++ b/damus/Features/Wallet/Models/WalletModel.swift @@ -184,10 +184,8 @@ class WalletModel: ObservableObject { nostrNetwork.send(event: requestEvent, to: [currentNwcUrl.relay], skipEphemeralRelays: false) for await item in nostrNetwork.reader.subscribe(filters: responseFilters, to: [currentNwcUrl.relay], timeout: timeout) { switch item { - case .event(borrow: let borrow): - var responseEvent: NostrEvent? = nil - try? borrow { ev in responseEvent = ev.toOwned() } - guard let responseEvent else { throw .internalError } + case .event(let lender): + guard let responseEvent = try? 
lender.getCopy() else { throw .internalError } let fullWalletResponse: WalletConnect.FullWalletResponse do { fullWalletResponse = try WalletConnect.FullWalletResponse(from: responseEvent, nwc: currentNwcUrl) } diff --git a/damus/Features/Zaps/Models/ZapsModel.swift b/damus/Features/Zaps/Models/ZapsModel.swift index 49af62d5c..a3e26e67f 100644 --- a/damus/Features/Zaps/Models/ZapsModel.swift +++ b/damus/Features/Zaps/Models/ZapsModel.swift @@ -35,13 +35,10 @@ class ZapsModel: ObservableObject { zapCommsListener = Task { for await item in state.nostrNetwork.reader.subscribe(filters: [filter]) { switch item { - case .event(let borrow): - var event: NostrEvent? = nil - try? borrow { ev in - event = ev.toOwned() - } - guard let event else { return } - await self.handle_event(ev: event) + case .event(let lender): + await lender.justUseACopy({ event in + await self.handle_event(ev: event) + }) case .eose: let events = state.events.lookup_zaps(target: target).map { $0.request.ev } guard let txn = NdbTxn(ndb: state.ndb) else { return } diff --git a/damusTests/NostrNetworkManagerTests/NostrNetworkManagerTests.swift b/damusTests/NostrNetworkManagerTests/NostrNetworkManagerTests.swift index 244b4061e..271c6f15f 100644 --- a/damusTests/NostrNetworkManagerTests/NostrNetworkManagerTests.swift +++ b/damusTests/NostrNetworkManagerTests/NostrNetworkManagerTests.swift @@ -47,8 +47,8 @@ class NostrNetworkManagerTests: XCTestCase { Task { for await item in self.damusState!.nostrNetwork.reader.subscribe(filters: [filter]) { switch item { - case .event(borrow: let borrow): - try? borrow { event in + case .event(let lender): + try? lender.borrow { event in receivedCount += 1 if eventIds.contains(event.id) { XCTFail("Got duplicate event ID: \(event.id) ") diff --git a/nostrdb/UnownedNdbNote.swift b/nostrdb/UnownedNdbNote.swift index ee03cbed8..550c4b71c 100644 --- a/nostrdb/UnownedNdbNote.swift +++ b/nostrdb/UnownedNdbNote.swift @@ -5,7 +5,7 @@ // Created by Daniel D’Aquino on 2025-03-25. // -/// A function that allows an unowned NdbNote to be lent out temporarily +/// Allows an unowned note to be safely lent out temporarily. /// /// Use this to provide access to NostrDB unowned notes in a way that has much better compile-time safety guarantees. /// @@ -14,16 +14,9 @@ /// ## Lending out or providing Ndb notes /// /// ```swift +/// let noteKey = functionThatDoesSomeLookupOrSubscriptionOnNDB() /// // Define the lender -/// let lender: NdbNoteLender = { lend in -/// guard let ndbNoteTxn = ndb.lookup_note(noteId) else { // Note: Must have access to `Ndb` -/// throw NdbNoteLenderError.errorLoadingNote // Throw errors if loading fails -/// } -/// guard let unownedNote = UnownedNdbNote(ndbNoteTxn) else { -/// throw NdbNoteLenderError.errorLoadingNote -/// } -/// lend(unownedNote) // Lend out the Unowned Ndb note -/// } +/// let lender = NdbNoteLender(ndb: self.ndb, noteKey: noteKey) /// return lender // Return or pass the lender to another class /// ``` /// @@ -32,17 +25,101 @@ /// Assuming you are given a lender, here is how you can use it: /// /// ```swift -/// let borrow: NdbNoteLender = functionThatProvidesALender() -/// try? 
borrow { note in // You can optionally handle errors if borrowing fails -/// self.date = note.createdAt // You can do things with the note without copying it over -/// // self.note = note // Not allowed by the compiler -/// self.note = note.toOwned() // You can copy the note if needed +/// func getTimestampForMyMutelist() throws -> UInt32 { +/// let lender = functionThatSomehowReturnsMyMutelist() +/// return try lender.borrow({ event in // Here we are only borrowing, so the compiler won't allow us to copy `event` to an external variable +/// return event.created_at // No need to copy the entire note, we only need the timestamp +/// }) /// } /// ``` -typealias NdbNoteLender = ((_: borrowing UnownedNdbNote) -> Void) throws -> Void - -enum NdbNoteLenderError: Error { - case errorLoadingNote +/// +/// If you need to retain the entire note, you may need to copy it. Here is how: +/// +/// ```swift +/// func getTimestampForMyContactList() throws -> NdbNote { +/// let lender = functionThatSomehowReturnsMyContactList() +/// return try lender.getNoteCopy() // This will automatically make an owned copy of the note, which can be passed around safely. +/// } +/// ``` +enum NdbNoteLender: Sendable { + case ndbNoteKey(Ndb, NoteKey) + case owned(NdbNote) + + init(ndb: Ndb, noteKey: NoteKey) { + self = .ndbNoteKey(ndb, noteKey) + } + + init(ownedNdbNote: NdbNote) { + self = .owned(ownedNdbNote) + } + + /// Borrows the note temporarily + func borrow(_ lendingFunction: (_: borrowing UnownedNdbNote) throws -> T) throws -> T { + switch self { + case .ndbNoteKey(let ndb, let noteKey): + guard !ndb.is_closed else { throw LendingError.ndbClosed } + guard let ndbNoteTxn = ndb.lookup_note_by_key(noteKey) else { throw LendingError.errorLoadingNote } + guard let unownedNote = UnownedNdbNote(ndbNoteTxn) else { throw LendingError.errorLoadingNote } + return try lendingFunction(unownedNote) + case .owned(let note): + return try lendingFunction(UnownedNdbNote(note)) + } + + } + + /// Gets an owned copy of the note + func getCopy() throws -> NdbNote { + return try self.borrow({ ev in + return ev.toOwned() + }) + } + + /// A lenient and simple function to just use a copy, where implementing custom error handling is unfeasible or too burdensome and failures should not stop flow. + /// + /// Since the errors related to borrowing and copying are unlikely, instead of implementing custom error handling, a simple default error handling logic may be used. + /// + /// This implements error handling in the following way: + /// - On debug builds, it will throw an assertion to alert developers that something is off + /// - On production builds, an error will be printed to the logs. + func justUseACopy(_ useFunction: (_: NdbNote) throws -> T) rethrows -> T? { + guard let event = self.justGetACopy() else { return nil } + return try useFunction(event) + } + + /// A lenient and simple function to just use a copy, where implementing custom error handling is unfeasible or too burdensome and failures should not stop flow. + /// + /// Since the errors related to borrowing and copying are unlikely, instead of implementing custom error handling, a simple default error handling logic may be used. + /// + /// This implements error handling in the following way: + /// - On debug builds, it will throw an assertion to alert developers that something is off + /// - On production builds, an error will be printed to the logs. + func justUseACopy(_ useFunction: (_: NdbNote) async throws -> T) async rethrows -> T? 
{ + guard let event = self.justGetACopy() else { return nil } + return try await useFunction(event) + } + + /// A lenient and simple function to just get a copy, where implementing custom error handling is unfeasible or too burdensome and failures should not stop flow. + /// + /// Since the errors related to borrowing and copying are unlikely, instead of implementing custom error handling, a simple default error handling logic may be used. + /// + /// This implements error handling in the following way: + /// - On debug builds, it will throw an assertion to alert developers that something is off + /// - On production builds, an error will be printed to the logs. + func justGetACopy() -> NdbNote? { + do { + return try self.getCopy() + } + catch { + assertionFailure("Unexpected error while fetching a copy of an NdbNote: \(error.localizedDescription)") + Log.error("Unexpected error while fetching a copy of an NdbNote: %s", for: .ndb, error.localizedDescription) + } + return nil + } + + enum LendingError: Error { + case errorLoadingNote + case ndbClosed + } } From fcd8131063bb71649988d23e278f6fc3b71e73c5 Mon Sep 17 00:00:00 2001 From: William Casarin Date: Wed, 19 Mar 2025 12:57:53 -0700 Subject: [PATCH 28/91] nostrdb: config: custom writer scratch size making more things configurable if you have memory constraints Signed-off-by: William Casarin --- nostrdb/src/nostrdb.c | 27 +++++++++++++++++++-------- nostrdb/src/nostrdb.h | 6 ++++++ 2 files changed, 25 insertions(+), 8 deletions(-) diff --git a/nostrdb/src/nostrdb.c b/nostrdb/src/nostrdb.c index 600eda9fc..296263c45 100644 --- a/nostrdb/src/nostrdb.c +++ b/nostrdb/src/nostrdb.c @@ -43,6 +43,9 @@ // the maximum size of inbox queues static const int DEFAULT_QUEUE_SIZE = 32768; +// 2mb scratch size for the writer thread +static const int DEFAULT_WRITER_SCRATCH_SIZE = 2097152; + // increase if we need bigger filters #define NDB_FILTER_PAGES 64 @@ -163,6 +166,7 @@ struct ndb_writer { struct ndb_lmdb *lmdb; struct ndb_monitor *monitor; + int scratch_size; uint32_t ndb_flags; void *queue_buf; int queue_buflen; @@ -4558,15 +4562,13 @@ static void *ndb_writer_thread(void *data) struct ndb_writer *writer = data; struct ndb_writer_msg msgs[THREAD_QUEUE_BATCH], *msg; struct written_note written_notes[THREAD_QUEUE_BATCH]; - size_t scratch_size; int i, popped, done, needs_commit, num_notes; uint64_t note_nkey; struct ndb_txn txn; unsigned char *scratch; - // 8mb scratch buffer for parsing note content - scratch_size = 8 * 1024 * 1024; - scratch = malloc(scratch_size); + // 2MB scratch buffer for parsing note content + scratch = malloc(writer->scratch_size); MDB_txn *mdb_txn = NULL; ndb_txn_from_mdb(&txn, writer->lmdb, mdb_txn); @@ -4615,7 +4617,7 @@ static void *ndb_writer_thread(void *data) &txn, &msg->profile, scratch, - scratch_size, + writer->scratch_size, writer->ndb_flags); if (note_nkey > 0) { @@ -4631,7 +4633,7 @@ static void *ndb_writer_thread(void *data) case NDB_WRITER_NOTE: note_nkey = ndb_write_note(&txn, &msg->note, scratch, - scratch_size, + writer->scratch_size, writer->ndb_flags); if (note_nkey > 0) { @@ -4769,11 +4771,13 @@ static void *ndb_ingester_thread(void *data) static int ndb_writer_init(struct ndb_writer *writer, struct ndb_lmdb *lmdb, - struct ndb_monitor *monitor, uint32_t ndb_flags) + struct ndb_monitor *monitor, uint32_t ndb_flags, + int scratch_size) { writer->lmdb = lmdb; writer->monitor = monitor; writer->ndb_flags = ndb_flags; + writer->scratch_size = scratch_size; writer->queue_buflen = sizeof(struct ndb_writer_msg) * 
DEFAULT_QUEUE_SIZE; writer->queue_buf = malloc(writer->queue_buflen); if (writer->queue_buf == NULL) { @@ -5064,7 +5068,8 @@ int ndb_init(struct ndb **pndb, const char *filename, const struct ndb_config *c ndb_monitor_init(&ndb->monitor, config->sub_cb, config->sub_cb_ctx); - if (!ndb_writer_init(&ndb->writer, &ndb->lmdb, &ndb->monitor, ndb->flags)) { + if (!ndb_writer_init(&ndb->writer, &ndb->lmdb, &ndb->monitor, ndb->flags, + config->writer_scratch_buffer_size)) { fprintf(stderr, "ndb_writer_init failed\n"); return 0; } @@ -6771,6 +6776,7 @@ void ndb_default_config(struct ndb_config *config) config->filter_context = NULL; config->sub_cb_ctx = NULL; config->sub_cb = NULL; + config->writer_scratch_buffer_size = DEFAULT_WRITER_SCRATCH_SIZE; } void ndb_config_set_subscription_callback(struct ndb_config *config, ndb_sub_fn fn, void *context) @@ -6779,6 +6785,11 @@ void ndb_config_set_subscription_callback(struct ndb_config *config, ndb_sub_fn config->sub_cb = fn; } +void ndb_config_set_writer_scratch_buffer_size(struct ndb_config *config, int scratch_size) +{ + config->writer_scratch_buffer_size = scratch_size; +} + void ndb_config_set_ingest_threads(struct ndb_config *config, int threads) { config->ingester_threads = threads; diff --git a/nostrdb/src/nostrdb.h b/nostrdb/src/nostrdb.h index e5231cec0..ac9839084 100644 --- a/nostrdb/src/nostrdb.h +++ b/nostrdb/src/nostrdb.h @@ -273,6 +273,7 @@ struct ndb_filter { struct ndb_config { int flags; int ingester_threads; + int writer_scratch_buffer_size; size_t mapsize; void *filter_context; ndb_ingest_filter_fn ingest_filter; @@ -459,6 +460,11 @@ void ndb_config_set_mapsize(struct ndb_config *config, size_t mapsize); void ndb_config_set_ingest_filter(struct ndb_config *config, ndb_ingest_filter_fn fn, void *); void ndb_config_set_subscription_callback(struct ndb_config *config, ndb_sub_fn fn, void *ctx); +/// Configurable scratch buffer size for the writer thread. Default is 2MB. If you have smaller notes +/// you can decrease this to reduce memory usage. If you have bigger notes you should increase this so +/// that the writer thread can properly parse larger notes. +void ndb_config_set_writer_scratch_buffer_size(struct ndb_config *config, int scratch_size); + // HELPERS int ndb_calculate_id(struct ndb_note *note, unsigned char *buf, int buflen); int ndb_sign_id(struct ndb_keypair *keypair, unsigned char id[32], unsigned char sig[64]); From 684701931d1729215c74b3a1a31cf8b303843df7 Mon Sep 17 00:00:00 2001 From: William Casarin Date: Wed, 19 Mar 2025 15:12:52 -0700 Subject: [PATCH 29/91] nostrdb: Initial relay index implementation Add relay indexing for existing notes This patch introduces a relay index for new notes and notes that have already been stored, allowing the database to track additional relay sources for a given note. Changes: - Added `NDB_WRITER_NOTE_RELAY` to handle relay indexing separately from new note ingestion. - Implemented `ndb_write_note_relay()` and `ndb_write_note_relay_kind_index()` to store relay URLs. - Modified `ndb_ingester_process_event()` to check for existing notes and append relay info if necessary. - Introduced `ndb_note_has_relay()` to prevent duplicate relay entries. - Updated LMDB schema with `NDB_DB_NOTE_RELAYS` (note_id -> relay) and `NDB_DB_NOTE_RELAY_KIND` (relay + kind + created_at -> note). - Refactored `ndb_process_event()` to use `ndb_ingest_meta` for tracking relay sources. - Ensured proper memory management for relay strings in writer thread. 
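As a usage sketch (not part of this patch's diff), a caller that knows the
source relay can now ingest an event with that relay attached. The
ingest_from_relay wrapper and the relay URL below are illustrative; only
ndb_ingest_meta_init() and ndb_process_event_with() come from this patch:

    #include <stdio.h>
    #include "nostrdb.h"

    /* ndb, json and json_len are assumed to be set up by the caller */
    static int ingest_from_relay(struct ndb *ndb, const char *json, int json_len)
    {
            struct ndb_ingest_meta meta;

            /* client = 0: this event arrived from a relay, not a local client */
            ndb_ingest_meta_init(&meta, 0, "wss://relay.example.com");

            /* ingest the event json and record which relay it was seen on */
            return ndb_process_event_with(ndb, json, json_len, &meta);
    }

Once ingested this way, ndb_note_seen_on_relay() can be used inside a
transaction to check whether a given note has already been recorded for a
particular relay.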
With this change, nostrdb can better track where notes are seen across different relays, improving query capabilities for relay-based data retrieval. Signed-off-by: William Casarin --- nostrdb/src/nostrdb.c | 430 +++++++++++++++++++++++++++++++++++++++--- nostrdb/src/nostrdb.h | 17 ++ 2 files changed, 416 insertions(+), 31 deletions(-) diff --git a/nostrdb/src/nostrdb.c b/nostrdb/src/nostrdb.c index 296263c45..0e423fad5 100644 --- a/nostrdb/src/nostrdb.c +++ b/nostrdb/src/nostrdb.c @@ -129,6 +129,8 @@ struct ndb_ingest_controller { MDB_txn *read_txn; struct ndb_lmdb *lmdb; + struct ndb_note *note; + uint64_t note_key; }; enum ndb_writer_msgtype { @@ -139,6 +141,7 @@ enum ndb_writer_msgtype { NDB_WRITER_PROFILE_LAST_FETCH, // when profiles were last fetched NDB_WRITER_BLOCKS, // write parsed note blocks NDB_WRITER_MIGRATE, // migrate the database + NDB_WRITER_NOTE_RELAY, // we already have the note, but we have more relays to write }; // keys used for storing data in the NDB metadata database (NDB_DB_NDB_META) @@ -1475,6 +1478,7 @@ static int ndb_db_is_index(enum ndb_dbs index) case NDB_DB_NDB_META: case NDB_DB_PROFILE_SEARCH: case NDB_DB_PROFILE_LAST_FETCH: + case NDB_DB_NOTE_RELAYS: case NDB_DBS: return 0; case NDB_DB_PROFILE_PK: @@ -1484,6 +1488,7 @@ static int ndb_db_is_index(enum ndb_dbs index) case NDB_DB_NOTE_TAGS: case NDB_DB_NOTE_PUBKEY: case NDB_DB_NOTE_PUBKEY_KIND: + case NDB_DB_NOTE_RELAY_KIND: return 1; } @@ -1499,6 +1504,125 @@ static inline void ndb_id_u64_ts_init(struct ndb_id_u64_ts *key, key->timestamp = timestamp; } +// formats the relay url buffer for the NDB_DB_NOTE_RELAYS value. It's a +// null terminated string padded to 8 bytes (we must keep the entire database +// aligned to 8 bytes at all times) +static int prepare_relay_buf(char *relay_buf, int bufsize, const char *relay, + int relay_len) +{ + struct cursor cur; + + // make sure the size of the buffer is aligned + assert((bufsize % 8) == 0); + + make_cursor((unsigned char *)relay_buf, (unsigned char *)relay_buf + bufsize, &cur); + + // push the relay string + if (!cursor_push(&cur, (unsigned char *)relay, relay_len)) + return 0; + + // relay urls are null terminated for convenience + if (!cursor_push_byte(&cur, 0)) + return 0; + + // align the buffer + if (!cursor_align(&cur, 8)) + return 0; + + return cur.p - cur.start; +} + +// Write to the note_id -> relay_url database. 
This records where notes +// have been seen +static int ndb_write_note_relay(struct ndb_txn *txn, uint64_t note_key, + const char *relay, int relay_len) +{ + char relay_buf[256]; + int rc, len; + MDB_val k, v; + + if (relay == NULL || relay_len == 0) + return 0; + + if (!(len = prepare_relay_buf(relay_buf, sizeof(relay_buf), relay, relay_len))) { + fprintf(stderr, "relay url '%s' too large when writing note relay index\n", relay); + return 0; + } + + assert((len % 8) == 0); + + k.mv_data = ¬e_key; + k.mv_size = sizeof(note_key); + + v.mv_data = relay_buf; + v.mv_size = len; + + // NODUPDATA is specified so that we don't accidently add duplicate + // key/value pairs + if ((rc = mdb_put(txn->mdb_txn, txn->lmdb->dbs[NDB_DB_NOTE_RELAYS], + &k, &v, MDB_NODUPDATA))) + { + ndb_debug("ndb_write_note_relay failed: %s\n", mdb_strerror(rc)); + return 0; + } + + return 1; +} + +static int ndb_write_note_relay_kind_index(struct ndb_txn *txn, + uint64_t kind, + uint64_t note_key, + uint64_t created_at, + const char *relay, + int relay_len) +{ + // The relay kind key has a layout like so + // + // - note_key: 00 + 8 bytes + // - kind: 08 + 8 bytes + // - created_at: 16 + 8 bytes + // - relay_url_size: 24 + 1 byte + // - relay_url: 25 + n byte null-terminated string + // - pad to 8 byte alignment + + unsigned char buf[256]; + int rc; + struct cursor cur; + MDB_val k, v; + + // come on bro + if (relay_len > 248) + return 0; + + if (relay == NULL || relay_len == 0) + return 0; + + make_cursor(buf, buf + sizeof(buf), &cur); + + if (!cursor_push(&cur, (unsigned char *)¬e_key, 8)) return 0; + if (!cursor_push(&cur, (unsigned char *)&kind, 8)) return 0; + if (!cursor_push(&cur, (unsigned char *)&created_at, 8)) return 0; + if (!cursor_push_byte(&cur, (uint8_t)relay_len)) return 0; + if (!cursor_push(&cur, (unsigned char *)relay, relay_len)) return 0; + if (!cursor_align(&cur, 8)) return 0; + + assert(((cur.p-cur.start)%8) == 0); + + k.mv_data = cur.start; + k.mv_size = cur.p - cur.start; + + v.mv_data = NULL; + v.mv_size = 0; + + if ((rc = mdb_put(txn->mdb_txn, txn->lmdb->dbs[NDB_DB_NOTE_RELAY_KIND], &k, &v, 0))) { + fprintf(stderr, "write note relay kind index failed: %s\n", + mdb_strerror(rc)); + return 0; + } + + return 1; +} + static int ndb_write_note_pubkey_index(struct ndb_txn *txn, struct ndb_note *note, uint64_t note_key) { @@ -1601,6 +1725,7 @@ static int ndb_rebuild_note_indices(struct ndb_txn *txn, enum ndb_dbs *indices, case NDB_DB_NDB_META: case NDB_DB_PROFILE_SEARCH: case NDB_DB_PROFILE_LAST_FETCH: + case NDB_DB_NOTE_RELAYS: case NDB_DBS: // this should never happen since we check at // the start @@ -1620,6 +1745,9 @@ static int ndb_rebuild_note_indices(struct ndb_txn *txn, enum ndb_dbs *indices, goto cleanup; } break; + case NDB_DB_NOTE_RELAY_KIND: + fprintf(stderr, "it doesn't make sense to rebuild note relay kind index\n"); + return 0; case NDB_DB_NOTE_PUBKEY_KIND: if (!ndb_write_note_pubkey_kind_index(txn, note, note_key)) { count = -1; @@ -1817,14 +1945,23 @@ enum ndb_ingester_msgtype { }; struct ndb_ingester_event { + const char *relay; char *json; unsigned client : 1; // ["EVENT", {...}] messages unsigned len : 31; }; +struct ndb_writer_note_relay { + const char *relay; + uint64_t note_key; + uint64_t kind; + uint64_t created_at; +}; + struct ndb_writer_note { struct ndb_note *note; size_t note_len; + const char *relay; }; struct ndb_writer_profile { @@ -1862,6 +1999,7 @@ struct ndb_writer_blocks { struct ndb_writer_msg { enum ndb_writer_msgtype type; union { + struct ndb_writer_note_relay 
note_relay; struct ndb_writer_note note; struct ndb_writer_profile profile; struct ndb_writer_ndb_meta ndb_meta; @@ -2075,6 +2213,7 @@ static int ndb_cursor_start(MDB_cursor *cur, MDB_val *k, MDB_val *v) return 1; } + // get some value based on a clustered id key int ndb_get_tsid(struct ndb_txn *txn, enum ndb_dbs db, const unsigned char *id, MDB_val *val) @@ -2241,9 +2380,10 @@ static enum ndb_idres ndb_ingester_json_controller(void *data, const char *hexid hex_decode(hexid, 64, id, sizeof(id)); // let's see if we already have it - ndb_txn_from_mdb(&txn, c->lmdb, c->read_txn); - if (!ndb_has_note(&txn, id)) + c->note = ndb_get_note_by_id(&txn, id, NULL, &c->note_key); + + if (c->note == NULL) return NDB_IDRES_CONT; return NDB_IDRES_STOP; @@ -2330,7 +2470,8 @@ int ndb_process_profile_note(struct ndb_note *note, } static int ndb_ingester_queue_event(struct ndb_ingester *ingester, - char *json, unsigned len, unsigned client) + char *json, unsigned len, + unsigned client, const char *relay) { struct ndb_ingester_msg msg; msg.type = NDB_INGEST_EVENT; @@ -2338,14 +2479,22 @@ static int ndb_ingester_queue_event(struct ndb_ingester *ingester, msg.event.json = json; msg.event.len = len; msg.event.client = client; + msg.event.relay = relay; return threadpool_dispatch(&ingester->tp, &msg); } +void ndb_ingest_meta_init(struct ndb_ingest_meta *meta, unsigned client, const char *relay) +{ + meta->client = client; + meta->relay = relay; +} static int ndb_ingest_event(struct ndb_ingester *ingester, const char *json, - int len, unsigned client) + int len, struct ndb_ingest_meta *meta) { + const char *relay = meta->relay; + // Without this, we get bus errors in the json parser inside when // trying to ingest empty kind 6 reposts... we should probably do fuzz // testing on inputs to the json parser @@ -2362,7 +2511,13 @@ static int ndb_ingest_event(struct ndb_ingester *ingester, const char *json, if (json_copy == NULL) return 0; - return ndb_ingester_queue_event(ingester, json_copy, len, client); + if (relay != NULL) { + relay = strdup(meta->relay); + if (relay == NULL) + return 0; + } + + return ndb_ingester_queue_event(ingester, json_copy, len, meta->client, relay); } @@ -2370,9 +2525,12 @@ static int ndb_ingester_process_note(secp256k1_context *ctx, struct ndb_note *note, size_t note_size, struct ndb_writer_msg *out, - struct ndb_ingester *ingester) + struct ndb_ingester *ingester, + const char *relay) { enum ndb_ingest_filter_action action; + struct ndb_ingest_meta meta; + action = NDB_INGEST_ACCEPT; if (ingester->filter) @@ -2412,24 +2570,81 @@ static int ndb_ingester_process_note(secp256k1_context *ctx, } else if (note->kind == 6) { // process the repost if we have a repost event ndb_debug("processing kind 6 repost\n"); + // dup the relay string + ndb_ingest_meta_init(&meta, 0, relay); ndb_ingest_event(ingester, ndb_note_content(note), - ndb_note_content_length(note), 0); + ndb_note_content_length(note), + &meta); } out->type = NDB_WRITER_NOTE; out->note.note = note; out->note.note_len = note_size; + out->note.relay = relay; return 1; } +int ndb_note_seen_on_relay(struct ndb_txn *txn, uint64_t note_key, const char *relay) +{ + MDB_val k, v; + MDB_cursor *cur; + int rc, len; + char relay_buf[256]; + + if (relay == NULL) + return 0; + + len = strlen(relay); + + if (!(len = prepare_relay_buf(relay_buf, sizeof(relay_buf), relay, len))) + return 0; + + assert((len % 8) == 0); + + k.mv_data = ¬e_key; + k.mv_size = sizeof(note_key); + + v.mv_data = relay_buf; + v.mv_size = len; + + if ((rc = 
mdb_cursor_open(txn->mdb_txn, txn->lmdb->dbs[NDB_DB_NOTE_RELAYS], &cur)) != MDB_SUCCESS) + return 0; + + rc = mdb_cursor_get(cur, &k, &v, MDB_GET_BOTH); + mdb_cursor_close(cur); + + return rc == MDB_SUCCESS; +} + +// process the relay for the note. this is called when we already have the +// note in the database but still need to check if the relay needs to be +// written to the relay indexes for corresponding note +static int ndb_process_note_relay(struct ndb_txn *txn, struct ndb_writer_msg *out, + uint64_t note_key, struct ndb_note *note, + const char *relay) +{ + // query to see if we already have the relay on this note + if (ndb_note_seen_on_relay(txn, note_key, relay)) { + return 0; + } + + // if not, tell the writer thread to emit a NOTE_RELAY event + out->type = NDB_WRITER_NOTE_RELAY; + + out->note_relay.relay = relay; + out->note_relay.note_key = note_key; + out->note_relay.kind = ndb_note_kind(note); + out->note_relay.created_at = ndb_note_created_at(note); + + return 1; +} static int ndb_ingester_process_event(secp256k1_context *ctx, struct ndb_ingester *ingester, struct ndb_ingester_event *ev, struct ndb_writer_msg *out, - MDB_txn *read_txn - ) + MDB_txn *read_txn) { struct ndb_tce tce; struct ndb_fce fce; @@ -2463,10 +2678,29 @@ static int ndb_ingester_process_event(secp256k1_context *ctx, ndb_client_event_from_json(ev->json, ev->len, &fce, buf, bufsize, &cb) : ndb_ws_event_from_json(ev->json, ev->len, &tce, buf, bufsize, &cb); + // This is a result from our special json parser. It parsed the id + // and found that we already have it in the database if ((int)note_size == -42) { - // we already have this! - //ndb_debug("already have id??\n"); - goto cleanup; + assert(controller.note != NULL); + assert(controller.note_key != 0); + struct ndb_txn txn; + ndb_txn_from_mdb(&txn, ingester->lmdb, read_txn); + + // we still need to process the relays on the note even + // if we already have it + if (ev->relay && ndb_process_note_relay(&txn, out, + controller.note_key, + controller.note, + ev->relay)) + { + // free note buf here since we don't pass the note to the writer thread + free(buf); + goto success; + } else { + // we already have the note and there are no new + // relays to process. nothing to write. 
+ goto cleanup; + } } else if (note_size == 0) { ndb_debug("failed to parse '%.*s'\n", ev->len, ev->json); goto cleanup; @@ -2484,13 +2718,12 @@ static int ndb_ingester_process_event(secp256k1_context *ctx, } if (!ndb_ingester_process_note(ctx, note, note_size, - out, ingester)) { + out, ingester, + ev->relay)) { ndb_debug("failed to process note\n"); goto cleanup; } else { - // we're done with the original json, free it - free(ev->json); - return 1; + goto success; } } } else { @@ -2507,20 +2740,26 @@ static int ndb_ingester_process_event(secp256k1_context *ctx, } if (!ndb_ingester_process_note(ctx, note, note_size, - out, ingester)) { + out, ingester, + ev->relay)) { ndb_debug("failed to process note\n"); goto cleanup; } else { - // we're done with the original json, free it - free(ev->json); - return 1; + goto success; } } } +success: + free(ev->json); + // we don't free relay or buf since those are passed to the writer thread + return 1; + cleanup: free(ev->json); + if (ev->relay) + free((void*)ev->relay); free(buf); return ok; @@ -2628,6 +2867,68 @@ int ndb_search_profile_next(struct ndb_search *search) return 1; } +// +// The relay kind index has a layout like so (so we don't need dupsort) +// +// - note_id: 00 + 8 bytes +// - kind: 08 + 8 bytes +// - created_at: 16 + 8 bytes +// - relay_url_size: 24 + 1 byte +// - relay_url: 25 + n byte null-terminated string +// - pad to 8 byte alignment +// +// The key sort order is: +// +// relay_url, kind, created_at +// +static int ndb_relay_kind_cmp(const MDB_val *a, const MDB_val *b) +{ + int cmp; + MDB_val va, vb; + uint64_t iva, ivb; + unsigned char *ad = (unsigned char *)a->mv_data; + unsigned char *bd = (unsigned char *)b->mv_data; + assert(((uint64_t)a->mv_data % 8) == 0); + + va.mv_size = *(ad + 24); + va.mv_data = ad + 25; + + vb.mv_size = *(bd + 24); + vb.mv_data = bd + 25; + + cmp = mdb_cmp_memn(&va, &vb); + if (cmp) return cmp; + + // kind + iva = *(uint64_t*)(ad + 8); + ivb = *(uint64_t*)(bd + 8); + + if (iva < ivb) + return -1; + else if (iva > ivb) + return 1; + + // created_at + iva = *(uint64_t*)(ad + 16); + ivb = *(uint64_t*)(bd + 16); + + if (iva < ivb) + return -1; + else if (iva > ivb) + return 1; + + // note_id (so we don't need dupsort logic) + iva = *(uint64_t*)ad; + ivb = *(uint64_t*)bd; + + if (iva < ivb) + return -1; + else if (iva > ivb) + return 1; + + return 0; +} + static int ndb_search_key_cmp(const MDB_val *a, const MDB_val *b) { int cmp; @@ -4356,7 +4657,7 @@ static uint64_t ndb_write_note(struct ndb_txn *txn, unsigned char *scratch, size_t scratch_size, uint32_t ndb_flags) { - int rc; + int rc, relay_len = 0; uint64_t note_key, kind; MDB_dbi note_db; MDB_val key, val; @@ -4384,11 +4685,18 @@ static uint64_t ndb_write_note(struct ndb_txn *txn, return 0; } + if (note->relay != NULL) + relay_len = strlen(note->relay); + ndb_write_note_id_index(txn, note->note, note_key); ndb_write_note_kind_index(txn, note->note, note_key); ndb_write_note_tag_index(txn, note->note, note_key); ndb_write_note_pubkey_index(txn, note->note, note_key); ndb_write_note_pubkey_kind_index(txn, note->note, note_key); + ndb_write_note_relay_kind_index(txn, kind, note_key, + ndb_note_created_at(note->note), + note->relay, relay_len); + ndb_write_note_relay(txn, note_key, note->relay, relay_len); // only parse content and do fulltext index on text and longform notes if (kind == 1 || kind == 30023) { @@ -4562,7 +4870,7 @@ static void *ndb_writer_thread(void *data) struct ndb_writer *writer = data; struct ndb_writer_msg 
msgs[THREAD_QUEUE_BATCH], *msg; struct written_note written_notes[THREAD_QUEUE_BATCH]; - int i, popped, done, needs_commit, num_notes; + int i, popped, done, relay_len, needs_commit, num_notes; uint64_t note_nkey; struct ndb_txn txn; unsigned char *scratch; @@ -4590,6 +4898,7 @@ static void *ndb_writer_thread(void *data) case NDB_WRITER_PROFILE_LAST_FETCH: needs_commit = 1; break; case NDB_WRITER_BLOCKS: needs_commit = 1; break; case NDB_WRITER_MIGRATE: needs_commit = 1; break; + case NDB_WRITER_NOTE_RELAY: needs_commit = 1; break; case NDB_WRITER_QUIT: break; } } @@ -4643,6 +4952,20 @@ static void *ndb_writer_thread(void *data) }; } break; + case NDB_WRITER_NOTE_RELAY: + relay_len = strlen(msg->note_relay.relay); + ndb_write_note_relay(&txn, + msg->note_relay.note_key, + msg->note_relay.relay, + relay_len); + ndb_write_note_relay_kind_index( + &txn, + msg->note_relay.kind, + msg->note_relay.note_key, + msg->note_relay.created_at, + msg->note_relay.relay, + relay_len); + break; case NDB_WRITER_DBMETA: ndb_write_version(&txn, msg->ndb_meta.version); break; @@ -4683,11 +5006,15 @@ static void *ndb_writer_thread(void *data) msg = &msgs[i]; if (msg->type == NDB_WRITER_NOTE) { free(msg->note.note); + if (msg->note.relay) + free((void*)msg->note.relay); } else if (msg->type == NDB_WRITER_PROFILE) { free(msg->profile.note.note); //ndb_profile_record_builder_free(&msg->profile.record); - } else if (msg->type == NDB_WRITER_BLOCKS) { + } else if (msg->type == NDB_WRITER_BLOCKS) { ndb_blocks_free(msg->blocks.blocks); + } else if (msg->type == NDB_WRITER_NOTE_RELAY) { + free((void*)msg->note_relay.relay); } } } @@ -4928,6 +5255,20 @@ static int ndb_init_lmdb(const char *filename, struct ndb_lmdb *lmdb, size_t map return 0; } + // relay kind index. maps primary keys to relay records + // see ndb_relay_kind_cmp function for more details on the key format + if ((rc = mdb_dbi_open(txn, "relay_kind", MDB_CREATE, &lmdb->dbs[NDB_DB_NOTE_RELAY_KIND]))) { + fprintf(stderr, "mdb_dbi_open profile last fetch, error %d\n", rc); + return 0; + } + mdb_set_compare(txn, lmdb->dbs[NDB_DB_NOTE_RELAY_KIND], ndb_relay_kind_cmp); + + // note_id -> relay index + if ((rc = mdb_dbi_open(txn, "note_relays", MDB_CREATE | MDB_DUPSORT | MDB_DUPFIXED, &lmdb->dbs[NDB_DB_NOTE_RELAYS]))) { + fprintf(stderr, "mdb_dbi_open profile last fetch, error %d\n", rc); + return 0; + } + // id+ts index flags unsigned int tsid_flags = MDB_CREATE | MDB_DUPSORT | MDB_DUPFIXED; @@ -5109,6 +5450,7 @@ void ndb_destroy(struct ndb *ndb) free(ndb); } + // Process a nostr event from a client // // ie: ["EVENT", {"content":"..."} ...] 
@@ -5116,7 +5458,10 @@ void ndb_destroy(struct ndb *ndb) // The client-sent variation of ndb_process_event int ndb_process_client_event(struct ndb *ndb, const char *json, int len) { - return ndb_ingest_event(&ndb->ingester, json, len, 1); + struct ndb_ingest_meta meta; + ndb_ingest_meta_init(&meta, 1, NULL); + + return ndb_ingest_event(&ndb->ingester, json, len, &meta); } // Process anostr event from a relay, @@ -5138,25 +5483,32 @@ int ndb_process_client_event(struct ndb *ndb, const char *json, int len) // int ndb_process_event(struct ndb *ndb, const char *json, int json_len) { - return ndb_ingest_event(&ndb->ingester, json, json_len, 0); + struct ndb_ingest_meta meta; + ndb_ingest_meta_init(&meta, 0, NULL); + + return ndb_ingest_event(&ndb->ingester, json, json_len, &meta); } +int ndb_process_event_with(struct ndb *ndb, const char *json, int json_len, + struct ndb_ingest_meta *meta) +{ + return ndb_ingest_event(&ndb->ingester, json, json_len, meta); +} -int _ndb_process_events(struct ndb *ndb, const char *ldjson, size_t json_len, int client) +int _ndb_process_events(struct ndb *ndb, const char *ldjson, size_t json_len, + struct ndb_ingest_meta *meta) { const char *start, *end, *very_end; start = ldjson; end = start + json_len; very_end = ldjson + json_len; - int (* process)(struct ndb *, const char *, int); #if DEBUG int processed = 0; #endif - process = client ? ndb_process_client_event : ndb_process_event; while ((end = fast_strchr(start, '\n', very_end - start))) { //printf("processing '%.*s'\n", (int)(end-start), start); - if (!process(ndb, start, end - start)) { + if (!ndb_process_event_with(ndb, start, end - start, meta)) { ndb_debug("ndb_process_client_event failed\n"); return 0; } @@ -5194,14 +5546,26 @@ int ndb_process_events_stream(struct ndb *ndb, FILE* fp) } #endif +int ndb_process_events_with(struct ndb *ndb, const char *ldjson, size_t json_len, + struct ndb_ingest_meta *meta) +{ + return _ndb_process_events(ndb, ldjson, json_len, meta); +} + int ndb_process_client_events(struct ndb *ndb, const char *ldjson, size_t json_len) { - return _ndb_process_events(ndb, ldjson, json_len, 1); + struct ndb_ingest_meta meta; + ndb_ingest_meta_init(&meta, 1, NULL); + + return _ndb_process_events(ndb, ldjson, json_len, &meta); } int ndb_process_events(struct ndb *ndb, const char *ldjson, size_t json_len) { - return _ndb_process_events(ndb, ldjson, json_len, 0); + struct ndb_ingest_meta meta; + ndb_ingest_meta_init(&meta, 0, NULL); + + return _ndb_process_events(ndb, ldjson, json_len, &meta); } static inline int cursor_push_tag(struct cursor *cur, struct ndb_tag *tag) @@ -7086,6 +7450,10 @@ const char *ndb_db_name(enum ndb_dbs db) return "note_pubkey_index"; case NDB_DB_NOTE_PUBKEY_KIND: return "note_pubkey_kind_index"; + case NDB_DB_NOTE_RELAY_KIND: + return "note_relay_kind_index"; + case NDB_DB_NOTE_RELAYS: + return "note_relays"; case NDB_DBS: return "count"; } diff --git a/nostrdb/src/nostrdb.h b/nostrdb/src/nostrdb.h index ac9839084..7eb3ba3bd 100644 --- a/nostrdb/src/nostrdb.h +++ b/nostrdb/src/nostrdb.h @@ -55,6 +55,11 @@ struct ndb_str { }; }; +struct ndb_ingest_meta { + unsigned client; + const char *relay; +}; + struct ndb_keypair { unsigned char pubkey[32]; unsigned char secret[32]; @@ -189,6 +194,8 @@ enum ndb_dbs { NDB_DB_NOTE_TAGS, // note tags index NDB_DB_NOTE_PUBKEY, // note pubkey index NDB_DB_NOTE_PUBKEY_KIND, // note pubkey kind index + NDB_DB_NOTE_RELAY_KIND, // relay+kind+created -> note_id + NDB_DB_NOTE_RELAYS, // note_id -> relays NDB_DBS, }; @@ -475,14 +482,23 @@ 
int ndb_note_verify(void *secp_ctx, unsigned char pubkey[32], unsigned char id[3 // NDB int ndb_init(struct ndb **ndb, const char *dbdir, const struct ndb_config *); int ndb_db_version(struct ndb_txn *txn); + +// NOTE PROCESSING int ndb_process_event(struct ndb *, const char *json, int len); +void ndb_ingest_meta_init(struct ndb_ingest_meta *meta, unsigned client, const char *relay); +// Process an event, recording the relay where it came from. +int ndb_process_event_with(struct ndb *, const char *json, int len, struct ndb_ingest_meta *meta); int ndb_process_events(struct ndb *, const char *ldjson, size_t len); +int ndb_process_events_with(struct ndb *ndb, const char *ldjson, size_t json_len, struct ndb_ingest_meta *meta); #ifndef _WIN32 // TODO: fix on windows int ndb_process_events_stream(struct ndb *, FILE* fp); #endif +// deprecated: use ndb_ingest_event_with int ndb_process_client_event(struct ndb *, const char *json, int len); +// deprecated: use ndb_ingest_events_with int ndb_process_client_events(struct ndb *, const char *json, size_t len); + int ndb_begin_query(struct ndb *, struct ndb_txn *); int ndb_search_profile(struct ndb_txn *txn, struct ndb_search *search, const char *query); int ndb_search_profile_next(struct ndb_search *search); @@ -497,6 +513,7 @@ uint64_t ndb_get_profilekey_by_pubkey(struct ndb_txn *txn, const unsigned char * struct ndb_note *ndb_get_note_by_id(struct ndb_txn *txn, const unsigned char *id, size_t *len, uint64_t *primkey); struct ndb_note *ndb_get_note_by_key(struct ndb_txn *txn, uint64_t key, size_t *len); void *ndb_get_note_meta(struct ndb_txn *txn, const unsigned char *id, size_t *len); +int ndb_note_seen_on_relay(struct ndb_txn *txn, uint64_t note_key, const char *relay); void ndb_destroy(struct ndb *); // BUILDER From a877a19c25fc158d547209f848df3facb74b34cc Mon Sep 17 00:00:00 2001 From: William Casarin Date: Thu, 20 Mar 2025 13:37:23 -0700 Subject: [PATCH 30/91] nostrdb: relay: add note relay iteration This is a simple cursor that walks the NDB_DB_NOTE_RELAYS db Signed-off-by: William Casarin --- nostrdb/src/nostrdb.c | 51 +++++++++++++++++++++++++++++++++++++++++++ nostrdb/src/nostrdb.h | 12 ++++++++++ 2 files changed, 63 insertions(+) diff --git a/nostrdb/src/nostrdb.c b/nostrdb/src/nostrdb.c index 0e423fad5..de8f811d9 100644 --- a/nostrdb/src/nostrdb.c +++ b/nostrdb/src/nostrdb.c @@ -7328,6 +7328,57 @@ struct ndb_note * ndb_note_from_bytes(unsigned char *bytes) return note; } +int ndb_note_relay_iterate_start(struct ndb_txn *txn, + struct ndb_note_relay_iterator *iter, + uint64_t note_key) +{ + if (mdb_cursor_open(txn->mdb_txn, txn->lmdb->dbs[NDB_DB_NOTE_RELAYS], + (MDB_cursor**)&iter->mdb_cur)) { + return 0; + } + + iter->txn = txn; + iter->cursor_op = MDB_SET_KEY; + iter->note_key = note_key; + + return 1; +} + +const char *ndb_note_relay_iterate_next(struct ndb_note_relay_iterator *iter) +{ + int rc; + MDB_val k, v; + + if (iter->mdb_cur == NULL) + return NULL; + + k.mv_data = &iter->note_key; + k.mv_size = sizeof(iter->note_key); + + if ((rc = mdb_cursor_get((MDB_cursor *)iter->mdb_cur, &k, &v, + (MDB_cursor_op)iter->cursor_op))) + { + //fprintf(stderr, "autoclosing %d '%s'\n", iter->cursor_op, mdb_strerror(rc)); + // autoclose + ndb_note_relay_iterate_close(iter); + return NULL; + } + + iter->cursor_op = MDB_NEXT_DUP; + + return (const char*)v.mv_data; +} + +void ndb_note_relay_iterate_close(struct ndb_note_relay_iterator *iter) +{ + if (!iter || iter->mdb_cur == NULL) + return; + + mdb_cursor_close((MDB_cursor*)iter->mdb_cur); + + 
iter->mdb_cur = NULL; +} + void ndb_tags_iterate_start(struct ndb_note *note, struct ndb_iterator *iter) { iter->note = note; diff --git a/nostrdb/src/nostrdb.h b/nostrdb/src/nostrdb.h index 7eb3ba3bd..13707fb3c 100644 --- a/nostrdb/src/nostrdb.h +++ b/nostrdb/src/nostrdb.h @@ -229,6 +229,13 @@ struct ndb_builder { struct ndb_tag *current_tag; }; +struct ndb_note_relay_iterator { + struct ndb_txn *txn; + uint64_t note_key; + int cursor_op; + void *mdb_cur; +}; + struct ndb_iterator { struct ndb_note *note; struct ndb_tag *tag; @@ -615,6 +622,11 @@ int ndb_tags_iterate_next(struct ndb_iterator *iter); struct ndb_str ndb_iter_tag_str(struct ndb_iterator *iter, int ind); struct ndb_str ndb_tag_str(struct ndb_note *note, struct ndb_tag *tag, int ind); +// RELAY ITER +int ndb_note_relay_iterate_start(struct ndb_txn *txn, struct ndb_note_relay_iterator *iter, uint64_t note_key); +const char *ndb_note_relay_iterate_next(struct ndb_note_relay_iterator *iter); +void ndb_note_relay_iterate_close(struct ndb_note_relay_iterator *iter); + // NAMES const char *ndb_db_name(enum ndb_dbs db); const char *ndb_kind_name(enum ndb_common_kind ck); From 5961bf7958493e0a3c6f4b59397f1fedac3abeec Mon Sep 17 00:00:00 2001 From: William Casarin Date: Thu, 20 Mar 2025 14:34:50 -0700 Subject: [PATCH 31/91] nostrdb: ndb: add print-relay-kind-index-keys for debugging Signed-off-by: William Casarin --- nostrdb/src/nostrdb.c | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/nostrdb/src/nostrdb.c b/nostrdb/src/nostrdb.c index de8f811d9..a07f213e6 100644 --- a/nostrdb/src/nostrdb.c +++ b/nostrdb/src/nostrdb.c @@ -7176,6 +7176,28 @@ void ndb_config_set_ingest_filter(struct ndb_config *config, config->filter_context = filter_ctx; } +int ndb_print_relay_kind_index(struct ndb_txn *txn) +{ + MDB_cursor *cur; + MDB_val k, v; + int i; + + if (mdb_cursor_open(txn->mdb_txn, txn->lmdb->dbs[NDB_DB_NOTE_RELAY_KIND], &cur)) + return 0; + + i = 1; + printf("relay\tkind\tcreated_at\tnote_id\n"); + while (mdb_cursor_get(cur, &k, &v, MDB_NEXT) == 0) { + printf("%s\t", (const char *)(k.mv_data + 25)); + printf("%" PRIu64 "\t", *(uint64_t*)(k.mv_data + 8)); + printf("%" PRIu64 "\t", *(uint64_t*)(k.mv_data + 16)); + printf("%" PRIu64 "\n", *(uint64_t*)(k.mv_data + 0)); + i++; + } + + return i; +} + int ndb_print_tag_index(struct ndb_txn *txn) { MDB_cursor *cur; From c6674199deffa9fde49485486ed04583662c6c07 Mon Sep 17 00:00:00 2001 From: William Casarin Date: Thu, 20 Mar 2025 15:04:11 -0700 Subject: [PATCH 32/91] nostrdb: win: fix build on windows --- nostrdb/src/nostrdb.c | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/nostrdb/src/nostrdb.c b/nostrdb/src/nostrdb.c index a07f213e6..657e90d8f 100644 --- a/nostrdb/src/nostrdb.c +++ b/nostrdb/src/nostrdb.c @@ -7179,6 +7179,7 @@ void ndb_config_set_ingest_filter(struct ndb_config *config, int ndb_print_relay_kind_index(struct ndb_txn *txn) { MDB_cursor *cur; + unsigned char *d; MDB_val k, v; int i; @@ -7188,10 +7189,11 @@ int ndb_print_relay_kind_index(struct ndb_txn *txn) i = 1; printf("relay\tkind\tcreated_at\tnote_id\n"); while (mdb_cursor_get(cur, &k, &v, MDB_NEXT) == 0) { - printf("%s\t", (const char *)(k.mv_data + 25)); - printf("%" PRIu64 "\t", *(uint64_t*)(k.mv_data + 8)); - printf("%" PRIu64 "\t", *(uint64_t*)(k.mv_data + 16)); - printf("%" PRIu64 "\n", *(uint64_t*)(k.mv_data + 0)); + d = (unsigned char *)k.mv_data; + printf("%s\t", (const char *)(d + 25)); + printf("%" PRIu64 "\t", *(uint64_t*)(d + 8)); + printf("%" PRIu64 "\t", 
*(uint64_t*)(d + 16)); + printf("%" PRIu64 "\n", *(uint64_t*)(d + 0)); i++; } From c29027ff5ba262c7083cf356b6585a4ee714c1ec Mon Sep 17 00:00:00 2001 From: William Casarin Date: Fri, 21 Mar 2025 12:52:29 -0700 Subject: [PATCH 33/91] nostrdb: note: always write relay index This fixes a race condition where if multiple of the same note is processed at the same time, we still manage to write the note relays Signed-off-by: William Casarin --- nostrdb/src/nostrdb.c | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/nostrdb/src/nostrdb.c b/nostrdb/src/nostrdb.c index 657e90d8f..5cb922881 100644 --- a/nostrdb/src/nostrdb.c +++ b/nostrdb/src/nostrdb.c @@ -4665,8 +4665,14 @@ static uint64_t ndb_write_note(struct ndb_txn *txn, kind = note->note->kind; // let's quickly sanity check if we already have this note - if (ndb_get_notekey_by_id(txn, note->note->id)) + if (ndb_get_notekey_by_id(txn, note->note->id)) { + // even if we do we still need to write relay index + ndb_write_note_relay_kind_index(txn, kind, note_key, + ndb_note_created_at(note->note), + note->relay, relay_len); + ndb_write_note_relay(txn, note_key, note->relay, relay_len); return 0; + } // get dbs note_db = txn->lmdb->dbs[NDB_DB_NOTE]; From f0521ba406d2bc8d95afffe1ab68e298038e196a Mon Sep 17 00:00:00 2001 From: William Casarin Date: Fri, 21 Mar 2025 13:27:37 -0700 Subject: [PATCH 34/91] nostrdb: relay-index: fix a few bugs There were a few race conditions and lmdb bugs in the relay index implementation. Fix those! Signed-off-by: William Casarin --- nostrdb/src/content_parser.c | 4 +++- nostrdb/src/cursor.h | 1 - nostrdb/src/nostrdb.c | 22 ++++++++++++++++------ 3 files changed, 19 insertions(+), 8 deletions(-) diff --git a/nostrdb/src/content_parser.c b/nostrdb/src/content_parser.c index ca0e358f7..a8352db12 100644 --- a/nostrdb/src/content_parser.c +++ b/nostrdb/src/content_parser.c @@ -24,8 +24,10 @@ struct ndb_content_parser { static int parse_digit(struct cursor *cur, int *digit) { int c; - if ((c = peek_char(cur, 0)) == -1) + if ((c = peek_char(cur, 0)) == -1) { + *digit = 0; return 0; + } c -= '0'; diff --git a/nostrdb/src/cursor.h b/nostrdb/src/cursor.h index 3d8fc1e5d..e24ff16eb 100644 --- a/nostrdb/src/cursor.h +++ b/nostrdb/src/cursor.h @@ -715,7 +715,6 @@ static inline int cursor_align(struct cursor *cur, int bytes) { // pad to n-byte alignment pad = ((size + (bytes-1)) & ~(bytes-1)) - size; - if (pad > 0 && !cursor_memset(cur, 0, pad)) return 0; diff --git a/nostrdb/src/nostrdb.c b/nostrdb/src/nostrdb.c index 5cb922881..15d7001e2 100644 --- a/nostrdb/src/nostrdb.c +++ b/nostrdb/src/nostrdb.c @@ -1541,8 +1541,12 @@ static int ndb_write_note_relay(struct ndb_txn *txn, uint64_t note_key, int rc, len; MDB_val k, v; - if (relay == NULL || relay_len == 0) + if (relay == NULL || relay_len == 0) { + ndb_debug("relay is NULL in ndb_write_note_relay? 
'%s' %d\n", relay, relay_len); return 0; + } + + ndb_debug("writing note_relay '%s' for notekey:%" PRIu64 "\n", relay, note_key); if (!(len = prepare_relay_buf(relay_buf, sizeof(relay_buf), relay, relay_len))) { fprintf(stderr, "relay url '%s' too large when writing note relay index\n", relay); @@ -1566,6 +1570,8 @@ static int ndb_write_note_relay(struct ndb_txn *txn, uint64_t note_key, return 0; } + ndb_debug("wrote %d bytes to note relay: '%s'\n", len, relay_buf); + return 1; } @@ -1597,6 +1603,8 @@ static int ndb_write_note_relay_kind_index(struct ndb_txn *txn, if (relay == NULL || relay_len == 0) return 0; + ndb_debug("writing note_relay_kind_index '%s' for notekey:%" PRIu64 "\n", relay, note_key); + make_cursor(buf, buf + sizeof(buf), &cur); if (!cursor_push(&cur, (unsigned char *)¬e_key, 8)) return 0; @@ -1604,6 +1612,7 @@ static int ndb_write_note_relay_kind_index(struct ndb_txn *txn, if (!cursor_push(&cur, (unsigned char *)&created_at, 8)) return 0; if (!cursor_push_byte(&cur, (uint8_t)relay_len)) return 0; if (!cursor_push(&cur, (unsigned char *)relay, relay_len)) return 0; + if (!cursor_push_byte(&cur, 0)) return 0; if (!cursor_align(&cur, 8)) return 0; assert(((cur.p-cur.start)%8) == 0); @@ -2612,6 +2621,7 @@ int ndb_note_seen_on_relay(struct ndb_txn *txn, uint64_t note_key, const char *r return 0; rc = mdb_cursor_get(cur, &k, &v, MDB_GET_BOTH); + ndb_debug("seen_on_relay result: %s\n", mdb_strerror(rc)); mdb_cursor_close(cur); return rc == MDB_SUCCESS; @@ -4664,6 +4674,9 @@ static uint64_t ndb_write_note(struct ndb_txn *txn, kind = note->note->kind; + if (note->relay != NULL) + relay_len = strlen(note->relay); + // let's quickly sanity check if we already have this note if (ndb_get_notekey_by_id(txn, note->note->id)) { // even if we do we still need to write relay index @@ -4691,9 +4704,6 @@ static uint64_t ndb_write_note(struct ndb_txn *txn, return 0; } - if (note->relay != NULL) - relay_len = strlen(note->relay); - ndb_write_note_id_index(txn, note->note, note_key); ndb_write_note_kind_index(txn, note->note, note_key); ndb_write_note_tag_index(txn, note->note, note_key); @@ -5270,7 +5280,7 @@ static int ndb_init_lmdb(const char *filename, struct ndb_lmdb *lmdb, size_t map mdb_set_compare(txn, lmdb->dbs[NDB_DB_NOTE_RELAY_KIND], ndb_relay_kind_cmp); // note_id -> relay index - if ((rc = mdb_dbi_open(txn, "note_relays", MDB_CREATE | MDB_DUPSORT | MDB_DUPFIXED, &lmdb->dbs[NDB_DB_NOTE_RELAYS]))) { + if ((rc = mdb_dbi_open(txn, "note_relays", MDB_CREATE | MDB_DUPSORT, &lmdb->dbs[NDB_DB_NOTE_RELAYS]))) { fprintf(stderr, "mdb_dbi_open profile last fetch, error %d\n", rc); return 0; } @@ -7196,7 +7206,7 @@ int ndb_print_relay_kind_index(struct ndb_txn *txn) printf("relay\tkind\tcreated_at\tnote_id\n"); while (mdb_cursor_get(cur, &k, &v, MDB_NEXT) == 0) { d = (unsigned char *)k.mv_data; - printf("%s\t", (const char *)(d + 25)); + printf("'%s'\t", (const char *)(d + 25)); printf("%" PRIu64 "\t", *(uint64_t*)(d + 8)); printf("%" PRIu64 "\t", *(uint64_t*)(d + 16)); printf("%" PRIu64 "\n", *(uint64_t*)(d + 0)); From e42b14cc6f42188b42e8f1e85ab3db4a9b8fc775 Mon Sep 17 00:00:00 2001 From: William Casarin Date: Fri, 21 Mar 2025 14:46:26 -0700 Subject: [PATCH 35/91] nostrdb: debug: add a print for debugging rust integration Signed-off-by: William Casarin --- nostrdb/src/nostrdb.c | 1 + 1 file changed, 1 insertion(+) diff --git a/nostrdb/src/nostrdb.c b/nostrdb/src/nostrdb.c index 15d7001e2..2ac9ee759 100644 --- a/nostrdb/src/nostrdb.c +++ b/nostrdb/src/nostrdb.c @@ -2642,6 +2642,7 @@ static 
int ndb_process_note_relay(struct ndb_txn *txn, struct ndb_writer_msg *ou // if not, tell the writer thread to emit a NOTE_RELAY event out->type = NDB_WRITER_NOTE_RELAY; + ndb_debug("pushing NDB_WRITER_NOTE_RELAY with note_key %" PRIu64 "\n", note_key); out->note_relay.relay = relay; out->note_relay.note_key = note_key; out->note_relay.kind = ndb_note_kind(note); From a0b85129d4af71e52e56af3eeb2c2f01cf67414d Mon Sep 17 00:00:00 2001 From: William Casarin Date: Fri, 21 Mar 2025 14:53:59 -0700 Subject: [PATCH 36/91] nostrdb: relay: fix race condition bug Signed-off-by: William Casarin --- nostrdb/src/nostrdb.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nostrdb/src/nostrdb.c b/nostrdb/src/nostrdb.c index 2ac9ee759..f6eaf92a6 100644 --- a/nostrdb/src/nostrdb.c +++ b/nostrdb/src/nostrdb.c @@ -4679,7 +4679,7 @@ static uint64_t ndb_write_note(struct ndb_txn *txn, relay_len = strlen(note->relay); // let's quickly sanity check if we already have this note - if (ndb_get_notekey_by_id(txn, note->note->id)) { + if ((note_key = ndb_get_notekey_by_id(txn, note->note->id))) { // even if we do we still need to write relay index ndb_write_note_relay_kind_index(txn, kind, note_key, ndb_note_created_at(note->note), From af2298dcb7f9fe9df115ad224f5142134e72dafc Mon Sep 17 00:00:00 2001 From: William Casarin Date: Fri, 21 Mar 2025 15:39:01 -0700 Subject: [PATCH 37/91] nostrdb: relay: fix potential relay index corruption Signed-off-by: William Casarin --- nostrdb/src/nostrdb.c | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/nostrdb/src/nostrdb.c b/nostrdb/src/nostrdb.c index f6eaf92a6..662aea77f 100644 --- a/nostrdb/src/nostrdb.c +++ b/nostrdb/src/nostrdb.c @@ -1973,6 +1973,13 @@ struct ndb_writer_note { const char *relay; }; +static void ndb_writer_note_init(struct ndb_writer_note *writer_note, struct ndb_note *note, size_t note_len, const char *relay) +{ + writer_note->note = note; + writer_note->note_len = note_len; + writer_note->relay = relay; +} + struct ndb_writer_profile { struct ndb_writer_note note; struct ndb_profile_record_builder record; @@ -2073,8 +2080,7 @@ static int ndb_migrate_utf8_profile_names(struct ndb_txn *txn) copied_note = malloc(len); memcpy(copied_note, note, len); - profile.note.note = copied_note; - profile.note.note_len = len; + ndb_writer_note_init(&profile.note, copied_note, len, NULL); // we don't pass in flags when migrating... a bit sketchy but // whatever. noone is using this to customize nostrdb atm @@ -2573,8 +2579,7 @@ static int ndb_ingester_process_note(secp256k1_context *ctx, ndb_process_profile_note(note, b); out->type = NDB_WRITER_PROFILE; - out->profile.note.note = note; - out->profile.note.note_len = note_size; + ndb_writer_note_init(&out->profile.note, note, note_size, relay); return 1; } else if (note->kind == 6) { // process the repost if we have a repost event @@ -2587,9 +2592,7 @@ static int ndb_ingester_process_note(secp256k1_context *ctx, } out->type = NDB_WRITER_NOTE; - out->note.note = note; - out->note.note_len = note_size; - out->note.relay = relay; + ndb_writer_note_init(&out->note, note, note_size, relay); return 1; } From 0f66e87fafa3678decc10c56cb33a7baffbfd724 Mon Sep 17 00:00:00 2001 From: William Casarin Date: Fri, 21 Mar 2025 21:02:14 -0700 Subject: [PATCH 38/91] nostrdb: Relay queries Add support for relay-based filtering in nostr queries. Filters can now include a "relays" field. 
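From C, the same field can be built with the existing filter-builder calls; a rough
sketch (illustrative only, not part of this patch, error handling omitted):

    struct ndb_filter f;    /* declared in nostrdb.h */

    ndb_filter_init_with(&f, 1);

    ndb_filter_start_field(&f, NDB_FILTER_RELAYS);
    ndb_filter_add_str_element(&f, "wss://pyramid.fiatjaf.com/");
    ndb_filter_end_field(&f);

    ndb_filter_start_field(&f, NDB_FILTER_KINDS);
    ndb_filter_add_int_element(&f, 1);
    ndb_filter_end_field(&f);

    ndb_filter_end(&f);
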
Optimal performance when you include a kind as well: {"relays":["wss://pyramid.fiatjaf.com/"], "kinds":[1]} This corresponds to a `ndb` query like so: $ ndb query -r wss://pyramid.fiatjaf.com/ -k 1 -l 1 using filter '{"relays":["wss://pyramid.fiatjaf.com/"],"kinds":[1],"limit":1}' 1 results in 0.094929 ms {"id":"277dd4ed26d0b44576..} Signed-off-by: William Casarin --- nostrdb/src/nostrdb.c | 463 +++++++++++++++++++++++++++++++++++------- nostrdb/src/nostrdb.h | 2 + 2 files changed, 387 insertions(+), 78 deletions(-) diff --git a/nostrdb/src/nostrdb.c b/nostrdb/src/nostrdb.c index 662aea77f..1982824e4 100644 --- a/nostrdb/src/nostrdb.c +++ b/nostrdb/src/nostrdb.c @@ -238,6 +238,7 @@ enum ndb_query_plan { NDB_PLAN_CREATED, NDB_PLAN_TAGS, NDB_PLAN_SEARCH, + NDB_PLAN_RELAY_KINDS, }; // A id + u64 + timestamp @@ -751,6 +752,7 @@ static const char *ndb_filter_field_name(enum ndb_filter_fieldtype field) case NDB_FILTER_UNTIL: return "until"; case NDB_FILTER_LIMIT: return "limit"; case NDB_FILTER_SEARCH: return "search"; + case NDB_FILTER_RELAYS: return "relays"; } return "unknown"; @@ -862,6 +864,15 @@ static int ndb_filter_add_element(struct ndb_filter *filter, union ndb_filter_el } // push a pointer of the string in the databuf as an element break; + case NDB_FILTER_RELAYS: + if (current->field.elem_type != NDB_ELEMENT_STRING) { + return 0; + } + if (!cursor_push(&filter->data_buf, (unsigned char *)el.string.string, el.string.len)) + return 0; + if (!cursor_push_byte(&filter->data_buf, 0)) + return 0; + break; } if (!cursor_push(&filter->elem_buf, (unsigned char *)&offset, @@ -920,6 +931,7 @@ int ndb_filter_add_str_element_len(struct ndb_filter *filter, const char *str, i return 0; } break; + case NDB_FILTER_RELAYS: case NDB_FILTER_TAGS: break; } @@ -950,6 +962,7 @@ int ndb_filter_add_int_element(struct ndb_filter *filter, uint64_t integer) case NDB_FILTER_AUTHORS: case NDB_FILTER_TAGS: case NDB_FILTER_SEARCH: + case NDB_FILTER_RELAYS: return 0; case NDB_FILTER_KINDS: case NDB_FILTER_SINCE: @@ -981,6 +994,7 @@ int ndb_filter_add_id_element(struct ndb_filter *filter, const unsigned char *id case NDB_FILTER_LIMIT: case NDB_FILTER_KINDS: case NDB_FILTER_SEARCH: + case NDB_FILTER_RELAYS: return 0; case NDB_FILTER_IDS: case NDB_FILTER_AUTHORS: @@ -1086,6 +1100,31 @@ static int compare_ids(const void *pa, const void *pb) return memcmp(a, b, 32); } +static int compare_strs(const void *pa, const void *pb) +{ + const char *a = *(const char **)pa; + const char *b = *(const char **)pb; + + return strcmp(a, b); +} + +static int search_strs(const void *ctx, const void *mstr_ptr) +{ + // we reuse search_id_state here and just cast to (const char *) when + // needed + struct search_id_state *state; + const char *mstr_str; + uint32_t mstr; + + state = (struct search_id_state *)ctx; + mstr = *(uint32_t *)mstr_ptr; + + mstr_str = (const char *)ndb_filter_elements_data(state->filter, mstr); + assert(mstr_str); + + return strcmp((const char *)state->key, mstr_str); +} + static int search_ids(const void *ctx, const void *mid_ptr) { struct search_id_state *state; @@ -1120,7 +1159,8 @@ static int compare_kinds(const void *pa, const void *pb) // // returns 1 if a filter matches a note static int ndb_filter_matches_with(struct ndb_filter *filter, - struct ndb_note *note, int already_matched) + struct ndb_note *note, int already_matched, + struct ndb_note_relay_iterator *relay_iter) { int i, j; struct ndb_filter_elements *els; @@ -1139,11 +1179,27 @@ static int ndb_filter_matches_with(struct ndb_filter *filter, 
continue; switch (els->field.type) { - case NDB_FILTER_KINDS: - for (j = 0; j < els->count; j++) { - if ((unsigned int)els->elements[j] == note->kind) + case NDB_FILTER_KINDS: + for (j = 0; j < els->count; j++) { + if ((unsigned int)els->elements[j] == note->kind) + goto cont; + } + break; + case NDB_FILTER_RELAYS: + // for each relay the note was seen on, see if any match + if (!relay_iter) { + assert(!"expected relay iterator..."); + break; + } + while ((state.key = (unsigned char *)ndb_note_relay_iterate_next(relay_iter))) { + // relays in filters are always sorted + if (bsearch(&state, &els->elements[0], els->count, + sizeof(els->elements[0]), search_strs)) { + ndb_note_relay_iterate_close(relay_iter); goto cont; + } } + ndb_note_relay_iterate_close(relay_iter); break; case NDB_FILTER_IDS: state.key = ndb_note_id(note); @@ -1201,7 +1257,14 @@ static int ndb_filter_matches_with(struct ndb_filter *filter, int ndb_filter_matches(struct ndb_filter *filter, struct ndb_note *note) { - return ndb_filter_matches_with(filter, note, 0); + return ndb_filter_matches_with(filter, note, 0, NULL); +} + +int ndb_filter_matches_with_relay(struct ndb_filter *filter, + struct ndb_note *note, + struct ndb_note_relay_iterator *note_relay_iter) +{ + return ndb_filter_matches_with(filter, note, 0, note_relay_iter); } // because elements are stored as offsets and qsort doesn't support context, @@ -1298,6 +1361,9 @@ void ndb_filter_end_field(struct ndb_filter *filter) case NDB_FILTER_AUTHORS: sort_filter_elements(filter, cur, compare_ids); break; + case NDB_FILTER_RELAYS: + sort_filter_elements(filter, cur, compare_strs); + break; case NDB_FILTER_KINDS: qsort(&cur->elements[0], cur->count, sizeof(cur->elements[0]), compare_kinds); @@ -1535,7 +1601,7 @@ static int prepare_relay_buf(char *relay_buf, int bufsize, const char *relay, // Write to the note_id -> relay_url database. 
This records where notes // have been seen static int ndb_write_note_relay(struct ndb_txn *txn, uint64_t note_key, - const char *relay, int relay_len) + const char *relay, uint8_t relay_len) { char relay_buf[256]; int rc, len; @@ -1575,12 +1641,90 @@ static int ndb_write_note_relay(struct ndb_txn *txn, uint64_t note_key, return 1; } -static int ndb_write_note_relay_kind_index(struct ndb_txn *txn, - uint64_t kind, - uint64_t note_key, - uint64_t created_at, - const char *relay, - int relay_len) +struct ndb_relay_kind_key { + uint64_t note_key; + uint64_t kind; + uint64_t created_at; + uint8_t relay_len; + const char *relay; +}; + +static int ndb_relay_kind_key_init( + struct ndb_relay_kind_key *key, + uint64_t note_key, + uint64_t kind, + uint64_t created_at, + const char *relay) +{ + if (relay == NULL) + return 0; + + key->relay = relay; + key->relay_len = strlen(relay); + if (key->relay_len > 248) + return 0; + + key->note_key = note_key; + key->kind = kind; + key->created_at = created_at; + return 1; +} + + +// create a range key for a relay kind query +static int ndb_relay_kind_key_init_high( + struct ndb_relay_kind_key *key, + const char *relay, + uint64_t kind, + uint64_t until) +{ + return ndb_relay_kind_key_init(key, UINT64_MAX, kind, UINT64_MAX, relay); +} + +static void ndb_parse_relay_kind_key(struct ndb_relay_kind_key *key, unsigned char *buf) +{ + // WE ARE ASSUMING WE ARE PARSING FROM AN ALIGNED BUFFER HERE + assert((uint64_t)buf % 8 == 0); + // - note_id: 00 + 8 bytes + // - kind: 08 + 8 bytes + // - created_at: 16 + 8 bytes + // - relay_url_size: 24 + 1 byte + // - relay_url: 25 + n byte null-terminated string + // - pad to 8 byte alignment + key->note_key = *(uint64_t*) (buf + 0); + key->kind = *(uint64_t*) (buf + 8); + key->created_at = *(uint64_t*) (buf + 16); + key->relay_len = *(uint8_t*) (buf + 24); + key->relay = (const char*) (buf + 25); +} + +static void ndb_debug_relay_kind_key(struct ndb_relay_kind_key *key) +{ + ndb_debug("note_key:%" PRIu64 " kind:%" PRIu64 " created_at:%" PRIu64 " '%s'\n", + key->note_key, key->kind, key->created_at, key->relay); +} + +static int ndb_build_relay_kind_key(unsigned char *buf, int bufsize, struct ndb_relay_kind_key *key) +{ + struct cursor cur; + make_cursor(buf, buf + bufsize, &cur); + + if (!cursor_push(&cur, (unsigned char *)&key->note_key, 8)) return 0; + if (!cursor_push(&cur, (unsigned char *)&key->kind, 8)) return 0; + if (!cursor_push(&cur, (unsigned char *)&key->created_at, 8)) return 0; + if (!cursor_push_byte(&cur, key->relay_len)) return 0; + if (!cursor_push(&cur, (unsigned char *)key->relay, key->relay_len)) return 0; + if (!cursor_push_byte(&cur, 0)) return 0; + if (!cursor_align(&cur, 8)) return 0; + + assert(((cur.p-cur.start)%8) == 0); + + return cur.p - cur.start; +} + +static int ndb_write_note_relay_kind_index( + struct ndb_txn *txn, + struct ndb_relay_kind_key *key) { // The relay kind key has a layout like so // @@ -1592,33 +1736,20 @@ static int ndb_write_note_relay_kind_index(struct ndb_txn *txn, // - pad to 8 byte alignment unsigned char buf[256]; - int rc; - struct cursor cur; + int rc, len; MDB_val k, v; // come on bro - if (relay_len > 248) - return 0; - - if (relay == NULL || relay_len == 0) + if (key->relay_len > 248 || key->relay == NULL || key->relay_len == 0) return 0; - ndb_debug("writing note_relay_kind_index '%s' for notekey:%" PRIu64 "\n", relay, note_key); - - make_cursor(buf, buf + sizeof(buf), &cur); - - if (!cursor_push(&cur, (unsigned char *)¬e_key, 8)) return 0; - if (!cursor_push(&cur, 
(unsigned char *)&kind, 8)) return 0; - if (!cursor_push(&cur, (unsigned char *)&created_at, 8)) return 0; - if (!cursor_push_byte(&cur, (uint8_t)relay_len)) return 0; - if (!cursor_push(&cur, (unsigned char *)relay, relay_len)) return 0; - if (!cursor_push_byte(&cur, 0)) return 0; - if (!cursor_align(&cur, 8)) return 0; + ndb_debug("writing note_relay_kind_index '%s' for notekey:%" PRIu64 "\n", key->relay, key->note_key); - assert(((cur.p-cur.start)%8) == 0); + if (!(len = ndb_build_relay_kind_key(buf, sizeof(buf), key))) + return 0; - k.mv_data = cur.start; - k.mv_size = cur.p - cur.start; + k.mv_data = buf; + k.mv_size = len; v.mv_data = NULL; v.mv_size = 0; @@ -1632,6 +1763,14 @@ static int ndb_write_note_relay_kind_index(struct ndb_txn *txn, return 1; } +// writes the relay note kind index and the note_id -> relay db +static int ndb_write_note_relay_indexes(struct ndb_txn *txn, struct ndb_relay_kind_key *key) +{ + ndb_write_note_relay_kind_index(txn, key); + ndb_write_note_relay(txn, key->note_key, key->relay, key->relay_len); + return 1; +} + static int ndb_write_note_pubkey_index(struct ndb_txn *txn, struct ndb_note *note, uint64_t note_key) { @@ -2212,17 +2351,23 @@ int ndb_write_last_profile_fetch(struct ndb *ndb, const unsigned char *pubkey, // after the first element, so we have to go back one. static int ndb_cursor_start(MDB_cursor *cur, MDB_val *k, MDB_val *v) { + int rc; // Position cursor at the next key greater than or equal to the // specified key - if (mdb_cursor_get(cur, k, v, MDB_SET_RANGE)) { + + if ((rc = mdb_cursor_get(cur, k, v, MDB_SET_RANGE))) { // Failed :(. It could be the last element? - if (mdb_cursor_get(cur, k, v, MDB_LAST)) + if ((rc = mdb_cursor_get(cur, k, v, MDB_LAST))) { + ndb_debug("MDB_LAST failed: '%s'\n", mdb_strerror(rc)); return 0; + } } else { // if set range worked and our key exists, it should be // the one right before this one - if (mdb_cursor_get(cur, k, v, MDB_PREV)) + if ((rc = mdb_cursor_get(cur, k, v, MDB_PREV))) { + ndb_debug("moving back failed: '%s'\n", mdb_strerror(rc)); return 0; + } } return 1; @@ -3415,7 +3560,7 @@ static int ndb_query_plan_execute_ids(struct ndb_txn *txn, MDB_cursor *cur; MDB_dbi db; MDB_val k, v; - int rc, i; + int rc, i, need_relays = 0; struct ndb_filter_elements *ids; struct ndb_note *note; struct ndb_query_result res; @@ -3423,6 +3568,7 @@ static int ndb_query_plan_execute_ids(struct ndb_txn *txn, uint64_t note_id, until, *pint; size_t note_size; unsigned char *id; + struct ndb_note_relay_iterator note_relay_iter; until = UINT64_MAX; @@ -3432,6 +3578,9 @@ static int ndb_query_plan_execute_ids(struct ndb_txn *txn, if ((pint = ndb_filter_get_int(filter, NDB_FILTER_UNTIL))) until = *pint; + if (ndb_filter_find_elements(filter, NDB_FILTER_RELAYS)) + need_relays = 1; + db = txn->lmdb->dbs[NDB_DB_NOTE_ID]; if ((rc = mdb_cursor_open(txn->mdb_txn, db, &cur))) return 0; @@ -3460,12 +3609,16 @@ static int ndb_query_plan_execute_ids(struct ndb_txn *txn, if (!(note = ndb_get_note_by_key(txn, note_id, ¬e_size))) continue; + if (need_relays) + ndb_note_relay_iterate_start(txn, ¬e_relay_iter, note_id); + // Sure this particular lookup matched the index query, but // does it match the entire filter? Check! We also pass in // things we've already matched via the filter so we don't have // to check again. This can be pretty important for filters // with a large number of entries. - if (!ndb_filter_matches_with(filter, note, 1 << NDB_FILTER_IDS)) + if (!ndb_filter_matches_with(filter, note, 1 << NDB_FILTER_IDS, + need_relays ? 
¬e_relay_iter : NULL)) continue; ndb_query_result_init(&res, note, note_size, note_id); @@ -3521,7 +3674,7 @@ static int ndb_query_plan_execute_authors(struct ndb_txn *txn, { MDB_val k, v; MDB_cursor *cur; - int rc, i; + int rc, i, need_relays = 0; uint64_t *pint, until, since, note_key; unsigned char *author; struct ndb_note *note; @@ -3529,6 +3682,7 @@ static int ndb_query_plan_execute_authors(struct ndb_txn *txn, struct ndb_filter_elements *authors; struct ndb_query_result res; struct ndb_tsid tsid, *ptsid; + struct ndb_note_relay_iterator note_relay_iter; enum ndb_dbs db; db = txn->lmdb->dbs[NDB_DB_NOTE_PUBKEY]; @@ -3544,6 +3698,9 @@ static int ndb_query_plan_execute_authors(struct ndb_txn *txn, if ((pint = ndb_filter_get_int(filter, NDB_FILTER_SINCE))) since = *pint; + if (ndb_filter_find_elements(filter, NDB_FILTER_RELAYS)) + need_relays = 1; + if ((rc = mdb_cursor_open(txn->mdb_txn, db, &cur))) return 0; @@ -3576,8 +3733,15 @@ static int ndb_query_plan_execute_authors(struct ndb_txn *txn, if (!(note = ndb_get_note_by_key(txn, note_key, ¬e_size))) goto next; - if (!ndb_filter_matches_with(filter, note, 1 << NDB_FILTER_AUTHORS)) + if (need_relays) + ndb_note_relay_iterate_start(txn, ¬e_relay_iter, note_key); + + if (!ndb_filter_matches_with(filter, note, + 1 << NDB_FILTER_AUTHORS, + need_relays ? ¬e_relay_iter : NULL)) + { goto next; + } ndb_query_result_init(&res, note, note_size, note_key); if (!push_query_result(results, &res)) @@ -3601,12 +3765,13 @@ static int ndb_query_plan_execute_created_at(struct ndb_txn *txn, MDB_dbi db; MDB_val k, v; MDB_cursor *cur; - int rc; + int rc, need_relays = 0; struct ndb_note *note; struct ndb_tsid key, *pkey; uint64_t *pint, until, since, note_id; size_t note_size; struct ndb_query_result res; + struct ndb_note_relay_iterator note_relay_iter; unsigned char high_key[32] = {0xFF}; db = txn->lmdb->dbs[NDB_DB_NOTE_ID]; @@ -3619,6 +3784,9 @@ static int ndb_query_plan_execute_created_at(struct ndb_txn *txn, if ((pint = ndb_filter_get_int(filter, NDB_FILTER_SINCE))) since = *pint; + if (ndb_filter_find_elements(filter, NDB_FILTER_RELAYS)) + need_relays = 1; + if ((rc = mdb_cursor_open(txn->mdb_txn, db, &cur))) return 0; @@ -3642,8 +3810,11 @@ static int ndb_query_plan_execute_created_at(struct ndb_txn *txn, if (!(note = ndb_get_note_by_key(txn, note_id, ¬e_size))) goto next; + if (need_relays) + ndb_note_relay_iterate_start(txn, ¬e_relay_iter, note_id); + // does this entry match our filter? - if (!ndb_filter_matches_with(filter, note, 0)) + if (!ndb_filter_matches_with(filter, note, 0, need_relays ? 
¬e_relay_iter : NULL)) goto next; ndb_query_result_init(&res, note, (uint64_t)note_size, note_id); @@ -3666,7 +3837,7 @@ static int ndb_query_plan_execute_tags(struct ndb_txn *txn, MDB_cursor *cur; MDB_dbi db; MDB_val k, v; - int len, taglen, rc, i; + int len, taglen, rc, i, need_relays = 0; uint64_t *pint, until, note_id; size_t note_size; unsigned char key_buffer[255]; @@ -3674,12 +3845,16 @@ static int ndb_query_plan_execute_tags(struct ndb_txn *txn, struct ndb_filter_elements *tags; unsigned char *tag; struct ndb_query_result res; + struct ndb_note_relay_iterator note_relay_iter; db = txn->lmdb->dbs[NDB_DB_NOTE_TAGS]; if (!(tags = ndb_filter_find_elements(filter, NDB_FILTER_TAGS))) return 0; + if (ndb_filter_find_elements(filter, NDB_FILTER_RELAYS)) + need_relays = 1; + until = UINT64_MAX; if ((pint = ndb_filter_get_int(filter, NDB_FILTER_UNTIL))) until = *pint; @@ -3722,7 +3897,12 @@ static int ndb_query_plan_execute_tags(struct ndb_txn *txn, if (!(note = ndb_get_note_by_key(txn, note_id, ¬e_size))) goto next; - if (!ndb_filter_matches_with(filter, note, 1 << NDB_FILTER_TAGS)) + if (need_relays) + ndb_note_relay_iterate_start(txn, ¬e_relay_iter, note_id); + + if (!ndb_filter_matches_with(filter, note, + 1 << NDB_FILTER_TAGS, + need_relays ? ¬e_relay_iter : NULL)) goto next; ndb_query_result_init(&res, note, note_size, note_id); @@ -3739,6 +3919,120 @@ static int ndb_query_plan_execute_tags(struct ndb_txn *txn, return 1; } +static int ndb_query_plan_execute_relay_kinds( + struct ndb_txn *txn, + struct ndb_filter *filter, + struct ndb_query_results *results, + int limit) +{ + MDB_cursor *cur; + MDB_dbi db; + MDB_val k, v; + struct ndb_note *note; + struct ndb_filter_elements *kinds, *relays; + struct ndb_query_result res; + uint64_t kind, note_id, until, since, *pint; + size_t note_size; + const char *relay; + int i, j, rc, len; + struct ndb_relay_kind_key relay_key; + unsigned char keybuf[256]; + + // we should have kinds in a kinds filter! 
+ if (!(kinds = ndb_filter_find_elements(filter, NDB_FILTER_KINDS))) + return 0; + + if (!(relays = ndb_filter_find_elements(filter, NDB_FILTER_RELAYS))) + return 0; + + until = UINT64_MAX; + if ((pint = ndb_filter_get_int(filter, NDB_FILTER_UNTIL))) + until = *pint; + + since = 0; + if ((pint = ndb_filter_get_int(filter, NDB_FILTER_SINCE))) + since = *pint; + + db = txn->lmdb->dbs[NDB_DB_NOTE_RELAY_KIND]; + + if ((rc = mdb_cursor_open(txn->mdb_txn, db, &cur))) + return 0; + + for (j = 0; j < relays->count; j++) { + if (query_is_full(results, limit)) + break; + + if (!(relay = ndb_filter_get_string_element(filter, relays, j))) + continue; + + for (i = 0; i < kinds->count; i++) { + if (query_is_full(results, limit)) + break; + + kind = kinds->elements[i]; + ndb_debug("kind %" PRIu64 "\n", kind); + + if (!ndb_relay_kind_key_init_high(&relay_key, relay, kind, until)) { + ndb_debug("ndb_relay_kind_key_init_high failed in relay query\n"); + continue; + } + + if (!(len = ndb_build_relay_kind_key(keybuf, sizeof(keybuf), &relay_key))) { + ndb_debug("ndb_build_relay_kind_key failed in relay query\n"); + ndb_debug_relay_kind_key(&relay_key); + continue; + } + + k.mv_data = keybuf; + k.mv_size = len; + + ndb_debug("starting with key "); + ndb_debug_relay_kind_key(&relay_key); + + if (!ndb_cursor_start(cur, &k, &v)) + continue; + + // scan the kind subindex + while (!query_is_full(results, limit)) { + ndb_parse_relay_kind_key(&relay_key, k.mv_data); + + ndb_debug("inside kind subindex "); + ndb_debug_relay_kind_key(&relay_key); + + if (relay_key.kind != kind) + break; + + if (strcmp(relay_key.relay, relay)) + break; + + // don't continue the scan if we're below `since` + if (relay_key.created_at < since) + break; + + note_id = relay_key.note_key; + if (!(note = ndb_get_note_by_key(txn, note_id, ¬e_size))) + goto next; + + if (!ndb_filter_matches_with(filter, note, + (1 << NDB_FILTER_KINDS) | (1 << NDB_FILTER_RELAYS), + NULL)) + goto next; + + ndb_query_result_init(&res, note, note_size, note_id); + if (!push_query_result(results, &res)) + break; + +next: + if (mdb_cursor_get(cur, &k, &v, MDB_PREV)) + break; + } + } + } + + mdb_cursor_close(cur); + return 1; +} + static int ndb_query_plan_execute_kinds(struct ndb_txn *txn, struct ndb_filter *filter, struct ndb_query_results *results, @@ -3753,12 +4047,16 @@ static int ndb_query_plan_execute_kinds(struct ndb_txn *txn, struct ndb_query_result res; uint64_t kind, note_id, until, since, *pint; size_t note_size; - int i, rc; + int i, rc, need_relays = 0; + struct ndb_note_relay_iterator note_relay_iter; // we should have kinds in a kinds filter! if (!(kinds = ndb_filter_find_elements(filter, NDB_FILTER_KINDS))) return 0; + if (ndb_filter_find_elements(filter, NDB_FILTER_RELAYS)) + need_relays = 1; + until = UINT64_MAX; if ((pint = ndb_filter_get_int(filter, NDB_FILTER_UNTIL))) until = *pint; @@ -3800,7 +4098,12 @@ static int ndb_query_plan_execute_kinds(struct ndb_txn *txn, if (!(note = ndb_get_note_by_key(txn, note_id, ¬e_size))) goto next; - if (!ndb_filter_matches_with(filter, note, 1 << NDB_FILTER_KINDS)) + if (need_relays) + ndb_note_relay_iterate_start(txn, ¬e_relay_iter, note_id); + + if (!ndb_filter_matches_with(filter, note, + 1 << NDB_FILTER_KINDS, + need_relays ? 
¬e_relay_iter : NULL)) goto next; ndb_query_result_init(&res, note, note_size, note_id); @@ -3819,19 +4122,22 @@ static int ndb_query_plan_execute_kinds(struct ndb_txn *txn, static enum ndb_query_plan ndb_filter_plan(struct ndb_filter *filter) { - struct ndb_filter_elements *ids, *kinds, *authors, *tags, *search; + struct ndb_filter_elements *ids, *kinds, *authors, *tags, *search, *relays; ids = ndb_filter_find_elements(filter, NDB_FILTER_IDS); search = ndb_filter_find_elements(filter, NDB_FILTER_SEARCH); kinds = ndb_filter_find_elements(filter, NDB_FILTER_KINDS); authors = ndb_filter_find_elements(filter, NDB_FILTER_AUTHORS); tags = ndb_filter_find_elements(filter, NDB_FILTER_TAGS); + relays = ndb_filter_find_elements(filter, NDB_FILTER_RELAYS); // this is rougly similar to the heuristic in strfry's dbscan if (search) { return NDB_PLAN_SEARCH; } else if (ids) { return NDB_PLAN_IDS; + } else if (relays && kinds && !authors) { + return NDB_PLAN_RELAY_KINDS; } else if (kinds && authors && authors->count <= 10) { return NDB_PLAN_AUTHOR_KINDS; } else if (authors && authors->count <= 10) { @@ -3845,7 +4151,7 @@ static enum ndb_query_plan ndb_filter_plan(struct ndb_filter *filter) return NDB_PLAN_CREATED; } -static const char *ndb_query_plan_name(int plan_id) +static const char *ndb_query_plan_name(enum ndb_query_plan plan_id) { switch (plan_id) { case NDB_PLAN_IDS: return "ids"; @@ -3854,6 +4160,8 @@ static const char *ndb_query_plan_name(int plan_id) case NDB_PLAN_TAGS: return "tags"; case NDB_PLAN_CREATED: return "created"; case NDB_PLAN_AUTHORS: return "authors"; + case NDB_PLAN_RELAY_KINDS: return "relay_kinds"; + case NDB_PLAN_AUTHOR_KINDS: return "author_kinds"; } return "unknown"; @@ -3884,18 +4192,19 @@ static int ndb_query_filter(struct ndb_txn *txn, struct ndb_filter *filter, if (!ndb_query_plan_execute_ids(txn, filter, &results, limit)) return 0; break; - + case NDB_PLAN_RELAY_KINDS: + if (!ndb_query_plan_execute_relay_kinds(txn, filter, &results, limit)) + return 0; + break; case NDB_PLAN_SEARCH: if (!ndb_query_plan_execute_search(txn, filter, &results, limit)) return 0; break; - // We have just kinds, just scan the kind index case NDB_PLAN_KINDS: if (!ndb_query_plan_execute_kinds(txn, filter, &results, limit)) return 0; break; - case NDB_PLAN_TAGS: if (!ndb_query_plan_execute_tags(txn, filter, &results, limit)) return 0; @@ -4671,23 +4980,18 @@ static uint64_t ndb_write_note(struct ndb_txn *txn, unsigned char *scratch, size_t scratch_size, uint32_t ndb_flags) { - int rc, relay_len = 0; + int rc; uint64_t note_key, kind; + struct ndb_relay_kind_key relay_key; MDB_dbi note_db; MDB_val key, val; kind = note->note->kind; - if (note->relay != NULL) - relay_len = strlen(note->relay); - // let's quickly sanity check if we already have this note if ((note_key = ndb_get_notekey_by_id(txn, note->note->id))) { - // even if we do we still need to write relay index - ndb_write_note_relay_kind_index(txn, kind, note_key, - ndb_note_created_at(note->note), - note->relay, relay_len); - ndb_write_note_relay(txn, note_key, note->relay, relay_len); + if (ndb_relay_kind_key_init(&relay_key, note_key, kind, ndb_note_created_at(note->note), note->relay)) + ndb_write_note_relay_indexes(txn, &relay_key); return 0; } @@ -4713,10 +5017,9 @@ static uint64_t ndb_write_note(struct ndb_txn *txn, ndb_write_note_tag_index(txn, note->note, note_key); ndb_write_note_pubkey_index(txn, note->note, note_key); ndb_write_note_pubkey_kind_index(txn, note->note, note_key); - ndb_write_note_relay_kind_index(txn, kind, 
note_key, - ndb_note_created_at(note->note), - note->relay, relay_len); - ndb_write_note_relay(txn, note_key, note->relay, relay_len); + + if (ndb_relay_kind_key_init(&relay_key, note_key, kind, ndb_note_created_at(note->note), note->relay)) + ndb_write_note_relay_indexes(txn, &relay_key); // only parse content and do fulltext index on text and longform notes if (kind == 1 || kind == 30023) { @@ -4890,7 +5193,7 @@ static void *ndb_writer_thread(void *data) struct ndb_writer *writer = data; struct ndb_writer_msg msgs[THREAD_QUEUE_BATCH], *msg; struct written_note written_notes[THREAD_QUEUE_BATCH]; - int i, popped, done, relay_len, needs_commit, num_notes; + int i, popped, done, needs_commit, num_notes; uint64_t note_nkey; struct ndb_txn txn; unsigned char *scratch; @@ -4973,18 +5276,15 @@ static void *ndb_writer_thread(void *data) } break; case NDB_WRITER_NOTE_RELAY: - relay_len = strlen(msg->note_relay.relay); - ndb_write_note_relay(&txn, - msg->note_relay.note_key, - msg->note_relay.relay, - relay_len); - ndb_write_note_relay_kind_index( - &txn, - msg->note_relay.kind, - msg->note_relay.note_key, - msg->note_relay.created_at, - msg->note_relay.relay, - relay_len); + struct ndb_relay_kind_key relay_key; + if (ndb_relay_kind_key_init(&relay_key, + msg->note_relay.note_key, + msg->note_relay.kind, + msg->note_relay.created_at, + msg->note_relay.relay)) + { + ndb_write_note_relay_indexes(&txn, &relay_key); + } break; case NDB_WRITER_DBMETA: ndb_write_version(&txn, msg->ndb_meta.version); @@ -5058,7 +5358,7 @@ static void *ndb_ingester_thread(void *data) int rc; ctx = secp256k1_context_create(SECP256K1_CONTEXT_VERIFY); - ndb_debug("started ingester thread\n"); + //ndb_debug("started ingester thread\n"); done = 0; while (!done) { @@ -6040,6 +6340,12 @@ int ndb_filter_json(const struct ndb_filter *filter, char *buf, int buflen) if (!cursor_push_int_str(c, ndb_filter_get_int_element(elems, 0))) return 0; break; + case NDB_FILTER_RELAYS: + if (!cursor_push_str(c, "\"relays\":")) + return 0; + if (!cursor_push_json_elem_array(c, filter, elems)) + return 0; + break; } if (i != filter->num_elements-1) { @@ -6868,6 +7174,7 @@ static int ndb_filter_parse_json(struct ndb_json_parser *parser, return 0; } break; + case NDB_FILTER_RELAYS: case NDB_FILTER_TAGS: if (!ndb_filter_parse_json_elems(parser, filter)) { ndb_debug("failed to parse filter tags\n"); @@ -7210,7 +7517,7 @@ int ndb_print_relay_kind_index(struct ndb_txn *txn) printf("relay\tkind\tcreated_at\tnote_id\n"); while (mdb_cursor_get(cur, &k, &v, MDB_NEXT) == 0) { d = (unsigned char *)k.mv_data; - printf("'%s'\t", (const char *)(d + 25)); + printf("%s\t", (const char *)(d + 25)); printf("%" PRIu64 "\t", *(uint64_t*)(d + 8)); printf("%" PRIu64 "\t", *(uint64_t*)(d + 16)); printf("%" PRIu64 "\n", *(uint64_t*)(d + 0)); diff --git a/nostrdb/src/nostrdb.h b/nostrdb/src/nostrdb.h index 13707fb3c..172d829e7 100644 --- a/nostrdb/src/nostrdb.h +++ b/nostrdb/src/nostrdb.h @@ -162,6 +162,7 @@ enum ndb_filter_fieldtype { NDB_FILTER_UNTIL = 6, NDB_FILTER_LIMIT = 7, NDB_FILTER_SEARCH = 8, + NDB_FILTER_RELAYS = 9, }; #define NDB_NUM_FILTERS 7 @@ -569,6 +570,7 @@ struct ndb_filter_elements *ndb_filter_get_elements(const struct ndb_filter *, i int ndb_filter_start_field(struct ndb_filter *, enum ndb_filter_fieldtype); int ndb_filter_start_tag_field(struct ndb_filter *, char tag); int ndb_filter_matches(struct ndb_filter *, struct ndb_note *); +int ndb_filter_matches_with_relay(struct ndb_filter *, struct ndb_note *, struct ndb_note_relay_iterator *iter); int 
ndb_filter_clone(struct ndb_filter *dst, struct ndb_filter *src); int ndb_filter_end(struct ndb_filter *); void ndb_filter_end_field(struct ndb_filter *); From c728210be8bc3c63d43d84bf9995603cf09dc301 Mon Sep 17 00:00:00 2001 From: William Casarin Date: Sun, 23 Mar 2025 12:44:47 -0700 Subject: [PATCH 39/91] nostrdb: query: implement author_kind query plan This should help author kind query performance --- nostrdb/src/nostrdb.c | 135 ++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 131 insertions(+), 4 deletions(-) diff --git a/nostrdb/src/nostrdb.c b/nostrdb/src/nostrdb.c index 1982824e4..30f868761 100644 --- a/nostrdb/src/nostrdb.c +++ b/nostrdb/src/nostrdb.c @@ -2356,6 +2356,7 @@ static int ndb_cursor_start(MDB_cursor *cur, MDB_val *k, MDB_val *v) // specified key if ((rc = mdb_cursor_get(cur, k, v, MDB_SET_RANGE))) { + ndb_debug("MDB_SET_RANGE failed: '%s'\n", mdb_strerror(rc)); // Failed :(. It could be the last element? if ((rc = mdb_cursor_get(cur, k, v, MDB_LAST))) { ndb_debug("MDB_LAST failed: '%s'\n", mdb_strerror(rc)); @@ -3919,6 +3920,113 @@ static int ndb_query_plan_execute_tags(struct ndb_txn *txn, return 1; } +static int ndb_query_plan_execute_author_kinds( + struct ndb_txn *txn, + struct ndb_filter *filter, + struct ndb_query_results *results, + int limit) +{ + MDB_cursor *cur; + MDB_dbi db; + MDB_val k, v; + struct ndb_note *note; + struct ndb_filter_elements *kinds, *relays, *authors; + struct ndb_query_result res; + uint64_t kind, note_id, until, since, *pint; + size_t note_size; + unsigned char *author; + int i, j, rc; + struct ndb_id_u64_ts key, *pkey; + struct ndb_note_relay_iterator note_relay_iter; + + // we should have kinds in a kinds filter! + if (!(kinds = ndb_filter_find_elements(filter, NDB_FILTER_KINDS))) + return 0; + // + // we should have kinds in a kinds filter! + if (!(authors = ndb_filter_find_elements(filter, NDB_FILTER_AUTHORS))) + return 0; + + relays = ndb_filter_find_elements(filter, NDB_FILTER_RELAYS); + + until = UINT64_MAX; + if ((pint = ndb_filter_get_int(filter, NDB_FILTER_UNTIL))) + until = *pint; + + since = 0; + if ((pint = ndb_filter_get_int(filter, NDB_FILTER_SINCE))) + since = *pint; + + db = txn->lmdb->dbs[NDB_DB_NOTE_PUBKEY_KIND]; + + if ((rc = mdb_cursor_open(txn->mdb_txn, db, &cur))) + return 0; + + for (j = 0; j < authors->count; j++) { + if (query_is_full(results, limit)) + break; + + if (!(author = ndb_filter_get_id_element(filter, authors, j))) + continue; + + for (i = 0; i < kinds->count; i++) { + if (query_is_full(results, limit)) + break; + + kind = kinds->elements[i]; + + ndb_debug("finding kind %"PRIu64"\n", kind); + + ndb_id_u64_ts_init(&key, author, kind, until); + + k.mv_data = &key; + k.mv_size = sizeof(key); + + if (!ndb_cursor_start(cur, &k, &v)) + continue; + + // scan the kind subindex + while (!query_is_full(results, limit)) { + pkey = (struct ndb_id_u64_ts*)k.mv_data; + + ndb_debug("scanning subindex kind:%"PRIu64" created_at:%"PRIu64" pubkey:", + pkey->u64, + pkey->timestamp); + + if (pkey->u64 != kind) + break; + + // don't continue the scan if we're below `since` + if (pkey->timestamp < since) + break; + + note_id = *(uint64_t*)v.mv_data; + if (!(note = ndb_get_note_by_key(txn, note_id, ¬e_size))) + goto next; + + if (relays) + ndb_note_relay_iterate_start(txn, ¬e_relay_iter, note_id); + + if (!ndb_filter_matches_with(filter, note, + (1 << NDB_FILTER_KINDS) | (1 << NDB_FILTER_AUTHORS), + relays? 
¬e_relay_iter : NULL)) + goto next; + + ndb_query_result_init(&res, note, note_size, note_id); + if (!push_query_result(results, &res)) + break; + +next: + if (mdb_cursor_get(cur, &k, &v, MDB_PREV)) + break; + } + } + } + + mdb_cursor_close(cur); + return 1; +} + static int ndb_query_plan_execute_relay_kinds( struct ndb_txn *txn, struct ndb_filter *filter, @@ -4218,12 +4326,8 @@ static int ndb_query_filter(struct ndb_txn *txn, struct ndb_filter *filter, return 0; break; case NDB_PLAN_AUTHOR_KINDS: - /* TODO: author kinds if (!ndb_query_plan_execute_author_kinds(txn, filter, &results, limit)) return 0; - */ - if (!ndb_query_plan_execute_authors(txn, filter, &results, limit)) - return 0; break; } @@ -7503,6 +7607,29 @@ void ndb_config_set_ingest_filter(struct ndb_config *config, config->filter_context = filter_ctx; } +int ndb_print_author_kind_index(struct ndb_txn *txn) +{ + MDB_cursor *cur; + struct ndb_id_u64_ts *key; + MDB_val k, v; + int i; + + if (mdb_cursor_open(txn->mdb_txn, txn->lmdb->dbs[NDB_DB_NOTE_PUBKEY_KIND], &cur)) + return 0; + + i = 1; + printf("author\tkind\tcreated_at\tnote_id\n"); + while (mdb_cursor_get(cur, &k, &v, MDB_NEXT) == 0) { + key = (struct ndb_id_u64_ts *)k.mv_data; + print_hex(key->id, 32); + printf("\t%" PRIu64 "\t%" PRIu64 "\t%" PRIu64 "\n", + key->u64, key->timestamp, *(uint64_t*)v.mv_data); + i++; + } + + return i; +} + int ndb_print_relay_kind_index(struct ndb_txn *txn) { MDB_cursor *cur; From 9cff8608f661c05767a7d8d128c3a554e572ac25 Mon Sep 17 00:00:00 2001 From: William Casarin Date: Sun, 23 Mar 2025 18:41:45 -0700 Subject: [PATCH 40/91] nostrdb: fix build on macos --- nostrdb/src/nostrdb.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nostrdb/src/nostrdb.c b/nostrdb/src/nostrdb.c index 30f868761..4f409cf00 100644 --- a/nostrdb/src/nostrdb.c +++ b/nostrdb/src/nostrdb.c @@ -5301,6 +5301,7 @@ static void *ndb_writer_thread(void *data) uint64_t note_nkey; struct ndb_txn txn; unsigned char *scratch; + struct ndb_relay_kind_key relay_key; // 2MB scratch buffer for parsing note content scratch = malloc(writer->scratch_size); @@ -5380,7 +5381,6 @@ static void *ndb_writer_thread(void *data) } break; case NDB_WRITER_NOTE_RELAY: - struct ndb_relay_kind_key relay_key; if (ndb_relay_kind_key_init(&relay_key, msg->note_relay.note_key, msg->note_relay.kind, From 0b8090cb287fb7cf440c3078e4321dc8272e2f19 Mon Sep 17 00:00:00 2001 From: William Casarin Date: Sun, 30 Mar 2025 09:09:30 -0700 Subject: [PATCH 41/91] nostrdb: query: implement profile search query plans The basic idea of this is to allow you to use the standard nip50 query interface to search for profiles using our profile index. query: {"search":"jb55", "kinds":[0]} will result in a profile_search query plan that searches kind0 profiles for the corresponding `name` or `display_name`. 
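Under the hood this walks the existing profile fulltext index and then fetches the
matching kind-0 notes by author. Roughly (sketch only, assuming an open
`struct ndb_txn txn`; the real loop is in ndb_query_plan_execute_profile_search
below):

    struct ndb_search s;

    if (ndb_search_profile(&txn, &s, "jb55")) {
        do {
            /* s.key->id is the matching pubkey; the plan copies it into a
             * kinds:[0] + authors filter and runs the author_kinds plan to
             * pull that author's profile note into the results */
        } while (ndb_search_profile_next(&s));
    }
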
Signed-off-by: William Casarin --- nostrdb/src/nostrdb.c | 74 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 74 insertions(+) diff --git a/nostrdb/src/nostrdb.c b/nostrdb/src/nostrdb.c index 4f409cf00..0792a1700 100644 --- a/nostrdb/src/nostrdb.c +++ b/nostrdb/src/nostrdb.c @@ -239,6 +239,7 @@ enum ndb_query_plan { NDB_PLAN_TAGS, NDB_PLAN_SEARCH, NDB_PLAN_RELAY_KINDS, + NDB_PLAN_PROFILE_SEARCH, }; // A id + u64 + timestamp @@ -4027,6 +4028,67 @@ static int ndb_query_plan_execute_author_kinds( return 1; } +static int ndb_query_plan_execute_profile_search( + struct ndb_txn *txn, + struct ndb_filter *filter, + struct ndb_query_results *results, + int limit) +{ + const char *search; + int i; + + // The filter pubkey is updated inplace for each note search + unsigned char *filter_pubkey; + unsigned char pubkey[32] = {0}; + struct ndb_filter_elements *els; + struct ndb_search profile_search; + struct ndb_filter note_filter, *f = ¬e_filter; + + if (!(search = ndb_filter_find_search(filter))) + return 0; + + if (!ndb_filter_init_with(f, 1)) + return 0; + + ndb_filter_start_field(f, NDB_FILTER_KINDS); + ndb_filter_add_int_element(f, 0); + ndb_filter_end_field(f); + + ndb_filter_start_field(f, NDB_FILTER_AUTHORS); + ndb_filter_add_id_element(f, pubkey); + ndb_filter_end_field(f); + ndb_filter_end(f); + + // get the authors element after we finalize the filter, since + // the data could have moved + if (!(els = ndb_filter_find_elements(f, NDB_FILTER_AUTHORS))) + return 0; + + // grab pointer to pubkey in the filter so that we can + // update the filter as we go + if (!(filter_pubkey = ndb_filter_get_id_element(f, els, 0))) + return 0; + + for (i = 0; !query_is_full(results, limit); i++) { + if (i == 0) { + if (!ndb_search_profile(txn, &profile_search, search)) + break; + } else { + if (!ndb_search_profile_next(&profile_search)) + break; + } + + // Copy pubkey into filter + memcpy(filter_pubkey, profile_search.key->id, 32); + + // Look up the corresponding note associated with that pubkey + if (!ndb_query_plan_execute_author_kinds(txn, f, results, limit)) + return 0; + } + + return 1; +} + static int ndb_query_plan_execute_relay_kinds( struct ndb_txn *txn, struct ndb_filter *filter, @@ -4239,6 +4301,11 @@ static enum ndb_query_plan ndb_filter_plan(struct ndb_filter *filter) tags = ndb_filter_find_elements(filter, NDB_FILTER_TAGS); relays = ndb_filter_find_elements(filter, NDB_FILTER_RELAYS); + // profile search + if (kinds && kinds->count == 1 && kinds->elements[0] == 0 && search) { + return NDB_PLAN_PROFILE_SEARCH; + } + // this is rougly similar to the heuristic in strfry's dbscan if (search) { return NDB_PLAN_SEARCH; @@ -4270,6 +4337,7 @@ static const char *ndb_query_plan_name(enum ndb_query_plan plan_id) case NDB_PLAN_AUTHORS: return "authors"; case NDB_PLAN_RELAY_KINDS: return "relay_kinds"; case NDB_PLAN_AUTHOR_KINDS: return "author_kinds"; + case NDB_PLAN_PROFILE_SEARCH: return "profile_search"; } return "unknown"; @@ -4308,6 +4376,12 @@ static int ndb_query_filter(struct ndb_txn *txn, struct ndb_filter *filter, if (!ndb_query_plan_execute_search(txn, filter, &results, limit)) return 0; break; + + case NDB_PLAN_PROFILE_SEARCH: + if (!ndb_query_plan_execute_profile_search(txn, filter, &results, limit)) + return 0; + break; + // We have just kinds, just scan the kind index case NDB_PLAN_KINDS: if (!ndb_query_plan_execute_kinds(txn, filter, &results, limit)) From 64c16e7cc8b9256b41e03ca23a80c321bbf45573 Mon Sep 17 00:00:00 2001 From: William Casarin Date: Tue, 8 Apr 2025 16:29:16 
-0700 Subject: [PATCH 42/91] nostrdb: filter: add initial custom filtering logic This adds some helpers for adding custom filtering logic to nostr filters. These are just a callback and a closure. There can only be one custom callback filter per filter. Fixes: https://github.com/damus-io/nostrdb/issues/33 Signed-off-by: William Casarin --- nostrdb/src/nostrdb.c | 74 +++++++++++++++++++++++++++++++++++++++++++ nostrdb/src/nostrdb.h | 15 ++++++++- 2 files changed, 88 insertions(+), 1 deletion(-) diff --git a/nostrdb/src/nostrdb.c b/nostrdb/src/nostrdb.c index 0792a1700..f24ecd7e1 100644 --- a/nostrdb/src/nostrdb.c +++ b/nostrdb/src/nostrdb.c @@ -671,6 +671,12 @@ ndb_filter_elements_data(const struct ndb_filter *filter, int offset) return data; } +struct ndb_filter_custom * +ndb_filter_get_custom_element(const struct ndb_filter *filter, const struct ndb_filter_elements *els) +{ + return (struct ndb_filter_custom *)ndb_filter_elements_data(filter, els->elements[0]); +} + unsigned char * ndb_filter_get_id_element(const struct ndb_filter *filter, const struct ndb_filter_elements *els, int index) { @@ -754,6 +760,7 @@ static const char *ndb_filter_field_name(enum ndb_filter_fieldtype field) case NDB_FILTER_LIMIT: return "limit"; case NDB_FILTER_SEARCH: return "search"; case NDB_FILTER_RELAYS: return "relays"; + case NDB_FILTER_CUSTOM: return "custom"; } return "unknown"; @@ -821,6 +828,10 @@ static int ndb_filter_add_element(struct ndb_filter *filter, union ndb_filter_el offset = filter->data_buf.p - filter->data_buf.start; switch (current->field.type) { + case NDB_FILTER_CUSTOM: + if (!cursor_push(&filter->data_buf, (unsigned char *)&el, sizeof(el))) + return 0; + break; case NDB_FILTER_IDS: case NDB_FILTER_AUTHORS: if (!cursor_push(&filter->data_buf, (unsigned char *)el.id, 32)) @@ -861,6 +872,7 @@ static int ndb_filter_add_element(struct ndb_filter *filter, union ndb_filter_el case NDB_ELEMENT_INT: // ints are not allowed in tag filters case NDB_ELEMENT_UNKNOWN: + case NDB_ELEMENT_CUSTOM: return 0; } // push a pointer of the string in the databuf as an element @@ -925,6 +937,7 @@ int ndb_filter_add_str_element_len(struct ndb_filter *filter, const char *str, i case NDB_FILTER_IDS: case NDB_FILTER_AUTHORS: case NDB_FILTER_KINDS: + case NDB_FILTER_CUSTOM: return 0; case NDB_FILTER_SEARCH: if (current->count == 1) { @@ -951,6 +964,41 @@ int ndb_filter_add_str_element(struct ndb_filter *filter, const char *str) return ndb_filter_add_str_element_len(filter, str, strlen(str)); } +int ndb_filter_add_custom_filter_element(struct ndb_filter *filter, ndb_filter_callback_fn *cb, void *ctx) +{ + union ndb_filter_element el; + struct ndb_filter_elements *current; + struct ndb_filter_custom custom; + + custom.cb = cb; + custom.ctx = ctx; + + if (!(current = ndb_filter_current_element(filter))) + return 0; + + switch (current->field.type) { + case NDB_FILTER_CUSTOM: + break; + case NDB_FILTER_IDS: + case NDB_FILTER_AUTHORS: + case NDB_FILTER_TAGS: + case NDB_FILTER_SEARCH: + case NDB_FILTER_RELAYS: + case NDB_FILTER_KINDS: + case NDB_FILTER_SINCE: + case NDB_FILTER_UNTIL: + case NDB_FILTER_LIMIT: + return 0; + } + + if (!ndb_filter_set_elem_type(filter, NDB_ELEMENT_CUSTOM)) + return 0; + + el.custom_filter = custom; + + return ndb_filter_add_element(filter, el); +} + int ndb_filter_add_int_element(struct ndb_filter *filter, uint64_t integer) { union ndb_filter_element el; @@ -964,6 +1012,7 @@ int ndb_filter_add_int_element(struct ndb_filter *filter, uint64_t integer) case NDB_FILTER_TAGS: case 
NDB_FILTER_SEARCH: case NDB_FILTER_RELAYS: + case NDB_FILTER_CUSTOM: return 0; case NDB_FILTER_KINDS: case NDB_FILTER_SINCE: @@ -996,6 +1045,7 @@ int ndb_filter_add_id_element(struct ndb_filter *filter, const unsigned char *id case NDB_FILTER_KINDS: case NDB_FILTER_SEARCH: case NDB_FILTER_RELAYS: + case NDB_FILTER_CUSTOM: return 0; case NDB_FILTER_IDS: case NDB_FILTER_AUTHORS: @@ -1079,6 +1129,7 @@ static int ndb_tag_filter_matches(struct ndb_filter *filter, case NDB_ELEMENT_INT: // int elements int tag queries are not supported case NDB_ELEMENT_UNKNOWN: + case NDB_ELEMENT_CUSTOM: return 0; } } @@ -1166,6 +1217,7 @@ static int ndb_filter_matches_with(struct ndb_filter *filter, int i, j; struct ndb_filter_elements *els; struct search_id_state state; + struct ndb_filter_custom *custom; state.filter = filter; @@ -1244,6 +1296,12 @@ static int ndb_filter_matches_with(struct ndb_filter *filter, // the search index will be walked for these kinds // of queries. continue; + case NDB_FILTER_CUSTOM: + custom = ndb_filter_get_custom_element(filter, els); + if (custom->cb(custom->ctx, note)) + continue; + break; + case NDB_FILTER_LIMIT: cont: continue; @@ -1297,6 +1355,7 @@ static int ndb_filter_field_eq(struct ndb_filter *a_filt, const char *a_str, *b_str; unsigned char *a_id, *b_id; uint64_t a_int, b_int; + struct ndb_filter_custom *a_custom, *b_custom; if (a_field->count != b_field->count) return 0; @@ -1318,6 +1377,11 @@ static int ndb_filter_field_eq(struct ndb_filter *a_filt, for (i = 0; i < a_field->count; i++) { switch (a_field->field.elem_type) { + case NDB_ELEMENT_CUSTOM: + a_custom = ndb_filter_get_custom_element(a_filt, a_field); + b_custom = ndb_filter_get_custom_element(b_filt, b_field); + if (memcmp(a_custom, b_custom, sizeof(*a_custom))) + return 0; case NDB_ELEMENT_UNKNOWN: return 0; case NDB_ELEMENT_STRING: @@ -1373,6 +1437,7 @@ void ndb_filter_end_field(struct ndb_filter *filter) // TODO: generic tag search sorting break; case NDB_FILTER_SINCE: + case NDB_FILTER_CUSTOM: case NDB_FILTER_UNTIL: case NDB_FILTER_LIMIT: case NDB_FILTER_SEARCH: @@ -6412,6 +6477,9 @@ static int cursor_push_json_elem_array(struct cursor *cur, for (i = 0; i < elems->count; i++) { switch (elems->field.elem_type) { + case NDB_ELEMENT_CUSTOM: + // can't serialize custom functions + break; case NDB_ELEMENT_STRING: str = ndb_filter_get_string_element(filter, elems, i); if (!cursor_push_jsonstr(cur, str)) @@ -6464,6 +6532,9 @@ int ndb_filter_json(const struct ndb_filter *filter, char *buf, int buflen) for (i = 0; i < filter->num_elements; i++) { elems = ndb_filter_get_elements(filter, i); switch (elems->field.type) { + case NDB_FILTER_CUSTOM: + // nothing to encode these as + break; case NDB_FILTER_IDS: if (!cursor_push_str(c, "\"ids\":")) return 0; @@ -7325,6 +7396,9 @@ static int ndb_filter_parse_json(struct ndb_json_parser *parser, // we parsed a top-level field switch(field) { + case NDB_FILTER_CUSTOM: + // can't really parse these yet + break; case NDB_FILTER_AUTHORS: case NDB_FILTER_IDS: if (!ndb_filter_parse_json_ids(parser, filter)) { diff --git a/nostrdb/src/nostrdb.h b/nostrdb/src/nostrdb.h index 172d829e7..fff4af144 100644 --- a/nostrdb/src/nostrdb.h +++ b/nostrdb/src/nostrdb.h @@ -2,6 +2,7 @@ #define NOSTRDB_H #include +#include #include "win.h" #include "cursor.h" @@ -48,6 +49,7 @@ struct ndb_t { }; struct ndb_str { + // NDB_PACKED_STR, NDB_PACKED_ID unsigned char flag; union { const char *str; @@ -163,8 +165,9 @@ enum ndb_filter_fieldtype { NDB_FILTER_LIMIT = 7, NDB_FILTER_SEARCH = 8, 
NDB_FILTER_RELAYS = 9, + NDB_FILTER_CUSTOM = 10, }; -#define NDB_NUM_FILTERS 7 +#define NDB_NUM_FILTERS 10 // when matching generic tags, we need to know if we're dealing with // a pointer to a 32-byte ID or a null terminated string @@ -173,6 +176,7 @@ enum ndb_generic_element_type { NDB_ELEMENT_STRING = 1, NDB_ELEMENT_ID = 2, NDB_ELEMENT_INT = 3, + NDB_ELEMENT_CUSTOM = 4, }; enum ndb_search_order { @@ -250,10 +254,18 @@ struct ndb_filter_string { int len; }; +typedef bool ndb_filter_callback_fn(void *, struct ndb_note *); + +struct ndb_filter_custom { + void *ctx; + ndb_filter_callback_fn *cb; +}; + union ndb_filter_element { struct ndb_filter_string string; const unsigned char *id; uint64_t integer; + struct ndb_filter_custom custom_filter; }; struct ndb_filter_field { @@ -551,6 +563,7 @@ int ndb_filter_init_with(struct ndb_filter *filter, int pages); int ndb_filter_add_id_element(struct ndb_filter *, const unsigned char *id); int ndb_filter_add_int_element(struct ndb_filter *, uint64_t integer); int ndb_filter_add_str_element(struct ndb_filter *, const char *str); +int ndb_filter_add_custom_filter_element(struct ndb_filter *filter, ndb_filter_callback_fn *cb, void *ctx); int ndb_filter_eq(const struct ndb_filter *, const struct ndb_filter *); /// is `a` a subset of `b` From 5c75e87ed5a3786dd8034445323db21d2020160a Mon Sep 17 00:00:00 2001 From: William Casarin Date: Wed, 9 Apr 2025 11:33:51 -0700 Subject: [PATCH 43/91] nostrdb: eq: fix fallthrough bug Signed-off-by: William Casarin --- nostrdb/src/nostrdb.c | 1 + 1 file changed, 1 insertion(+) diff --git a/nostrdb/src/nostrdb.c b/nostrdb/src/nostrdb.c index f24ecd7e1..9dd0e50ca 100644 --- a/nostrdb/src/nostrdb.c +++ b/nostrdb/src/nostrdb.c @@ -1382,6 +1382,7 @@ static int ndb_filter_field_eq(struct ndb_filter *a_filt, b_custom = ndb_filter_get_custom_element(b_filt, b_field); if (memcmp(a_custom, b_custom, sizeof(*a_custom))) return 0; + break; case NDB_ELEMENT_UNKNOWN: return 0; case NDB_ELEMENT_STRING: From 3a37a6c18e375be523b536b63c9c565cc87da3c7 Mon Sep 17 00:00:00 2001 From: William Casarin Date: Tue, 29 Apr 2025 16:02:04 -0700 Subject: [PATCH 44/91] nostrdb: change <=10 author search queries to ==1 These queries are broken anyways. 
Rely on scans until we fix this Signed-off-by: William Casarin --- nostrdb/src/nostrdb.c | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/nostrdb/src/nostrdb.c b/nostrdb/src/nostrdb.c index 9dd0e50ca..f500533e4 100644 --- a/nostrdb/src/nostrdb.c +++ b/nostrdb/src/nostrdb.c @@ -4372,18 +4372,18 @@ static enum ndb_query_plan ndb_filter_plan(struct ndb_filter *filter) return NDB_PLAN_PROFILE_SEARCH; } - // this is rougly similar to the heuristic in strfry's dbscan + // TODO: fix multi-author queries if (search) { return NDB_PLAN_SEARCH; } else if (ids) { return NDB_PLAN_IDS; } else if (relays && kinds && !authors) { return NDB_PLAN_RELAY_KINDS; - } else if (kinds && authors && authors->count <= 10) { + } else if (kinds && authors && authors->count == 1) { return NDB_PLAN_AUTHOR_KINDS; - } else if (authors && authors->count <= 10) { + } else if (authors && authors->count == 1) { return NDB_PLAN_AUTHORS; - } else if (tags && tags->count <= 10) { + } else if (tags && tags->count == 1) { return NDB_PLAN_TAGS; } else if (kinds) { return NDB_PLAN_KINDS; From fa2d240ddf79d2ca57418b34a01b0bd44e55e8fa Mon Sep 17 00:00:00 2001 From: William Casarin Date: Tue, 24 Jun 2025 10:21:22 -0700 Subject: [PATCH 45/91] nostrdb: nostrdb: calculate id in ndb_note_verify Rogue relays could in theory attack nostrdb by replaying ids and signatures from other notes. This fixes this weakness by calculating the id again in ndb_note_verify. There is no known relays exploiting this, but lets get ahead of it before we switch to the outbox model in damus iOS/notedeck Signed-off-by: William Casarin --- nostrdb/src/nostrdb.c | 58 +++++++++++++++++++++++++++++++++---------- nostrdb/src/nostrdb.h | 4 +-- 2 files changed, 47 insertions(+), 15 deletions(-) diff --git a/nostrdb/src/nostrdb.c b/nostrdb/src/nostrdb.c index f500533e4..29044e213 100644 --- a/nostrdb/src/nostrdb.c +++ b/nostrdb/src/nostrdb.c @@ -185,6 +185,8 @@ struct ndb_ingester { struct prot_queue *writer_inbox; void *filter_context; ndb_ingest_filter_fn filter; + + int scratch_size; }; struct ndb_filter_group { @@ -2321,17 +2323,34 @@ int ndb_end_query(struct ndb_txn *txn) return mdb_txn_commit(txn->mdb_txn) == 0; } -int ndb_note_verify(void *ctx, unsigned char pubkey[32], unsigned char id[32], - unsigned char sig[64]) +int ndb_note_verify(void *ctx, unsigned char *scratch, size_t scratch_size, + struct ndb_note *note) { + unsigned char id[32]; secp256k1_xonly_pubkey xonly_pubkey; int ok; - ok = secp256k1_xonly_pubkey_parse((secp256k1_context*)ctx, &xonly_pubkey, - pubkey) != 0; + // first, we ensure the id is valid by calculating the id independently + // from what is given to us + if (!ndb_calculate_id(note, scratch, scratch_size, id)) { + ndb_debug("ndb_note_verify: scratch buffer size too small"); + return 0; + } + + if (memcmp(id, note->id, 32)) { + ndb_debug("ndb_note_verify: note id does not match!"); + return 0; + } + + // id is ok, let's check signature + + ok = secp256k1_xonly_pubkey_parse((secp256k1_context*)ctx, + &xonly_pubkey, + ndb_note_pubkey(note)) != 0; if (!ok) return 0; - ok = secp256k1_schnorrsig_verify((secp256k1_context*)ctx, sig, id, 32, + ok = secp256k1_schnorrsig_verify((secp256k1_context*)ctx, + ndb_note_sig(note), id, 32, &xonly_pubkey) > 0; if (!ok) return 0; @@ -2754,6 +2773,7 @@ static int ndb_ingester_process_note(secp256k1_context *ctx, size_t note_size, struct ndb_writer_msg *out, struct ndb_ingester *ingester, + unsigned char *scratch, const char *relay) { enum ndb_ingest_filter_action action; @@ -2774,8 
+2794,8 @@ static int ndb_ingester_process_note(secp256k1_context *ctx, } else { // verify! If it's an invalid note we don't need to // bother writing it to the database - if (!ndb_note_verify(ctx, note->pubkey, note->id, note->sig)) { - ndb_debug("signature verification failed\n"); + if (!ndb_note_verify(ctx, scratch, ingester->scratch_size, note)) { + ndb_debug("note verification failed\n"); return 0; } } @@ -2871,6 +2891,7 @@ static int ndb_ingester_process_event(secp256k1_context *ctx, struct ndb_ingester *ingester, struct ndb_ingester_event *ev, struct ndb_writer_msg *out, + unsigned char *scratch, MDB_txn *read_txn) { struct ndb_tce tce; @@ -2945,7 +2966,7 @@ static int ndb_ingester_process_event(secp256k1_context *ctx, } if (!ndb_ingester_process_note(ctx, note, note_size, - out, ingester, + out, ingester, scratch, ev->relay)) { ndb_debug("failed to process note\n"); goto cleanup; @@ -2967,7 +2988,7 @@ static int ndb_ingester_process_event(secp256k1_context *ctx, } if (!ndb_ingester_process_note(ctx, note, note_size, - out, ingester, + out, ingester, scratch, ev->relay)) { ndb_debug("failed to process note\n"); goto cleanup; @@ -5599,8 +5620,13 @@ static void *ndb_ingester_thread(void *data) struct ndb_writer_msg outs[THREAD_QUEUE_BATCH], *out; int i, to_write, popped, done, any_event; MDB_txn *read_txn = NULL; + unsigned char *scratch; int rc; + // this is used in note verification and anything else that + // needs a temporary buffer + scratch = malloc(ingester->scratch_size); + ctx = secp256k1_context_create(SECP256K1_CONTEXT_VERIFY); //ndb_debug("started ingester thread\n"); @@ -5638,6 +5664,7 @@ static void *ndb_ingester_thread(void *data) out = &outs[to_write]; if (ndb_ingester_process_event(ctx, ingester, &msg->event, out, + scratch, read_txn)) { to_write++; } @@ -5657,6 +5684,7 @@ static void *ndb_ingester_thread(void *data) ndb_debug("quitting ingester thread\n"); secp256k1_context_destroy(ctx); + free(scratch); return NULL; } @@ -5694,6 +5722,7 @@ static int ndb_writer_init(struct ndb_writer *writer, struct ndb_lmdb *lmdb, static int ndb_ingester_init(struct ndb_ingester *ingester, struct ndb_lmdb *lmdb, struct prot_queue *writer_inbox, + int scratch_size, const struct ndb_config *config) { int elem_size, num_elems; @@ -5703,6 +5732,7 @@ static int ndb_ingester_init(struct ndb_ingester *ingester, elem_size = sizeof(struct ndb_ingester_msg); num_elems = DEFAULT_QUEUE_SIZE; + ingester->scratch_size = scratch_size; ingester->writer_inbox = writer_inbox; ingester->lmdb = lmdb; ingester->flags = config->flags; @@ -5979,7 +6009,8 @@ int ndb_init(struct ndb **pndb, const char *filename, const struct ndb_config *c return 0; } - if (!ndb_ingester_init(&ndb->ingester, &ndb->lmdb, &ndb->writer.inbox, config)) { + if (!ndb_ingester_init(&ndb->ingester, &ndb->lmdb, &ndb->writer.inbox, + config->writer_scratch_buffer_size, config)) { fprintf(stderr, "failed to initialize %d ingester thread(s)\n", config->ingester_threads); return 0; @@ -6612,7 +6643,7 @@ int ndb_filter_json(const struct ndb_filter *filter, char *buf, int buflen) return cur.p - cur.start; } -int ndb_calculate_id(struct ndb_note *note, unsigned char *buf, int buflen) { +int ndb_calculate_id(struct ndb_note *note, unsigned char *buf, int buflen, unsigned char *id) { int len; if (!(len = ndb_event_commitment(note, buf, buflen))) @@ -6620,7 +6651,7 @@ int ndb_calculate_id(struct ndb_note *note, unsigned char *buf, int buflen) { //fprintf(stderr, "%.*s\n", len, buf); - sha256((struct sha256*)note->id, buf, len); + 
sha256((struct sha256*)id, buf, len); return 1; } @@ -6696,7 +6727,8 @@ int ndb_builder_finalize(struct ndb_builder *builder, struct ndb_note **note, ndb_builder_set_pubkey(builder, keypair->pubkey); - if (!ndb_calculate_id(builder->note, start, end - start)) + if (!ndb_calculate_id(builder->note, start, end - start, + builder->note->id)) return 0; if (!ndb_sign_id(keypair, (*note)->id, (*note)->sig)) diff --git a/nostrdb/src/nostrdb.h b/nostrdb/src/nostrdb.h index fff4af144..a25e8d436 100644 --- a/nostrdb/src/nostrdb.h +++ b/nostrdb/src/nostrdb.h @@ -493,11 +493,11 @@ void ndb_config_set_subscription_callback(struct ndb_config *config, ndb_sub_fn void ndb_config_set_writer_scratch_buffer_size(struct ndb_config *config, int scratch_size); // HELPERS -int ndb_calculate_id(struct ndb_note *note, unsigned char *buf, int buflen); +int ndb_calculate_id(struct ndb_note *note, unsigned char *buf, int buflen, unsigned char *id); int ndb_sign_id(struct ndb_keypair *keypair, unsigned char id[32], unsigned char sig[64]); int ndb_create_keypair(struct ndb_keypair *key); int ndb_decode_key(const char *secstr, struct ndb_keypair *keypair); -int ndb_note_verify(void *secp_ctx, unsigned char pubkey[32], unsigned char id[32], unsigned char signature[64]); +int ndb_note_verify(void *secp_ctx, unsigned char *scratch, size_t scratch_size, struct ndb_note *note); // NDB int ndb_init(struct ndb **ndb, const char *dbdir, const struct ndb_config *); From 0847c53a39748fb862d3415e1df8125af65f7429 Mon Sep 17 00:00:00 2001 From: William Casarin Date: Thu, 3 Jul 2025 16:07:29 -0700 Subject: [PATCH 46/91] nostrdb: add ndb_builder_push_tag_id Signed-off-by: William Casarin --- nostrdb/src/nostrdb.c | 10 ++++++++++ nostrdb/src/nostrdb.h | 1 + 2 files changed, 11 insertions(+) diff --git a/nostrdb/src/nostrdb.c b/nostrdb/src/nostrdb.c index 29044e213..eff9609d1 100644 --- a/nostrdb/src/nostrdb.c +++ b/nostrdb/src/nostrdb.c @@ -7739,6 +7739,16 @@ int ndb_builder_push_tag_str(struct ndb_builder *builder, return ndb_builder_finalize_tag(builder, pstr); } +/// Push an id element to the current tag. 
Needs to be 32 bytes +int ndb_builder_push_tag_id(struct ndb_builder *builder, + unsigned char *id) +{ + union ndb_packed_str pstr; + if (!ndb_builder_push_packed_id(builder, id, &pstr)) + return 0; + return ndb_builder_finalize_tag(builder, pstr); +} + // // CONFIG // diff --git a/nostrdb/src/nostrdb.h b/nostrdb/src/nostrdb.h index a25e8d436..c25c0fe6d 100644 --- a/nostrdb/src/nostrdb.h +++ b/nostrdb/src/nostrdb.h @@ -551,6 +551,7 @@ void ndb_builder_set_id(struct ndb_builder *builder, unsigned char *id); void ndb_builder_set_kind(struct ndb_builder *builder, uint32_t kind); int ndb_builder_new_tag(struct ndb_builder *builder); int ndb_builder_push_tag_str(struct ndb_builder *builder, const char *str, int len); +int ndb_builder_push_tag_id(struct ndb_builder *builder, unsigned char *id); // FILTERS int ndb_filter_init(struct ndb_filter *); From b5079c42d574a08a18a6974d3fac0fab39fce59a Mon Sep 17 00:00:00 2001 From: William Casarin Date: Tue, 8 Jul 2025 14:12:52 -0700 Subject: [PATCH 47/91] nostrdb: memory: fix a bunch of memory leaks Signed-off-by: William Casarin --- nostrdb/src/nostrdb.c | 21 +++++++++++++++++---- 1 file changed, 17 insertions(+), 4 deletions(-) diff --git a/nostrdb/src/nostrdb.c b/nostrdb/src/nostrdb.c index eff9609d1..ab2d6f7da 100644 --- a/nostrdb/src/nostrdb.c +++ b/nostrdb/src/nostrdb.c @@ -591,6 +591,11 @@ int ndb_filter_end(struct ndb_filter *filter) size_t orig_size; #endif size_t data_len, elem_len; + unsigned char *rel; + + assert(filter); + assert(filter->elem_buf.start); + if (filter->finalized == 1) return 0; @@ -609,7 +614,10 @@ int ndb_filter_end(struct ndb_filter *filter) memmove(filter->elem_buf.p, filter->data_buf.start, data_len); // realloc the whole thing - filter->elem_buf.start = realloc(filter->elem_buf.start, elem_len + data_len); + rel = realloc(filter->elem_buf.start, elem_len + data_len); + if (rel) + filter->elem_buf.start = rel; + assert(filter->elem_buf.start); filter->elem_buf.end = filter->elem_buf.start + elem_len; filter->elem_buf.p = filter->elem_buf.end; @@ -4149,12 +4157,12 @@ static int ndb_query_plan_execute_profile_search( // get the authors element after we finalize the filter, since // the data could have moved if (!(els = ndb_filter_find_elements(f, NDB_FILTER_AUTHORS))) - return 0; + goto fail; // grab pointer to pubkey in the filter so that we can // update the filter as we go if (!(filter_pubkey = ndb_filter_get_id_element(f, els, 0))) - return 0; + goto fail; for (i = 0; !query_is_full(results, limit); i++) { if (i == 0) { @@ -4170,10 +4178,15 @@ static int ndb_query_plan_execute_profile_search( // Look up the corresponding note associated with that pubkey if (!ndb_query_plan_execute_author_kinds(txn, f, results, limit)) - return 0; + goto fail; } + ndb_filter_destroy(f); return 1; + +fail: + ndb_filter_destroy(f); + return 0; } static int ndb_query_plan_execute_relay_kinds( From 84839d1c43c7f4e09f2cef5c6b0164b76dfb49fa Mon Sep 17 00:00:00 2001 From: William Casarin Date: Fri, 11 Jul 2025 12:24:41 -0700 Subject: [PATCH 48/91] nostrdb: mem: search cursor close --- nostrdb/src/nostrdb.c | 1 + 1 file changed, 1 insertion(+) diff --git a/nostrdb/src/nostrdb.c b/nostrdb/src/nostrdb.c index ab2d6f7da..a1090134f 100644 --- a/nostrdb/src/nostrdb.c +++ b/nostrdb/src/nostrdb.c @@ -3080,6 +3080,7 @@ int ndb_search_profile(struct ndb_txn *txn, struct ndb_search *search, const cha search->key = k.mv_data; assert(v.mv_size == 8); search->profile_key = *((uint64_t*)v.mv_data); + mdb_cursor_close(search->cursor); return 1; } From 
342067640f9b31c70c003aa93c1cb29641d428d4 Mon Sep 17 00:00:00 2001 From: William Casarin Date: Fri, 11 Jul 2025 12:25:06 -0700 Subject: [PATCH 49/91] nostrdb: mem: reaction stats cleanup --- nostrdb/src/nostrdb.c | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/nostrdb/src/nostrdb.c b/nostrdb/src/nostrdb.c index a1090134f..cb0845e19 100644 --- a/nostrdb/src/nostrdb.c +++ b/nostrdb/src/nostrdb.c @@ -3388,7 +3388,7 @@ static int ndb_write_reaction_stats(struct ndb_txn *txn, struct ndb_note *note) if (root == NULL) { ndb_debug("failed to create note metadata record\n"); - return 0; + goto fail; } // metadata is keyed on id because we want to collect stats regardless @@ -3406,13 +3406,18 @@ static int ndb_write_reaction_stats(struct ndb_txn *txn, struct ndb_note *note) if ((rc = mdb_put(txn->mdb_txn, txn->lmdb->dbs[NDB_DB_META], &key, &val, 0))) { ndb_debug("write reaction stats to db failed: %s\n", mdb_strerror(rc)); - free(root); - return 0; + goto fail; } free(root); + flatcc_builder_clear(&builder); return 1; + +fail: + free(root); + flatcc_builder_clear(&builder); + return 0; } From 4d8313c78850e6d0dabb8558e4320f1a62f67619 Mon Sep 17 00:00:00 2001 From: William Casarin Date: Fri, 11 Jul 2025 12:25:51 -0700 Subject: [PATCH 50/91] nostrdb: mem: relay iter cleanup --- nostrdb/src/nostrdb.c | 22 +++++++++++++++------- 1 file changed, 15 insertions(+), 7 deletions(-) diff --git a/nostrdb/src/nostrdb.c b/nostrdb/src/nostrdb.c index cb0845e19..4654c4451 100644 --- a/nostrdb/src/nostrdb.c +++ b/nostrdb/src/nostrdb.c @@ -3671,7 +3671,8 @@ static int ndb_query_plan_execute_ids(struct ndb_txn *txn, uint64_t note_id, until, *pint; size_t note_size; unsigned char *id; - struct ndb_note_relay_iterator note_relay_iter; + struct ndb_note_relay_iterator note_relay_iter = {0}; + struct ndb_note_relay_iterator *relay_iter = NULL; until = UINT64_MAX; @@ -3712,17 +3713,20 @@ static int ndb_query_plan_execute_ids(struct ndb_txn *txn, if (!(note = ndb_get_note_by_key(txn, note_id, ¬e_size))) continue; - if (need_relays) - ndb_note_relay_iterate_start(txn, ¬e_relay_iter, note_id); + relay_iter = need_relays ? ¬e_relay_iter : NULL; + if (relay_iter) + ndb_note_relay_iterate_start(txn, relay_iter, note_id); // Sure this particular lookup matched the index query, but // does it match the entire filter? Check! We also pass in // things we've already matched via the filter so we don't have // to check again. This can be pretty important for filters // with a large number of entries. - if (!ndb_filter_matches_with(filter, note, 1 << NDB_FILTER_IDS, - need_relays ? 
¬e_relay_iter : NULL)) + if (!ndb_filter_matches_with(filter, note, 1 << NDB_FILTER_IDS, relay_iter)) { + ndb_note_relay_iterate_close(relay_iter); continue; + } + ndb_note_relay_iterate_close(relay_iter); ndb_query_result_init(&res, note, note_size, note_id); if (!push_query_result(results, &res)) @@ -3973,8 +3977,9 @@ static int ndb_query_plan_execute_tags(struct ndb_txn *txn, if (!(len = ndb_encode_tag_key(key_buffer, sizeof(key_buffer), tags->field.tag, tag, taglen, - until))) - return 0; + until))) { + goto fail; + } k.mv_data = key_buffer; k.mv_size = len; @@ -4020,6 +4025,9 @@ static int ndb_query_plan_execute_tags(struct ndb_txn *txn, mdb_cursor_close(cur); return 1; +fail: + mdb_cursor_close(cur); + return 0; } static int ndb_query_plan_execute_author_kinds( From 8014d772bac979f4959e51b4a2caf3d689b428bc Mon Sep 17 00:00:00 2001 From: William Casarin Date: Fri, 11 Jul 2025 12:26:18 -0700 Subject: [PATCH 51/91] nostrdb: mem: builder clear before free --- nostrdb/src/nostrdb.c | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/nostrdb/src/nostrdb.c b/nostrdb/src/nostrdb.c index 4654c4451..bb11499ba 100644 --- a/nostrdb/src/nostrdb.c +++ b/nostrdb/src/nostrdb.c @@ -2674,13 +2674,16 @@ void ndb_profile_record_builder_init(struct ndb_profile_record_builder *b) void ndb_profile_record_builder_free(struct ndb_profile_record_builder *b) { - if (b->builder) + if (b->builder) { + flatcc_builder_clear(b->builder); free(b->builder); + } if (b->flatbuf) free(b->flatbuf); b->builder = NULL; b->flatbuf = NULL; + } int ndb_process_profile_note(struct ndb_note *note, From aa8ce31941b18062bf3d651b25d2cb767631b534 Mon Sep 17 00:00:00 2001 From: William Casarin Date: Fri, 11 Jul 2025 12:26:43 -0700 Subject: [PATCH 52/91] nostrdb: mem: close cursors in print helpers --- nostrdb/src/nostrdb.c | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/nostrdb/src/nostrdb.c b/nostrdb/src/nostrdb.c index bb11499ba..6661e100d 100644 --- a/nostrdb/src/nostrdb.c +++ b/nostrdb/src/nostrdb.c @@ -7848,6 +7848,8 @@ int ndb_print_author_kind_index(struct ndb_txn *txn) i++; } + mdb_cursor_close(cur); + return i; } @@ -7872,6 +7874,8 @@ int ndb_print_relay_kind_index(struct ndb_txn *txn) i++; } + mdb_cursor_close(cur); + return i; } @@ -7891,6 +7895,8 @@ int ndb_print_tag_index(struct ndb_txn *txn) i++; } + mdb_cursor_close(cur); + return 1; } @@ -7913,6 +7919,8 @@ int ndb_print_kind_keys(struct ndb_txn *txn) i++; } + mdb_cursor_close(cur); + return 1; } @@ -7940,6 +7948,8 @@ int ndb_print_search_keys(struct ndb_txn *txn) i++; } + mdb_cursor_close(cur); + return 1; } From 42a0f2c08dbe8ef416122cd6d0db22ccac1ddf48 Mon Sep 17 00:00:00 2001 From: William Casarin Date: Fri, 11 Jul 2025 12:29:31 -0700 Subject: [PATCH 53/91] nostrdb: Revert "mem: search cursor close" this is causing heap corruption on the windows build This reverts commit a8d6925a5b33ddbdd4306423527b5d8314f7dd36. 
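A plausible explanation, stated as an assumption since the report above only mentions the corruption itself: struct ndb_search keeps the MDB cursor so the caller can keep iterating with ndb_search_profile_next() and then release everything exactly once via ndb_search_profile_end(); closing the cursor inside ndb_search_profile() as well means it ends up closed twice, and the Windows heap turns that into corruption. A minimal sketch of the intended call pattern, using the public search API as I understand it:

    struct ndb_txn txn;
    struct ndb_search search;

    if (!ndb_begin_query(ndb, &txn))
            return;

    if (ndb_search_profile(&txn, &search, "jb55")) {
            do {
                    /* search.key and search.profile_key are usable here */
            } while (ndb_search_profile_next(&search));

            ndb_search_profile_end(&search); /* the one place the cursor is closed */
    }

    ndb_end_query(&txn);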
--- nostrdb/src/nostrdb.c | 1 - 1 file changed, 1 deletion(-) diff --git a/nostrdb/src/nostrdb.c b/nostrdb/src/nostrdb.c index 6661e100d..c9e00277f 100644 --- a/nostrdb/src/nostrdb.c +++ b/nostrdb/src/nostrdb.c @@ -3083,7 +3083,6 @@ int ndb_search_profile(struct ndb_txn *txn, struct ndb_search *search, const cha search->key = k.mv_data; assert(v.mv_size == 8); search->profile_key = *((uint64_t*)v.mv_data); - mdb_cursor_close(search->cursor); return 1; } From 66e10db6b22d0ea8f0c2aac62e3e5094776f14be Mon Sep 17 00:00:00 2001 From: William Casarin Date: Fri, 11 Jul 2025 12:34:14 -0700 Subject: [PATCH 54/91] nostrdb: mem: re-enable profile freeing Signed-off-by: William Casarin --- nostrdb/src/nostrdb.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nostrdb/src/nostrdb.c b/nostrdb/src/nostrdb.c index c9e00277f..1a130c7a6 100644 --- a/nostrdb/src/nostrdb.c +++ b/nostrdb/src/nostrdb.c @@ -5624,7 +5624,7 @@ static void *ndb_writer_thread(void *data) free((void*)msg->note.relay); } else if (msg->type == NDB_WRITER_PROFILE) { free(msg->profile.note.note); - //ndb_profile_record_builder_free(&msg->profile.record); + ndb_profile_record_builder_free(&msg->profile.record); } else if (msg->type == NDB_WRITER_BLOCKS) { ndb_blocks_free(msg->blocks.blocks); } else if (msg->type == NDB_WRITER_NOTE_RELAY) { From 387af198d629fb6005c5c87cdcf13202abd420d4 Mon Sep 17 00:00:00 2001 From: William Casarin Date: Fri, 11 Jul 2025 12:39:29 -0700 Subject: [PATCH 55/91] nostrdb: win: fix heap corruption with flatbuf --- nostrdb/src/nostrdb.c | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/nostrdb/src/nostrdb.c b/nostrdb/src/nostrdb.c index 1a130c7a6..a38f52937 100644 --- a/nostrdb/src/nostrdb.c +++ b/nostrdb/src/nostrdb.c @@ -2674,12 +2674,12 @@ void ndb_profile_record_builder_init(struct ndb_profile_record_builder *b) void ndb_profile_record_builder_free(struct ndb_profile_record_builder *b) { + if (b->flatbuf) + flatcc_builder_aligned_free(b->flatbuf); if (b->builder) { flatcc_builder_clear(b->builder); free(b->builder); } - if (b->flatbuf) - free(b->flatbuf); b->builder = NULL; b->flatbuf = NULL; From a040a0244bb317ed66512817902c7e7a8b405614 Mon Sep 17 00:00:00 2001 From: William Casarin Date: Fri, 11 Jul 2025 12:42:15 -0700 Subject: [PATCH 56/91] nostrdb: search: fix memleak in profile search Signed-off-by: William Casarin --- nostrdb/src/nostrdb.c | 1 + 1 file changed, 1 insertion(+) diff --git a/nostrdb/src/nostrdb.c b/nostrdb/src/nostrdb.c index a38f52937..ce6933cc1 100644 --- a/nostrdb/src/nostrdb.c +++ b/nostrdb/src/nostrdb.c @@ -4197,6 +4197,7 @@ static int ndb_query_plan_execute_profile_search( goto fail; } + ndb_search_profile_end(&profile_search); ndb_filter_destroy(f); return 1; From d565eb20f73ff304e904252432da958f5262c801 Mon Sep 17 00:00:00 2001 From: William Casarin Date: Thu, 17 Jul 2025 10:43:35 -0700 Subject: [PATCH 57/91] nostrdb: query: enforce author matches in author_kind queries before we weren't checking this, meaning we were getting results from other keys. oops. 
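Some context on why the check is needed (names and key layout below are assumptions; the real structs live in nostrdb.c): the author_kinds plan positions an LMDB cursor with a composite key built from the author id plus kind/timestamp data, then walks the index from there. Range positioning only fixes the starting point, so once the requested author's entries are exhausted the cursor keeps returning whichever author sorts next, which is exactly the "results from other keys" mentioned above. The prefix therefore has to be re-checked on every step of the walk, roughly like this:

    #include <string.h>
    #include <lmdb.h>

    /* Illustrative sketch only, not the exact nostrdb internals. */
    static int scan_one_author(MDB_cursor *cur, const unsigned char author[32])
    {
            MDB_val k = { .mv_size = 32, .mv_data = (void *)author };
            MDB_val v;
            MDB_cursor_op op = MDB_SET_RANGE;
            int matched = 0;

            while (mdb_cursor_get(cur, &k, &v, op) == 0) {
                    op = MDB_NEXT;

                    /* assumed: the index key begins with the 32-byte author id */
                    if (k.mv_size < 32 || memcmp(k.mv_data, author, 32))
                            break; /* walked past this author's range */

                    /* kind/timestamp filtering and result collection go here */
                    matched++;
            }

            return matched;
    }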
Reported-by: Jeff Gardner Fixes: #84 Signed-off-by: William Casarin --- nostrdb/src/nostrdb.c | 3 +++ 1 file changed, 3 insertions(+) diff --git a/nostrdb/src/nostrdb.c b/nostrdb/src/nostrdb.c index ce6933cc1..0546afa56 100644 --- a/nostrdb/src/nostrdb.c +++ b/nostrdb/src/nostrdb.c @@ -4112,6 +4112,9 @@ static int ndb_query_plan_execute_author_kinds( if (pkey->timestamp < since) break; + if (memcmp(pkey->id, author, 32)) + break; + note_id = *(uint64_t*)v.mv_data; if (!(note = ndb_get_note_by_key(txn, note_id, ¬e_size))) goto next; From ecbfb3714b6622f4454a01cf0183e75705590884 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Wed, 10 Sep 2025 16:07:03 -0700 Subject: [PATCH 58/91] Fix incompatibilities with new nostrdb version MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Daniel D’Aquino --- nostrdb/Ndb.swift | 4 +++- nostrdb/NdbNote.swift | 18 +++--------------- 2 files changed, 6 insertions(+), 16 deletions(-) diff --git a/nostrdb/Ndb.swift b/nostrdb/Ndb.swift index f0de8f124..bbc73abb5 100644 --- a/nostrdb/Ndb.swift +++ b/nostrdb/Ndb.swift @@ -34,6 +34,8 @@ class Ndb { var generation: Int private var closed: Bool private var callbackHandler: Ndb.CallbackHandler + + private static let DEFAULT_WRITER_SCRATCH_SIZE: Int32 = 2097152; // 2mb scratch size for the writer thread, it should match with the one specified in nostrdb.c var is_closed: Bool { self.closed || self.ndb.ndb == nil @@ -111,7 +113,7 @@ class Ndb { let ok = path.withCString { testdir in var ok = false while !ok && mapsize > 1024 * 1024 * 700 { - var cfg = ndb_config(flags: 0, ingester_threads: ingest_threads, mapsize: mapsize, filter_context: nil, ingest_filter: nil, sub_cb_ctx: nil, sub_cb: nil) + var cfg = ndb_config(flags: 0, ingester_threads: ingest_threads, writer_scratch_buffer_size: DEFAULT_WRITER_SCRATCH_SIZE, mapsize: mapsize, filter_context: nil, ingest_filter: nil, sub_cb_ctx: nil, sub_cb: nil) // Here we hook up the global callback function for subscription callbacks. // We do an "unretained" pass here because the lifetime of the callback handler is larger than the lifetime of the nostrdb monitor in the C code. 
diff --git a/nostrdb/NdbNote.swift b/nostrdb/NdbNote.swift index 2ce945537..553ccae18 100644 --- a/nostrdb/NdbNote.swift +++ b/nostrdb/NdbNote.swift @@ -303,12 +303,10 @@ class NdbNote: Codable, Equatable, Hashable { let scratch_buf = malloc(scratch_buf_len) defer { free(scratch_buf) } // Ensure we deallocate as soon as we leave this scope, regardless of the outcome - // Calculate the ID based on the content - guard ndb_calculate_id(n.ptr, scratch_buf, Int32(scratch_buf_len)) == 1 else { throw InitError.generic } - // Verify the signature against the pubkey and the computed ID, to verify the validity of the whole note var ctx = secp256k1_context_create(UInt32(SECP256K1_CONTEXT_VERIFY)) - guard ndb_note_verify(&ctx, ndb_note_pubkey(n.ptr), ndb_note_id(n.ptr), ndb_note_sig(n.ptr)) == 1 else { throw InitError.generic } + + guard ndb_note_verify(&ctx, scratch_buf, scratch_buf_len, n.ptr) == 1 else { throw InitError.generic } } catch { free(buf) @@ -351,19 +349,9 @@ class NdbNote: Codable, Equatable, Hashable { let scratch_buf = malloc(scratch_buf_len) defer { free(scratch_buf) } // Ensure we deallocate as soon as we leave this scope, regardless of the outcome - let current_id = self.id - - // Calculate the ID based on the content - guard ndb_calculate_id(self.note.ptr, scratch_buf, Int32(scratch_buf_len)) == 1 else { return false } - - let computed_id = self.id - - // Ensure computed ID matches given id to prevent ID tampering - guard computed_id == current_id else { return false } - // Verify the signature against the pubkey and the computed ID, to verify the validity of the whole note var ctx = secp256k1_context_create(UInt32(SECP256K1_CONTEXT_VERIFY)) - guard ndb_note_verify(&ctx, ndb_note_pubkey(self.note.ptr), ndb_note_id(self.note.ptr), ndb_note_sig(self.note.ptr)) == 1 else { return false } + guard ndb_note_verify(&ctx, scratch_buf, scratch_buf_len, self.note.ptr) == 1 else { return false } return true } From 1caad243648efbed0d14bb5cfeda5c916d078188 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Wed, 10 Sep 2025 17:06:45 -0700 Subject: [PATCH 59/91] Add note provenance filter support to SubscriptionManager MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes: https://github.com/damus-io/damus/issues/3222 Signed-off-by: Daniel D’Aquino --- .../SubscriptionManager.swift | 8 +++- damus/Core/Nostr/RelayPool.swift | 2 +- .../Search/Models/SearchHomeModel.swift | 10 ++--- nostrdb/Ndb+.swift | 9 +++++ nostrdb/Ndb.swift | 38 ++++++++++++++++++- nostrdb/NdbNote.swift | 7 +++- 6 files changed, 62 insertions(+), 12 deletions(-) diff --git a/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift b/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift index e5ce9ab38..aa3b7de22 100644 --- a/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift +++ b/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift @@ -137,7 +137,13 @@ extension NostrNetworkManager { case .event(let noteKey): let lender = NdbNoteLender(ndb: self.ndb, noteKey: noteKey) try Task.checkCancellation() - continuation.yield(.event(lender: lender)) + guard let desiredRelays else { + continuation.yield(.event(lender: lender)) // If no desired relays are specified, return all notes we see. + break + } + if try ndb.was(noteKey: noteKey, seenOnAnyOf: desiredRelays) { + continuation.yield(.event(lender: lender)) // If desired relays were specified and this note was seen there, return it. 
+ } } } } diff --git a/damus/Core/Nostr/RelayPool.swift b/damus/Core/Nostr/RelayPool.swift index d38ce7be3..23f3042ba 100644 --- a/damus/Core/Nostr/RelayPool.swift +++ b/damus/Core/Nostr/RelayPool.swift @@ -141,7 +141,7 @@ class RelayPool { case .string(let str) = msg else { return } - let _ = self.ndb.process_event(str) + let _ = self.ndb.process_event(str, originRelayURL: relay_id) self.message_received_function?((str, desc)) }) let relay = Relay(descriptor: desc, connection: conn) diff --git a/damus/Features/Search/Models/SearchHomeModel.swift b/damus/Features/Search/Models/SearchHomeModel.swift index 723b5b56c..a69c73b41 100644 --- a/damus/Features/Search/Models/SearchHomeModel.swift +++ b/damus/Features/Search/Models/SearchHomeModel.swift @@ -51,14 +51,10 @@ class SearchHomeModel: ObservableObject { var follow_list_filter = NostrFilter(kinds: [.follow_list]) follow_list_filter.until = UInt32(Date.now.timeIntervalSince1970) - outerLoop: for await item in damus_state.nostrNetwork.reader.subscribe(filters: [get_base_filter(), follow_list_filter], to: to_relays) { - switch item { - case .event(let lender): - await lender.justUseACopy({ await self.handleEvent($0) }) - case .eose: - break outerLoop - } + for await noteLender in damus_state.nostrNetwork.reader.streamNotesUntilEndOfStoredEvents(filters: [get_base_filter(), follow_list_filter], to: to_relays) { + await noteLender.justUseACopy({ await self.handleEvent($0) }) } + DispatchQueue.main.async { self.loading = false } diff --git a/nostrdb/Ndb+.swift b/nostrdb/Ndb+.swift index ac92cac46..c1d138a99 100644 --- a/nostrdb/Ndb+.swift +++ b/nostrdb/Ndb+.swift @@ -27,4 +27,13 @@ extension Ndb { } return try self.subscribe(filters: ndbFilters, maxSimultaneousResults: maxSimultaneousResults) } + + /// Determines if a given note was seen on any of the listed relay URLs + func was(noteKey: NoteKey, seenOnAnyOf relayUrls: [RelayURL], txn: SafeNdbTxn<()>? = nil) throws -> Bool { + return try self.was(noteKey: noteKey, seenOnAnyOf: relayUrls.map({ $0.absoluteString }), txn: txn) + } + + func process_event(_ str: String, originRelayURL: RelayURL? = nil) -> Bool { + self.process_event(str, originRelayURL: originRelayURL?.absoluteString) + } } diff --git a/nostrdb/Ndb.swift b/nostrdb/Ndb.swift index bbc73abb5..134d4f0d4 100644 --- a/nostrdb/Ndb.swift +++ b/nostrdb/Ndb.swift @@ -563,10 +563,20 @@ class Ndb { } } - func process_event(_ str: String) -> Bool { + func process_event(_ str: String, originRelayURL: String? = nil) -> Bool { guard !is_closed else { return false } + guard let originRelayURL else { + return str.withCString { cstr in + return ndb_process_event(ndb.ndb, cstr, Int32(str.utf8.count)) != 0 + } + } return str.withCString { cstr in - return ndb_process_event(ndb.ndb, cstr, Int32(str.utf8.count)) != 0 + return originRelayURL.withCString { originRelayCString in + let meta = UnsafeMutablePointer.allocate(capacity: 1) + defer { meta.deallocate() } + ndb_ingest_meta_init(meta, 0, originRelayCString) + return ndb_process_event_with(ndb.ndb, cstr, Int32(str.utf8.count), meta) != 0 + } } } @@ -805,6 +815,25 @@ class Ndb { } } + /// Determines if a given note was seen on a specific relay URL + func was(noteKey: NoteKey, seenOn relayUrl: String, txn: SafeNdbTxn<()>? = nil) throws -> Bool { + guard let txn = txn ?? 
SafeNdbTxn.new(on: self) else { throw NdbLookupError.cannotOpenTransaction } + return relayUrl.withCString({ relayCString in + return ndb_note_seen_on_relay(&txn.txn, noteKey, relayCString) == 1 + }) + } + + /// Determines if a given note was seen on any of the listed relay URLs + func was(noteKey: NoteKey, seenOnAnyOf relayUrls: [String], txn: SafeNdbTxn<()>? = nil) throws -> Bool { + guard let txn = txn ?? SafeNdbTxn.new(on: self) else { throw NdbLookupError.cannotOpenTransaction } + for relayUrl in relayUrls { + if try self.was(noteKey: noteKey, seenOn: relayUrl, txn: txn) { + return true + } + } + return false + } + // MARK: Internal ndb callback interfaces internal func setCallback(for subscriptionId: UInt64, callback: @escaping (NoteKey) -> Void) async { @@ -893,6 +922,11 @@ extension Ndb { case streamError(NdbStreamError) case internalInconsistency case timeout + case notFound + } + + enum OperationError: Error { + case genericError } } diff --git a/nostrdb/NdbNote.swift b/nostrdb/NdbNote.swift index 553ccae18..f6542dce3 100644 --- a/nostrdb/NdbNote.swift +++ b/nostrdb/NdbNote.swift @@ -384,7 +384,12 @@ class NdbNote: Codable, Equatable, Hashable { // Extension to make NdbNote compatible with NostrEvent's original API extension NdbNote { var is_textlike: Bool { - return kind == 1 || kind == 42 || kind == 30023 || kind == 9802 || kind == 39089 + switch known_kind { + case .text, .chat, .longform, .highlight: + true + default: + false + } } var is_quote_repost: NoteId? { From 2185984ed7750f83f5a84c06a68df2baba39d2b1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Mon, 15 Sep 2025 11:20:20 -0700 Subject: [PATCH 60/91] Stream from both NDB and network relays MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This commit takes a step back from the full local relay model by treating NostrDB as one of the many relays streamed from, instead of the one exclusive relay that other classes rely on. This was done to reduce regression risk from the local relay model migration, without discarding the migration work already done. The full "local relay model" behavior (exclusive NDB streaming) was hidden behind a feature flag for easy migration later on. 
Closes: https://github.com/damus-io/damus/issues/3225 Signed-off-by: Daniel D’Aquino --- .../NostrNetworkManager.swift | 5 +++- .../SubscriptionManager.swift | 27 +++++++++++++++++-- damus/Core/Nostr/RelayPool.swift | 2 +- damus/Core/Storage/DamusState.swift | 1 + .../Settings/Models/UserSettingsStore.swift | 4 +++ .../NostrNetworkManagerTests.swift | 2 +- nostrdb/Ndb+.swift | 2 +- 7 files changed, 37 insertions(+), 6 deletions(-) diff --git a/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift b/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift index ac11b97af..80321f8b6 100644 --- a/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift +++ b/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift @@ -38,7 +38,7 @@ class NostrNetworkManager { self.delegate = delegate let pool = RelayPool(ndb: delegate.ndb, keypair: delegate.keypair) self.pool = pool - let reader = SubscriptionManager(pool: pool, ndb: delegate.ndb) + let reader = SubscriptionManager(pool: pool, ndb: delegate.ndb, experimentalLocalRelayModelSupport: self.delegate.experimentalLocalRelayModelSupport) let userRelayList = UserRelayListManager(delegate: delegate, pool: pool, reader: reader) self.reader = reader self.userRelayList = userRelayList @@ -174,6 +174,9 @@ extension NostrNetworkManager { /// Whether the app is in developer mode var developerMode: Bool { get } + /// Whether the app has the experimental local relay model flag that streams data only from the local relay (ndb) + var experimentalLocalRelayModelSupport: Bool { get } + /// The cache of relay model information var relayModelCache: RelayModelCache { get } diff --git a/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift b/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift index aa3b7de22..ca79def5f 100644 --- a/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift +++ b/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift @@ -16,11 +16,13 @@ extension NostrNetworkManager { private let pool: RelayPool private var ndb: Ndb private var taskManager: TaskManager + private let experimentalLocalRelayModelSupport: Bool - init(pool: RelayPool, ndb: Ndb) { + init(pool: RelayPool, ndb: Ndb, experimentalLocalRelayModelSupport: Bool) { self.pool = pool self.ndb = ndb self.taskManager = TaskManager() + self.experimentalLocalRelayModelSupport = experimentalLocalRelayModelSupport } // MARK: - Subscribing and Streaming data from Nostr @@ -127,13 +129,28 @@ extension NostrNetworkManager { /// - Returns: An async stream of nostr data private func sessionSubscribe(filters: [NostrFilter], to desiredRelays: [RelayURL]? = nil) -> AsyncStream { return AsyncStream { continuation in + var ndbEOSEIssued = false + var networkEOSEIssued = false + + // This closure function issues (yields) an EOSE signal to the stream if all relevant conditions are met + let yieldEOSEIfReady = { + // In normal mode: Issuing EOSE requires EOSE from both NDB and the network, since they are all considered separate relays + // In experimental local relay model mode: Issuing EOSE requires only EOSE from NDB, since that is the only relay that "matters" + let canIssueEOSE = self.experimentalLocalRelayModelSupport ? 
ndbEOSEIssued : ndbEOSEIssued && networkEOSEIssued + if canIssueEOSE { + continuation.yield(.eose) + } + } + let ndbStreamTask = Task { do { for await item in try self.ndb.subscribe(filters: try filters.map({ try NdbFilter(from: $0) })) { try Task.checkCancellation() switch item { case .eose: - continuation.yield(.eose) + Log.debug("Session subscribe: Received EOSE from nostrdb", for: .subscription_manager) + ndbEOSEIssued = true + yieldEOSEIfReady() case .event(let noteKey): let lender = NdbNoteLender(ndb: self.ndb, noteKey: noteKey) try Task.checkCancellation() @@ -161,8 +178,14 @@ extension NostrNetworkManager { switch item { case .event(let event): Log.debug("Session subscribe: Received kind %d event with id %s from the network", for: .subscription_manager, event.kind, event.id.hex()) + if !self.experimentalLocalRelayModelSupport { + // In normal mode (non-experimental), we stream from ndb but also directly from the network + continuation.yield(.event(lender: NdbNoteLender(ownedNdbNote: event))) + } case .eose: Log.debug("Session subscribe: Received EOSE from the network", for: .subscription_manager) + networkEOSEIssued = true + yieldEOSEIfReady() } } } diff --git a/damus/Core/Nostr/RelayPool.swift b/damus/Core/Nostr/RelayPool.swift index 23f3042ba..6b24a8e68 100644 --- a/damus/Core/Nostr/RelayPool.swift +++ b/damus/Core/Nostr/RelayPool.swift @@ -141,7 +141,7 @@ class RelayPool { case .string(let str) = msg else { return } - let _ = self.ndb.process_event(str, originRelayURL: relay_id) + let _ = self.ndb.processEvent(str, originRelayURL: relay_id) self.message_received_function?((str, desc)) }) let relay = Relay(descriptor: desc, connection: conn) diff --git a/damus/Core/Storage/DamusState.swift b/damus/Core/Storage/DamusState.swift index f663064af..f7170a032 100644 --- a/damus/Core/Storage/DamusState.swift +++ b/damus/Core/Storage/DamusState.swift @@ -223,6 +223,7 @@ fileprivate extension DamusState { var latestContactListEvent: NostrEvent? 
{ self.contacts.event } var bootstrapRelays: [RelayURL] { get_default_bootstrap_relays() } var developerMode: Bool { self.settings.developer_mode } + var experimentalLocalRelayModelSupport: Bool { self.settings.enable_experimental_local_relay_model } var relayModelCache: RelayModelCache var relayFilters: RelayFilters diff --git a/damus/Features/Settings/Models/UserSettingsStore.swift b/damus/Features/Settings/Models/UserSettingsStore.swift index 0da7377cb..eb01f7e60 100644 --- a/damus/Features/Settings/Models/UserSettingsStore.swift +++ b/damus/Features/Settings/Models/UserSettingsStore.swift @@ -243,6 +243,10 @@ class UserSettingsStore: ObservableObject { @Setting(key: "enable_experimental_purple_api", default_value: false) var enable_experimental_purple_api: Bool + /// Whether the app has the experimental local relay model flag that streams data only from the local relay (ndb) + @Setting(key: "enable_experimental_local_relay_model", default_value: false) + var enable_experimental_local_relay_model: Bool + @StringSetting(key: "purple_environment", default_value: .production) var purple_enviroment: DamusPurpleEnvironment diff --git a/damusTests/NostrNetworkManagerTests/NostrNetworkManagerTests.swift b/damusTests/NostrNetworkManagerTests/NostrNetworkManagerTests.swift index 271c6f15f..de3eb0780 100644 --- a/damusTests/NostrNetworkManagerTests/NostrNetworkManagerTests.swift +++ b/damusTests/NostrNetworkManagerTests/NostrNetworkManagerTests.swift @@ -21,7 +21,7 @@ class NostrNetworkManagerTests: XCTestCase { let notesJSONL = getTestNotesJSONL() for noteText in notesJSONL.split(separator: "\n") { - let _ = damusState!.ndb.process_event("[\"EVENT\",\"subid\",\(String(noteText))]") + let _ = damusState!.ndb.processEvent("[\"EVENT\",\"subid\",\(String(noteText))]") } } diff --git a/nostrdb/Ndb+.swift b/nostrdb/Ndb+.swift index c1d138a99..79fc39c18 100644 --- a/nostrdb/Ndb+.swift +++ b/nostrdb/Ndb+.swift @@ -33,7 +33,7 @@ extension Ndb { return try self.was(noteKey: noteKey, seenOnAnyOf: relayUrls.map({ $0.absoluteString }), txn: txn) } - func process_event(_ str: String, originRelayURL: RelayURL? = nil) -> Bool { + func processEvent(_ str: String, originRelayURL: RelayURL? 
= nil) -> Bool { self.process_event(str, originRelayURL: originRelayURL?.absoluteString) } } From 0582892cae424c31832435b4799fdde061d01f0a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Mon, 15 Sep 2025 16:12:13 -0700 Subject: [PATCH 61/91] Improve Follow pack timeline loading logic in the Universe view MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Daniel D’Aquino --- damus.xcodeproj/project.pbxproj | 8 +++ damus/Core/Nostr/RelayPool.swift | 13 +++- .../Onboarding/Views/SaveKeysView.swift | 2 +- .../CondensedProfilePicturesViewModel.swift | 42 +++++++++++++ .../Views/CondensedProfilePicturesView.swift | 18 +++--- .../Search/Models/SearchHomeModel.swift | 61 +++++++++++++------ .../Search/Views/SearchHomeView.swift | 20 +++--- damus/Shared/Utilities/EventHolder.swift | 6 ++ 8 files changed, 130 insertions(+), 40 deletions(-) create mode 100644 damus/Features/Profile/Models/CondensedProfilePicturesViewModel.swift diff --git a/damus.xcodeproj/project.pbxproj b/damus.xcodeproj/project.pbxproj index c1ec36ec6..1502ff40b 100644 --- a/damus.xcodeproj/project.pbxproj +++ b/damus.xcodeproj/project.pbxproj @@ -1161,6 +1161,9 @@ D72A2D022AD9C136002AFF62 /* EventViewTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = D72A2CFF2AD9B66B002AFF62 /* EventViewTests.swift */; }; D72A2D052AD9C1B5002AFF62 /* MockDamusState.swift in Sources */ = {isa = PBXBuildFile; fileRef = D72A2D042AD9C1B5002AFF62 /* MockDamusState.swift */; }; D72A2D072AD9C1FB002AFF62 /* MockProfiles.swift in Sources */ = {isa = PBXBuildFile; fileRef = D72A2D062AD9C1FB002AFF62 /* MockProfiles.swift */; }; + D72C01312E78C10500AACB67 /* CondensedProfilePicturesViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = D72C01302E78C0FB00AACB67 /* CondensedProfilePicturesViewModel.swift */; }; + D72C01322E78C10500AACB67 /* CondensedProfilePicturesViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = D72C01302E78C0FB00AACB67 /* CondensedProfilePicturesViewModel.swift */; }; + D72C01332E78C10500AACB67 /* CondensedProfilePicturesViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = D72C01302E78C0FB00AACB67 /* CondensedProfilePicturesViewModel.swift */; }; D72E12782BEED22500F4F781 /* Array.swift in Sources */ = {isa = PBXBuildFile; fileRef = D72E12772BEED22400F4F781 /* Array.swift */; }; D72E127A2BEEEED000F4F781 /* NostrFilterTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = D72E12792BEEEED000F4F781 /* NostrFilterTests.swift */; }; D7315A2A2ACDF3B70036E30A /* DamusCacheManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = D7315A292ACDF3B70036E30A /* DamusCacheManager.swift */; }; @@ -2607,6 +2610,7 @@ D72A2CFF2AD9B66B002AFF62 /* EventViewTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = EventViewTests.swift; sourceTree = ""; }; D72A2D042AD9C1B5002AFF62 /* MockDamusState.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MockDamusState.swift; sourceTree = ""; }; D72A2D062AD9C1FB002AFF62 /* MockProfiles.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MockProfiles.swift; sourceTree = ""; }; + D72C01302E78C0FB00AACB67 /* CondensedProfilePicturesViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CondensedProfilePicturesViewModel.swift; sourceTree = ""; }; D72E12772BEED22400F4F781 /* Array.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Array.swift; 
sourceTree = ""; }; D72E12792BEEEED000F4F781 /* NostrFilterTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NostrFilterTests.swift; sourceTree = ""; }; D7315A292ACDF3B70036E30A /* DamusCacheManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DamusCacheManager.swift; sourceTree = ""; }; @@ -4301,6 +4305,7 @@ 5C78A7922E3036F800CF177D /* Models */ = { isa = PBXGroup; children = ( + D72C01302E78C0FB00AACB67 /* CondensedProfilePicturesViewModel.swift */, 4C3BEFD12819DB9B00B3DE84 /* ProfileModel.swift */, 4C363A912825FCF2006E126D /* ProfileUpdate.swift */, ); @@ -5783,6 +5788,7 @@ D706C5AF2D5D31C20027C627 /* AutoSaveIndicatorView.swift in Sources */, 6439E014296790CF0020672B /* ProfilePicImageView.swift in Sources */, 4CE6DF1627F8DEBF00C66700 /* RelayConnection.swift in Sources */, + D72C01312E78C10500AACB67 /* CondensedProfilePicturesViewModel.swift in Sources */, 4C1253682A76D2470004F4B8 /* MuteNotify.swift in Sources */, 4CDA128C29EB19C40006FA5A /* LocalNotification.swift in Sources */, 4C3BEFD6281D995700B3DE84 /* ActionBarModel.swift in Sources */, @@ -6385,6 +6391,7 @@ 82D6FC082CD99F7900C925F4 /* ProfileZapLinkView.swift in Sources */, D71AD8FE2CEC176A002E2C3C /* AppAccessibilityIdentifiers.swift in Sources */, 82D6FC092CD99F7900C925F4 /* AboutView.swift in Sources */, + D72C01332E78C10500AACB67 /* CondensedProfilePicturesViewModel.swift in Sources */, 82D6FC0A2CD99F7900C925F4 /* ProfileName.swift in Sources */, 82D6FC0B2CD99F7900C925F4 /* ProfilePictureSelector.swift in Sources */, 82D6FC0C2CD99F7900C925F4 /* EditMetadataView.swift in Sources */, @@ -6940,6 +6947,7 @@ D703D7502C6709F500A400EA /* NdbTxn.swift in Sources */, D703D77E2C670C1100A400EA /* NostrKind.swift in Sources */, D73E5F972C6AA7B7007EB227 /* SuggestedHashtagsView.swift in Sources */, + D72C01322E78C10500AACB67 /* CondensedProfilePicturesViewModel.swift in Sources */, D703D7B22C6710AF00A400EA /* ContentParsing.swift in Sources */, D703D7522C670A1400A400EA /* Log.swift in Sources */, D73E5E1B2C6A9672007EB227 /* LikeCounter.swift in Sources */, diff --git a/damus/Core/Nostr/RelayPool.swift b/damus/Core/Nostr/RelayPool.swift index 6b24a8e68..c34a5e059 100644 --- a/damus/Core/Nostr/RelayPool.swift +++ b/damus/Core/Nostr/RelayPool.swift @@ -43,6 +43,10 @@ class RelayPool { private let network_monitor = NWPathMonitor() private let network_monitor_queue = DispatchQueue(label: "io.damus.network_monitor") private var last_network_status: NWPath.Status = .unsatisfied + + /// The limit of maximum concurrent subscriptions. Any subscriptions beyond this limit will be paused until subscriptions clear + /// This is to avoid error states and undefined behaviour related to hitting subscription limits on the relays, by letting those wait instead — with the principle that slower is better than broken. + static let MAX_CONCURRENT_SUBSCRIPTION_LIMIT = 10 // This number is only an educated guess at this point. func close() { disconnect() @@ -102,10 +106,17 @@ class RelayPool { } @MainActor - func register_handler(sub_id: String, handler: @escaping (RelayURL, NostrConnectionEvent) -> ()) { + func register_handler(sub_id: String, handler: @escaping (RelayURL, NostrConnectionEvent) -> ()) async { + while handlers.count > Self.MAX_CONCURRENT_SUBSCRIPTION_LIMIT { + Log.debug("%s: Too many subscriptions, waiting for subscription pool to clear", for: .networking, sub_id) + try? 
await Task.sleep(for: .seconds(1)) + } + Log.debug("%s: Subscription pool cleared", for: .networking, sub_id) for handler in handlers { // don't add duplicate handlers if handler.sub_id == sub_id { + assertionFailure("Duplicate handlers are not allowed. Proper error handling for this has not been built yet.") + Log.error("Duplicate handlers are not allowed. Error handling for this has not been built yet.", for: .networking) return } } diff --git a/damus/Features/Onboarding/Views/SaveKeysView.swift b/damus/Features/Onboarding/Views/SaveKeysView.swift index 3e92c8deb..4a2bf947b 100644 --- a/damus/Features/Onboarding/Views/SaveKeysView.swift +++ b/damus/Features/Onboarding/Views/SaveKeysView.swift @@ -142,7 +142,7 @@ struct SaveKeysView: View { add_rw_relay(self.pool, relay) } - self.pool.register_handler(sub_id: "signup", handler: handle_event) + Task { await self.pool.register_handler(sub_id: "signup", handler: handle_event) } self.loading = true diff --git a/damus/Features/Profile/Models/CondensedProfilePicturesViewModel.swift b/damus/Features/Profile/Models/CondensedProfilePicturesViewModel.swift new file mode 100644 index 000000000..a8c35f079 --- /dev/null +++ b/damus/Features/Profile/Models/CondensedProfilePicturesViewModel.swift @@ -0,0 +1,42 @@ +// +// CondensedProfilePicturesViewModel.swift +// damus +// +// Created by Daniel D’Aquino on 2025-09-15. +// +import Combine +import Foundation + +class CondensedProfilePicturesViewModel: ObservableObject { + let state: DamusState + let pubkeys: [Pubkey] + let maxPictures: Int + var shownPubkeys: [Pubkey] { + return Array(pubkeys.prefix(maxPictures)) + } + var loadingTask: Task? = nil + + init(state: DamusState, pubkeys: [Pubkey], maxPictures: Int) { + self.state = state + self.pubkeys = pubkeys + self.maxPictures = min(maxPictures, pubkeys.count) + } + + func load() { + loadingTask?.cancel() + loadingTask = Task { try? await loadingTask() } + } + + func loadingTask() async throws { + let filter = NostrFilter(kinds: [.metadata], authors: shownPubkeys) + let _ = await state.nostrNetwork.reader.query(filters: [filter]) + for await _ in state.nostrNetwork.reader.streamNotesUntilEndOfStoredEvents(filters: [filter]) { + // NO-OP, we just need it to be loaded into NostrDB. + try Task.checkCancellation() + } + DispatchQueue.main.async { + // Cause the view to re-render with the newly loaded profiles + self.objectWillChange.send() + } + } +} diff --git a/damus/Features/Profile/Views/CondensedProfilePicturesView.swift b/damus/Features/Profile/Views/CondensedProfilePicturesView.swift index ed5de758a..a04666fd9 100644 --- a/damus/Features/Profile/Views/CondensedProfilePicturesView.swift +++ b/damus/Features/Profile/Views/CondensedProfilePicturesView.swift @@ -8,26 +8,26 @@ import SwiftUI struct CondensedProfilePicturesView: View { - let state: DamusState - let pubkeys: [Pubkey] - let maxPictures: Int + let model: CondensedProfilePicturesViewModel init(state: DamusState, pubkeys: [Pubkey], maxPictures: Int) { - self.state = state - self.pubkeys = pubkeys - self.maxPictures = min(maxPictures, pubkeys.count) + self.model = CondensedProfilePicturesViewModel(state: state, pubkeys: pubkeys, maxPictures: maxPictures) } var body: some View { // Using ZStack to make profile pictures floating and stacked on top of each other. ZStack { - ForEach((0.. 
= Set() + var follow_pack_seen_pubkey: Set = Set() let damus_state: DamusState let base_subid = UUID().description let follow_pack_subid = UUID().description @@ -25,6 +27,9 @@ class SearchHomeModel: ObservableObject { self.events = EventHolder(on_queue: { ev in preload_events(state: damus_state, events: [ev]) }) + self.followPackEvents = EventHolder(on_queue: { ev in + preload_events(state: damus_state, events: [ev]) + }) } func get_base_filter() -> NostrFilter { @@ -40,6 +45,12 @@ class SearchHomeModel: ObservableObject { self.objectWillChange.send() } + @MainActor + func reload() async { + self.events.reset() + await self.load() + } + func load() async { DispatchQueue.main.async { self.loading = true @@ -51,16 +62,23 @@ class SearchHomeModel: ObservableObject { var follow_list_filter = NostrFilter(kinds: [.follow_list]) follow_list_filter.until = UInt32(Date.now.timeIntervalSince1970) - for await noteLender in damus_state.nostrNetwork.reader.streamNotesUntilEndOfStoredEvents(filters: [get_base_filter(), follow_list_filter], to: to_relays) { + for await noteLender in damus_state.nostrNetwork.reader.streamNotesUntilEndOfStoredEvents(filters: [follow_list_filter], to: to_relays) { + await noteLender.justUseACopy({ await self.handleFollowPackEvent($0) }) + } + + for await noteLender in damus_state.nostrNetwork.reader.streamNotesUntilEndOfStoredEvents(filters: [get_base_filter()], to: to_relays) { await noteLender.justUseACopy({ await self.handleEvent($0) }) } + guard let txn = NdbTxn(ndb: damus_state.ndb) else { return } + let allEvents = events.all_events + followPackEvents.all_events + let task = load_profiles(context: "universe", load: .from_events(allEvents), damus_state: damus_state, txn: txn) + + try? await task?.value + DispatchQueue.main.async { self.loading = false } - - guard let txn = NdbTxn(ndb: damus_state.ndb) else { return } - load_profiles(context: "universe", load: .from_events(events.all_events), damus_state: damus_state, txn: txn) } @MainActor @@ -76,6 +94,20 @@ class SearchHomeModel: ObservableObject { } } } + + @MainActor + func handleFollowPackEvent(_ ev: NostrEvent) { + if ev.known_kind == .follow_list && should_show_event(state: damus_state, ev: ev) && !ev.is_reply() { + if !damus_state.settings.multiple_events_per_pubkey && follow_pack_seen_pubkey.contains(ev.pubkey) { + return + } + follow_pack_seen_pubkey.insert(ev.pubkey) + + if self.followPackEvents.insert(ev) { + self.objectWillChange.send() + } + } + } } func find_profiles_to_fetch(profiles: Profiles, load: PubkeysToLoad, cache: EventCache, txn: NdbTxn) -> [Pubkey] { @@ -113,28 +145,23 @@ enum PubkeysToLoad { case from_keys([Pubkey]) } -func load_profiles(context: String, load: PubkeysToLoad, damus_state: DamusState, txn: NdbTxn) { +func load_profiles(context: String, load: PubkeysToLoad, damus_state: DamusState, txn: NdbTxn) -> Task? 
{ let authors = find_profiles_to_fetch(profiles: damus_state.profiles, load: load, cache: damus_state.events, txn: txn) guard !authors.isEmpty else { - return + return nil } - Task { + return Task { print("load_profiles[\(context)]: requesting \(authors.count) profiles from relay pool") let filter = NostrFilter(kinds: [.metadata], authors: authors) - for await item in damus_state.nostrNetwork.reader.subscribe(filters: [filter]) { + for await noteLender in damus_state.nostrNetwork.reader.streamNotesUntilEndOfStoredEvents(filters: [filter]) { let now = UInt64(Date.now.timeIntervalSince1970) - switch item { - case .event(let lender): - lender.justUseACopy({ event in - if event.known_kind == .metadata { - damus_state.ndb.write_profile_last_fetched(pubkey: event.pubkey, fetched_at: now) - } - }) - case .eose: - break + try noteLender.borrow { event in + if event.known_kind == .metadata { + damus_state.ndb.write_profile_last_fetched(pubkey: event.pubkey, fetched_at: now) + } } } diff --git a/damus/Features/Search/Views/SearchHomeView.swift b/damus/Features/Search/Views/SearchHomeView.swift index 5f056dfa3..be619bbc9 100644 --- a/damus/Features/Search/Views/SearchHomeView.swift +++ b/damus/Features/Search/Views/SearchHomeView.swift @@ -54,7 +54,7 @@ struct SearchHomeView: View { loading: $model.loading, damus: damus_state, show_friend_icon: true, - filter:content_filter(FilterState.posts), + filter: content_filter(FilterState.posts), content: { AnyView(VStack(alignment: .leading) { HStack { @@ -66,7 +66,7 @@ struct SearchHomeView: View { .padding(.top) .padding(.horizontal) - FollowPackTimelineView(events: model.events, loading: $model.loading, damus: damus_state, show_friend_icon: true,filter:content_filter(FilterState.follow_list) + FollowPackTimelineView(events: model.followPackEvents, loading: $model.loading, damus: damus_state, show_friend_icon: true, filter: content_filter(FilterState.follow_list) ).padding(.bottom) Divider() @@ -83,20 +83,10 @@ struct SearchHomeView: View { }.padding(.bottom, 50)) } ) - .refreshable { - // Fetch new information by unsubscribing and resubscribing to the relay - loadingTask?.cancel() - loadingTask = Task { await model.load() } - } } var SearchContent: some View { SearchResultsView(damus_state: damus_state, search: $search) - .refreshable { - // Fetch new information by unsubscribing and resubscribing to the relay - loadingTask?.cancel() - loadingTask = Task { await model.load() } - } } var MainContent: some View { @@ -136,6 +126,12 @@ struct SearchHomeView: View { .onDisappear { loadingTask?.cancel() } + .refreshable { + // Fetch new information by unsubscribing and resubscribing to the relay + loadingTask?.cancel() + loadingTask = Task { await model.reload() } + try? 
await loadingTask?.value + } } } diff --git a/damus/Shared/Utilities/EventHolder.swift b/damus/Shared/Utilities/EventHolder.swift index cd615f33b..f31e6c2b9 100644 --- a/damus/Shared/Utilities/EventHolder.swift +++ b/damus/Shared/Utilities/EventHolder.swift @@ -95,4 +95,10 @@ class EventHolder: ObservableObject, ScrollQueue { self.incoming = [] } + + @MainActor + func reset() { + self.incoming = [] + self.events = [] + } } From 8164eee479bf564441af1fb6befadbb0e7c7374a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Mon, 15 Sep 2025 16:54:54 -0700 Subject: [PATCH 62/91] Return network EOSE in normal mode if device is offline MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This is done to prevent hang ups when the device is offline. Changelog-Added: Added the ability to load saved notes if device is offline Signed-off-by: Daniel D’Aquino --- .../NostrNetworkManager/SubscriptionManager.swift | 7 ++++++- damus/Core/Nostr/RelayPool.swift | 2 +- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift b/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift index ca79def5f..cbbef5495 100644 --- a/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift +++ b/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift @@ -134,9 +134,14 @@ extension NostrNetworkManager { // This closure function issues (yields) an EOSE signal to the stream if all relevant conditions are met let yieldEOSEIfReady = { + let connectedToNetwork = self.pool.network_monitor.currentPath.status == .satisfied // In normal mode: Issuing EOSE requires EOSE from both NDB and the network, since they are all considered separate relays // In experimental local relay model mode: Issuing EOSE requires only EOSE from NDB, since that is the only relay that "matters" - let canIssueEOSE = self.experimentalLocalRelayModelSupport ? ndbEOSEIssued : ndbEOSEIssued && networkEOSEIssued + let canIssueEOSE = self.experimentalLocalRelayModelSupport ? + (ndbEOSEIssued) + : + (ndbEOSEIssued && (networkEOSEIssued || !connectedToNetwork)) + if canIssueEOSE { continuation.yield(.eose) } diff --git a/damus/Core/Nostr/RelayPool.swift b/damus/Core/Nostr/RelayPool.swift index c34a5e059..b15eece4b 100644 --- a/damus/Core/Nostr/RelayPool.swift +++ b/damus/Core/Nostr/RelayPool.swift @@ -40,7 +40,7 @@ class RelayPool { var delegate: Delegate? 
private(set) var signal: SignalModel = SignalModel() - private let network_monitor = NWPathMonitor() + let network_monitor = NWPathMonitor() private let network_monitor_queue = DispatchQueue(label: "io.damus.network_monitor") private var last_network_status: NWPath.Status = .unsatisfied From de528f3f70ebbbbf7664ae30ba48e1383d3d3bb8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Wed, 17 Sep 2025 12:29:11 -0700 Subject: [PATCH 63/91] Improve loading speed on home timeline MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This commit improves the loading speed for the home timeline (and likely other areas of the app) by employing various techniques and changes: - Network EOSE timeout reduced from 10 seconds down to 5 seconds - Network EOSE does not wait on relays with broken connections - Offload HomeModel handler event processing to separate tasks to avoid a large backlog - Give SubscriptionManager streamers more fine-grained EOSE signals for local optimization - Only wait for Ndb EOSE on the home timeline for faster loading - Add logging with time elapsed measurements for easier identification of loading problems Signed-off-by: Daniel D’Aquino --- .../SubscriptionManager.swift | 65 ++++++++++++++----- .../UserRelayListManager.swift | 2 + damus/Core/Nostr/RelayPool.swift | 11 ++-- damus/Features/Chat/Models/ThreadModel.swift | 4 ++ .../Features/Events/Models/EventsModel.swift | 4 ++ .../FollowPack/Models/FollowPackModel.swift | 4 ++ .../Follows/Models/FollowersModel.swift | 8 +++ .../NIP05/Models/NIP05DomainEventsModel.swift | 4 ++ .../Onboarding/SuggestedUsersViewModel.swift | 48 ++++++-------- .../Profile/Models/ProfileModel.swift | 12 ++++ .../Features/Timeline/Models/HomeModel.swift | 46 ++++++++++++- .../Features/Wallet/Models/WalletModel.swift | 4 ++ damus/Features/Zaps/Models/ZapsModel.swift | 4 ++ damus/Shared/Utilities/Log.swift | 1 + .../NostrNetworkManagerTests.swift | 4 ++ 15 files changed, 171 insertions(+), 50 deletions(-) diff --git a/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift b/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift index cbbef5495..7ffec8e22 100644 --- a/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift +++ b/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift @@ -18,6 +18,8 @@ extension NostrNetworkManager { private var taskManager: TaskManager private let experimentalLocalRelayModelSupport: Bool + let EXTRA_VERBOSE_LOGGING: Bool = false + init(pool: RelayPool, ndb: Ndb, experimentalLocalRelayModelSupport: Bool) { self.pool = pool self.ndb = ndb @@ -28,17 +30,21 @@ extension NostrNetworkManager { // MARK: - Subscribing and Streaming data from Nostr /// Streams notes until the EOSE signal - func streamNotesUntilEndOfStoredEvents(filters: [NostrFilter], to desiredRelays: [RelayURL]? = nil, timeout: Duration? = nil) -> AsyncStream { + func streamNotesUntilEndOfStoredEvents(filters: [NostrFilter], to desiredRelays: [RelayURL]? = nil, timeout: Duration? = nil, id: UUID? = nil) -> AsyncStream { let timeout = timeout ?? 
.seconds(10) return AsyncStream { continuation in let streamingTask = Task { - outerLoop: for await item in self.subscribe(filters: filters, to: desiredRelays, timeout: timeout) { + outerLoop: for await item in self.subscribe(filters: filters, to: desiredRelays, timeout: timeout, id: id) { try Task.checkCancellation() switch item { case .event(let lender): continuation.yield(lender) case .eose: break outerLoop + case .ndbEose: + continue + case .networkEose: + continue } } continuation.finish() @@ -52,10 +58,10 @@ extension NostrNetworkManager { /// Subscribes to data from user's relays, for a maximum period of time — after which the stream will end. /// /// This is useful when waiting for some specific data from Nostr, but not indefinitely. - func subscribe(filters: [NostrFilter], to desiredRelays: [RelayURL]? = nil, timeout: Duration) -> AsyncStream { + func subscribe(filters: [NostrFilter], to desiredRelays: [RelayURL]? = nil, timeout: Duration, id: UUID? = nil) -> AsyncStream { return AsyncStream { continuation in let streamingTask = Task { - for await item in self.subscribe(filters: filters, to: desiredRelays) { + for await item in self.subscribe(filters: filters, to: desiredRelays, id: id) { try Task.checkCancellation() continuation.yield(item) } @@ -79,9 +85,10 @@ extension NostrNetworkManager { /// /// - Parameter filters: The nostr filters to specify what kind of data to subscribe to /// - Returns: An async stream of nostr data - func subscribe(filters: [NostrFilter], to desiredRelays: [RelayURL]? = nil) -> AsyncStream { + func subscribe(filters: [NostrFilter], to desiredRelays: [RelayURL]? = nil, id: UUID? = nil) -> AsyncStream { return AsyncStream { continuation in - let subscriptionId = UUID() + let subscriptionId = id ?? UUID() + let startTime = CFAbsoluteTimeGetCurrent() Log.info("Starting subscription %s: %s", for: .subscription_manager, subscriptionId.uuidString, filters.debugDescription) let multiSessionStreamingTask = Task { while !Task.isCancelled { @@ -97,7 +104,7 @@ extension NostrNetworkManager { continue } Log.info("%s: Streaming.", for: .subscription_manager, subscriptionId.uuidString) - for await item in self.sessionSubscribe(filters: filters, to: desiredRelays) { + for await item in self.sessionSubscribe(filters: filters, to: desiredRelays, id: id) { try Task.checkCancellation() continuation.yield(item) } @@ -127,8 +134,11 @@ extension NostrNetworkManager { /// /// - Parameter filters: The nostr filters to specify what kind of data to subscribe to /// - Returns: An async stream of nostr data - private func sessionSubscribe(filters: [NostrFilter], to desiredRelays: [RelayURL]? = nil) -> AsyncStream { + private func sessionSubscribe(filters: [NostrFilter], to desiredRelays: [RelayURL]? = nil, id: UUID? = nil) -> AsyncStream { + let id = id ?? UUID() return AsyncStream { continuation in + let startTime = CFAbsoluteTimeGetCurrent() + Log.debug("Session subscription %s: Started", for: .subscription_manager, id.uuidString) var ndbEOSEIssued = false var networkEOSEIssued = false @@ -143,6 +153,7 @@ extension NostrNetworkManager { (ndbEOSEIssued && (networkEOSEIssued || !connectedToNetwork)) if canIssueEOSE { + Log.debug("Session subscription %s: Issued EOSE for session. 
Elapsed: %.2f seconds", for: .subscription_manager, id.uuidString, CFAbsoluteTimeGetCurrent() - startTime) continuation.yield(.eose) } } @@ -153,7 +164,8 @@ extension NostrNetworkManager { try Task.checkCancellation() switch item { case .eose: - Log.debug("Session subscribe: Received EOSE from nostrdb", for: .subscription_manager) + Log.debug("Session subscription %s: Received EOSE from nostrdb. Elapsed: %.2f seconds", for: .subscription_manager, id.uuidString, CFAbsoluteTimeGetCurrent() - startTime) + continuation.yield(.ndbEose) ndbEOSEIssued = true yieldEOSEIfReady() case .event(let noteKey): @@ -170,32 +182,35 @@ extension NostrNetworkManager { } } catch { - Log.error("NDB streaming error: %s", for: .subscription_manager, error.localizedDescription) + Log.error("Session subscription %s: NDB streaming error: %s", for: .subscription_manager, id.uuidString, error.localizedDescription) } continuation.finish() } let streamTask = Task { do { - for await item in self.pool.subscribe(filters: filters, to: desiredRelays) { + for await item in self.pool.subscribe(filters: filters, to: desiredRelays, id: id) { // NO-OP. Notes will be automatically ingested by NostrDB // TODO: Improve efficiency of subscriptions? try Task.checkCancellation() switch item { case .event(let event): - Log.debug("Session subscribe: Received kind %d event with id %s from the network", for: .subscription_manager, event.kind, event.id.hex()) + if EXTRA_VERBOSE_LOGGING { + Log.debug("Session subscription %s: Received kind %d event with id %s from the network", for: .subscription_manager, id.uuidString, event.kind, event.id.hex()) + } if !self.experimentalLocalRelayModelSupport { // In normal mode (non-experimental), we stream from ndb but also directly from the network continuation.yield(.event(lender: NdbNoteLender(ownedNdbNote: event))) } case .eose: - Log.debug("Session subscribe: Received EOSE from the network", for: .subscription_manager) + Log.debug("Session subscription %s: Received EOSE from the network. Elapsed: %.2f seconds", for: .subscription_manager, id.uuidString, CFAbsoluteTimeGetCurrent() - startTime) + continuation.yield(.networkEose) networkEOSEIssued = true yieldEOSEIfReady() } } } catch { - Log.error("Network streaming error: %s", for: .subscription_manager, error.localizedDescription) + Log.error("Session subscription %s: Network streaming error: %s", for: .subscription_manager, id.uuidString, error.localizedDescription) } continuation.finish() } @@ -348,7 +363,27 @@ extension NostrNetworkManager { enum StreamItem { /// An event which can be borrowed from NostrDB case event(lender: NdbNoteLender) - /// The end of stored events + /// The canonical "end of stored events". See implementations of `subscribe` to see when this event is fired in relation to other EOSEs case eose + /// "End of stored events" from NostrDB. + case ndbEose + /// "End of stored events" from all relays in `RelayPool`. + case networkEose + + var debugDescription: String { + switch self { + case .event(lender: let lender): + let detailedDescription = try? lender.borrow({ event in + "Note with ID: \(event.id.hex())" + }) + return detailedDescription ?? 
"Some note" + case .eose: + return "EOSE" + case .ndbEose: + return "NDB EOSE" + case .networkEose: + return "NETWORK EOSE" + } + } } } diff --git a/damus/Core/Networking/NostrNetworkManager/UserRelayListManager.swift b/damus/Core/Networking/NostrNetworkManager/UserRelayListManager.swift index 01225fc53..f0ed0da83 100644 --- a/damus/Core/Networking/NostrNetworkManager/UserRelayListManager.swift +++ b/damus/Core/Networking/NostrNetworkManager/UserRelayListManager.swift @@ -145,6 +145,8 @@ extension NostrNetworkManager { try? self.set(userRelayList: relayList) // Set the validated list }) case .eose: continue + case .ndbEose: continue + case .networkEose: continue } } } diff --git a/damus/Core/Nostr/RelayPool.swift b/damus/Core/Nostr/RelayPool.swift index b15eece4b..ffa38c0bb 100644 --- a/damus/Core/Nostr/RelayPool.swift +++ b/damus/Core/Nostr/RelayPool.swift @@ -232,11 +232,13 @@ class RelayPool { /// - desiredRelays: The desired relays which to subsctibe to. If `nil`, it defaults to the `RelayPool`'s default list /// - eoseTimeout: The maximum timeout which to give up waiting for the eoseSignal /// - Returns: Returns an async stream that callers can easily consume via a for-loop - func subscribe(filters: [NostrFilter], to desiredRelays: [RelayURL]? = nil, eoseTimeout: Duration? = nil) -> AsyncStream { - let eoseTimeout = eoseTimeout ?? .seconds(10) - let desiredRelays = desiredRelays ?? self.relays.map({ $0.descriptor.url }) + func subscribe(filters: [NostrFilter], to desiredRelays: [RelayURL]? = nil, eoseTimeout: Duration? = nil, id: UUID? = nil) -> AsyncStream { + let eoseTimeout = eoseTimeout ?? .seconds(5) + let desiredRelays = desiredRelays ?? self.relays.filter({ $0.connection.isConnected }).map({ $0.descriptor.url }) + let startTime = CFAbsoluteTimeGetCurrent() return AsyncStream { continuation in - let sub_id = UUID().uuidString + let id = id ?? UUID() + let sub_id = id.uuidString var seenEvents: Set = [] var relaysWhoFinishedInitialResults: Set = [] var eoseSent = false @@ -257,6 +259,7 @@ class RelayPool { break // We do not support handling these yet case .eose(_): relaysWhoFinishedInitialResults.insert(relayUrl) + Log.debug("RelayPool subscription %s: EOSE from %s. EOSE count: %d/%d. 
Elapsed: %.2f seconds.", for: .networking, id.uuidString, relayUrl.absoluteString, relaysWhoFinishedInitialResults.count, Set(desiredRelays).count, CFAbsoluteTimeGetCurrent() - startTime) if relaysWhoFinishedInitialResults == Set(desiredRelays) { continuation.yield(with: .success(.eose)) eoseSent = true diff --git a/damus/Features/Chat/Models/ThreadModel.swift b/damus/Features/Chat/Models/ThreadModel.swift index e22ca6379..b414b945a 100644 --- a/damus/Features/Chat/Models/ThreadModel.swift +++ b/damus/Features/Chat/Models/ThreadModel.swift @@ -122,6 +122,10 @@ class ThreadModel: ObservableObject { case .eose: guard let txn = NdbTxn(ndb: damus_state.ndb) else { return } load_profiles(context: "thread", load: .from_events(Array(event_map.events)), damus_state: damus_state, txn: txn) + case .ndbEose: + break + case .networkEose: + break } } } diff --git a/damus/Features/Events/Models/EventsModel.swift b/damus/Features/Events/Models/EventsModel.swift index 049ecfe76..9a0fd1ea0 100644 --- a/damus/Features/Events/Models/EventsModel.swift +++ b/damus/Features/Events/Models/EventsModel.swift @@ -84,6 +84,10 @@ class EventsModel: ObservableObject { case .eose: DispatchQueue.main.async { self.loading = false } break outerLoop + case .ndbEose: + break + case .networkEose: + break } } DispatchQueue.main.async { self.loading = false } diff --git a/damus/Features/FollowPack/Models/FollowPackModel.swift b/damus/Features/FollowPack/Models/FollowPackModel.swift index 7049efa99..80a66d8ed 100644 --- a/damus/Features/FollowPack/Models/FollowPackModel.swift +++ b/damus/Features/FollowPack/Models/FollowPackModel.swift @@ -59,6 +59,10 @@ class FollowPackModel: ObservableObject { }) case .eose: continue + case .ndbEose: + continue + case .networkEose: + continue } } } diff --git a/damus/Features/Follows/Models/FollowersModel.swift b/damus/Features/Follows/Models/FollowersModel.swift index e696525ff..082025e06 100644 --- a/damus/Features/Follows/Models/FollowersModel.swift +++ b/damus/Features/Follows/Models/FollowersModel.swift @@ -45,6 +45,10 @@ class FollowersModel: ObservableObject { case .eose: guard let txn = NdbTxn(ndb: self.damus_state.ndb) else { return } load_profiles(txn: txn) + case .ndbEose: + continue + case .networkEose: + continue } } } @@ -83,6 +87,10 @@ class FollowersModel: ObservableObject { case .event(let lender): lender.justUseACopy({ self.handle_event(ev: $0) }) case .eose: break + case .ndbEose: + continue + case .networkEose: + continue } } } diff --git a/damus/Features/NIP05/Models/NIP05DomainEventsModel.swift b/damus/Features/NIP05/Models/NIP05DomainEventsModel.swift index 545c0927f..137cc7870 100644 --- a/damus/Features/NIP05/Models/NIP05DomainEventsModel.swift +++ b/damus/Features/NIP05/Models/NIP05DomainEventsModel.swift @@ -73,6 +73,10 @@ class NIP05DomainEventsModel: ObservableObject { load_profiles(context: "search", load: .from_events(self.events.all_events), damus_state: state, txn: txn) DispatchQueue.main.async { self.loading = false } continue + case .ndbEose: + break + case .networkEose: + break } } } diff --git a/damus/Features/Onboarding/SuggestedUsersViewModel.swift b/damus/Features/Onboarding/SuggestedUsersViewModel.swift index d53f878b8..3945765fc 100644 --- a/damus/Features/Onboarding/SuggestedUsersViewModel.swift +++ b/damus/Features/Onboarding/SuggestedUsersViewModel.swift @@ -189,30 +189,25 @@ class SuggestedUsersViewModel: ObservableObject { authors: [Constants.ONBOARDING_FOLLOW_PACK_CURATOR_PUBKEY] ) - for await item in 
self.damus_state.nostrNetwork.reader.subscribe(filters: [filter]) { + for await lender in self.damus_state.nostrNetwork.reader.streamNotesUntilEndOfStoredEvents(filters: [filter]) { // Check for cancellation on each iteration guard !Task.isCancelled else { break } - - switch item { - case .event(let lender): - lender.justUseACopy({ event in - let followPack = FollowPackEvent.parse(from: event) - - guard let id = followPack.uuid else { return } - - let latestPackForThisId: FollowPackEvent - - if let existingPack = packsById[id], existingPack.event.created_at > followPack.event.created_at { - latestPackForThisId = existingPack - } else { - latestPackForThisId = followPack - } - - packsById[id] = latestPackForThisId - }) - case .eose: - break - } + + lender.justUseACopy({ event in + let followPack = FollowPackEvent.parse(from: event) + + guard let id = followPack.uuid else { return } + + let latestPackForThisId: FollowPackEvent + + if let existingPack = packsById[id], existingPack.event.created_at > followPack.event.created_at { + latestPackForThisId = existingPack + } else { + latestPackForThisId = followPack + } + + packsById[id] = latestPackForThisId + }) } } @@ -228,13 +223,8 @@ class SuggestedUsersViewModel: ObservableObject { } let profileFilter = NostrFilter(kinds: [.metadata], authors: allPubkeys) - for await item in damus_state.nostrNetwork.reader.subscribe(filters: [profileFilter]) { - switch item { - case .event(_): - continue // We just need NostrDB to ingest these for them to be available elsewhere, no need to analyze the data - case .eose: - break - } + for await _ in damus_state.nostrNetwork.reader.streamNotesUntilEndOfStoredEvents(filters: [profileFilter]) { + // NO-OP. We just need NostrDB to ingest these for them to be available elsewhere, no need to analyze the data } } } diff --git a/damus/Features/Profile/Models/ProfileModel.swift b/damus/Features/Profile/Models/ProfileModel.swift index a50e252a8..eb281c134 100644 --- a/damus/Features/Profile/Models/ProfileModel.swift +++ b/damus/Features/Profile/Models/ProfileModel.swift @@ -81,6 +81,8 @@ class ProfileModel: ObservableObject, Equatable { case .event(let lender): lender.justUseACopy({ handleNostrEvent($0) }) case .eose: break + case .ndbEose: break + case .networkEose: break } } guard let txn = NdbTxn(ndb: damus.ndb) else { return } @@ -97,6 +99,8 @@ class ProfileModel: ObservableObject, Equatable { case .event(let lender): lender.justUseACopy({ handleNostrEvent($0) }) case .eose: break + case .ndbEose: break + case .networkEose: break } } await bumpUpProgress() @@ -138,6 +142,10 @@ class ProfileModel: ObservableObject, Equatable { } case .eose: continue + case .ndbEose: + continue + case .networkEose: + continue } } } @@ -215,6 +223,10 @@ class ProfileModel: ObservableObject, Equatable { } case .eose: break + case .ndbEose: + break + case .networkEose: + break } } } diff --git a/damus/Features/Timeline/Models/HomeModel.swift b/damus/Features/Timeline/Models/HomeModel.swift index 37f64d887..1b3223d93 100644 --- a/damus/Features/Timeline/Models/HomeModel.swift +++ b/damus/Features/Timeline/Models/HomeModel.swift @@ -450,6 +450,9 @@ class HomeModel: ContactsDelegate, ObservableObject { /// Send the initial filters, just our contact list and relay list mostly func send_initial_filters() { Task { + let startTime = CFAbsoluteTimeGetCurrent() + let id = UUID() + Log.info("Initial filter task started with ID %s", for: .homeModel, id.uuidString) let filter = NostrFilter(kinds: [.contacts], limit: 1, authors: [damus_state.pubkey]) 
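+            // The filter above only requests our own latest contact list (kind 3, limit 1, authored by us);
+            // the loop below waits for its EOSE before sending the full home filters.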
for await item in damus_state.nostrNetwork.reader.subscribe(filters: [filter]) { switch item { @@ -459,9 +462,14 @@ class HomeModel: ContactsDelegate, ObservableObject { case .eose: if !done_init { done_init = true + Log.info("Initial filter task %s: Done initialization; Elapsed time: %.2f seconds", for: .homeModel, id.uuidString, CFAbsoluteTimeGetCurrent() - startTime) send_home_filters() } break + case .ndbEose: + break + case .networkEose: + break } } @@ -474,6 +482,8 @@ class HomeModel: ContactsDelegate, ObservableObject { case .event(let lender): await lender.justUseACopy({ await process_event(ev: $0, context: .initialRelayList) }) case .eose: break + case .ndbEose: break + case .networkEose: break } } } @@ -538,6 +548,8 @@ class HomeModel: ContactsDelegate, ObservableObject { case .event(let lender): await lender.justUseACopy({ await process_event(ev: $0, context: .contacts) }) case .eose: continue + case .ndbEose: continue + case .networkEose: continue } } } @@ -550,6 +562,8 @@ class HomeModel: ContactsDelegate, ObservableObject { case .eose: guard let txn = NdbTxn(ndb: damus_state.ndb) else { return } load_profiles(context: "notifications", load: .from_keys(notifications.uniq_pubkeys()), damus_state: damus_state, txn: txn) + case .ndbEose: break + case .networkEose: break } } } @@ -564,6 +578,8 @@ class HomeModel: ContactsDelegate, ObservableObject { var dms = dms.dms.flatMap { $0.events } dms.append(contentsOf: incoming_dms) load_profiles(context: "dms", load: .from_events(dms), damus_state: damus_state, txn: txn) + case .ndbEose: break + case .networkEose: break } } } @@ -580,6 +596,8 @@ class HomeModel: ContactsDelegate, ObservableObject { case .event(let lender): await lender.justUseACopy({ await process_event(ev: $0, context: .nwc) }) case .eose: continue + case .ndbEose: continue + case .networkEose: continue } } } @@ -628,19 +646,43 @@ class HomeModel: ContactsDelegate, ObservableObject { self.homeHandlerTask?.cancel() self.homeHandlerTask = Task { + let startTime = CFAbsoluteTimeGetCurrent() + let id = UUID() + Log.info("Home handler task: Starting home handler task with ID %s", for: .homeModel, id.uuidString) + DispatchQueue.main.async { self.loading = true } - for await item in damus_state.nostrNetwork.reader.subscribe(filters: home_filters) { + for await item in damus_state.nostrNetwork.reader.subscribe(filters: home_filters, id: id) { switch item { case .event(let lender): - await lender.justUseACopy({ await process_event(ev: $0, context: .home) }) + let currentTime = CFAbsoluteTimeGetCurrent() + // Process events in parallel on a separate task, to avoid holding up upcoming signals + // Empirical evidence has shown that in at least one instance this technique saved up to 5 seconds of load time! 
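+                    // Note: spawning a separate Task per event means events can finish processing out of order relative to their arrival.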
+ Task { await lender.justUseACopy({ await process_event(ev: $0, context: .home) }) } case .eose: + let eoseTime = CFAbsoluteTimeGetCurrent() + Log.info("Home handler task %s: Received general EOSE after %.2f seconds", for: .homeModel, id.uuidString, eoseTime - startTime) + + guard let txn = NdbTxn(ndb: damus_state.ndb) else { return } + load_profiles(context: "home", load: .from_events(events.events), damus_state: damus_state, txn: txn) + + let finishTime = CFAbsoluteTimeGetCurrent() + Log.info("Home handler task %s: Completed initial loading task after %.2f seconds", for: .homeModel, id.uuidString, eoseTime - startTime) + case .ndbEose: + let eoseTime = CFAbsoluteTimeGetCurrent() + Log.info("Home handler task %s: Received NDB EOSE after %.2f seconds", for: .homeModel, id.uuidString, eoseTime - startTime) + guard let txn = NdbTxn(ndb: damus_state.ndb) else { return } DispatchQueue.main.async { self.loading = false } load_profiles(context: "home", load: .from_events(events.events), damus_state: damus_state, txn: txn) + + let finishTime = CFAbsoluteTimeGetCurrent() + Log.info("Home handler task %s: Completed initial NDB loading task after %.2f seconds", for: .homeModel, id.uuidString, eoseTime - startTime) + case .networkEose: + break } } } diff --git a/damus/Features/Wallet/Models/WalletModel.swift b/damus/Features/Wallet/Models/WalletModel.swift index 4c7d90367..f80364853 100644 --- a/damus/Features/Wallet/Models/WalletModel.swift +++ b/damus/Features/Wallet/Models/WalletModel.swift @@ -198,6 +198,10 @@ class WalletModel: ObservableObject { return result case .eose: continue + case .ndbEose: + continue + case .networkEose: + continue } } do { try Task.checkCancellation() } catch { throw .cancelled } diff --git a/damus/Features/Zaps/Models/ZapsModel.swift b/damus/Features/Zaps/Models/ZapsModel.swift index a3e26e67f..9eb063c5a 100644 --- a/damus/Features/Zaps/Models/ZapsModel.swift +++ b/damus/Features/Zaps/Models/ZapsModel.swift @@ -43,6 +43,10 @@ class ZapsModel: ObservableObject { let events = state.events.lookup_zaps(target: target).map { $0.request.ev } guard let txn = NdbTxn(ndb: state.ndb) else { return } load_profiles(context: "zaps_model", load: .from_events(events), damus_state: state, txn: txn) + case .ndbEose: + break + case .networkEose: + break } } } diff --git a/damus/Shared/Utilities/Log.swift b/damus/Shared/Utilities/Log.swift index 79b9955a4..216e7e530 100644 --- a/damus/Shared/Utilities/Log.swift +++ b/damus/Shared/Utilities/Log.swift @@ -24,6 +24,7 @@ enum LogCategory: String { case video_coordination case tips case ndb + case homeModel } /// Damus structured logger diff --git a/damusTests/NostrNetworkManagerTests/NostrNetworkManagerTests.swift b/damusTests/NostrNetworkManagerTests/NostrNetworkManagerTests.swift index de3eb0780..ab0b3400a 100644 --- a/damusTests/NostrNetworkManagerTests/NostrNetworkManagerTests.swift +++ b/damusTests/NostrNetworkManagerTests/NostrNetworkManagerTests.swift @@ -61,6 +61,10 @@ class NostrNetworkManagerTests: XCTestCase { case .eose: // End of stream, break out of the loop endOfStream.fulfill() + case .ndbEose: + continue + case .networkEose: + continue } } } From a3ef36120e05fbedfef4378a732f84e2005d1954 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Wed, 17 Sep 2025 17:17:43 -0700 Subject: [PATCH 64/91] Fix OS 26 build errors MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Daniel D’Aquino --- damus.xcodeproj/project.pbxproj | 10 ++++++++++ 
damus/Core/NIPs/NIP04/NIP04.swift | 4 ++-- damus/Core/Nostr/NostrEvent.swift | 8 ++++---- damus/Features/Posting/Models/DraftsModel.swift | 2 +- .../Media/Video/DamusVideoCoordinator.swift | 2 +- damus/Shared/Utilities/DataExtensions.swift | 17 +++++++++++++++++ damusTests/NIP44v2EncryptionTests.swift | 4 ++-- nostrdb/NdbNote.swift | 2 +- 8 files changed, 38 insertions(+), 11 deletions(-) create mode 100644 damus/Shared/Utilities/DataExtensions.swift diff --git a/damus.xcodeproj/project.pbxproj b/damus.xcodeproj/project.pbxproj index 1502ff40b..8d6275163 100644 --- a/damus.xcodeproj/project.pbxproj +++ b/damus.xcodeproj/project.pbxproj @@ -1590,6 +1590,10 @@ D76BE18C2E0CF3DA004AD0C6 /* Interests.swift in Sources */ = {isa = PBXBuildFile; fileRef = D76BE18B2E0CF3D5004AD0C6 /* Interests.swift */; }; D76BE18D2E0CF3DA004AD0C6 /* Interests.swift in Sources */ = {isa = PBXBuildFile; fileRef = D76BE18B2E0CF3D5004AD0C6 /* Interests.swift */; }; D76BE18E2E0CF3DA004AD0C6 /* Interests.swift in Sources */ = {isa = PBXBuildFile; fileRef = D76BE18B2E0CF3D5004AD0C6 /* Interests.swift */; }; + D77135D32E7B766B00E7639F /* DataExtensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = D77135D22E7B766300E7639F /* DataExtensions.swift */; }; + D77135D42E7B766B00E7639F /* DataExtensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = D77135D22E7B766300E7639F /* DataExtensions.swift */; }; + D77135D52E7B766B00E7639F /* DataExtensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = D77135D22E7B766300E7639F /* DataExtensions.swift */; }; + D77135D62E7B78D700E7639F /* DataExtensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = D77135D22E7B766300E7639F /* DataExtensions.swift */; }; D773BC5F2C6D538500349F0A /* CommentItem.swift in Sources */ = {isa = PBXBuildFile; fileRef = D773BC5E2C6D538500349F0A /* CommentItem.swift */; }; D773BC602C6D538500349F0A /* CommentItem.swift in Sources */ = {isa = PBXBuildFile; fileRef = D773BC5E2C6D538500349F0A /* CommentItem.swift */; }; D77BFA0B2AE3051200621634 /* ProfileActionSheetView.swift in Sources */ = {isa = PBXBuildFile; fileRef = D77BFA0A2AE3051200621634 /* ProfileActionSheetView.swift */; }; @@ -2647,6 +2651,7 @@ D767066E2C8BB3CE00F09726 /* URLHandler.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = URLHandler.swift; sourceTree = ""; }; D76874F22AE3632B00FB0F68 /* ProfileZapLinkView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ProfileZapLinkView.swift; sourceTree = ""; }; D76BE18B2E0CF3D5004AD0C6 /* Interests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Interests.swift; sourceTree = ""; }; + D77135D22E7B766300E7639F /* DataExtensions.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DataExtensions.swift; sourceTree = ""; }; D773BC5E2C6D538500349F0A /* CommentItem.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CommentItem.swift; sourceTree = ""; }; D77BFA0A2AE3051200621634 /* ProfileActionSheetView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ProfileActionSheetView.swift; sourceTree = ""; }; D783A63E2AD4E53D00658DDA /* SuggestedHashtagsView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SuggestedHashtagsView.swift; sourceTree = ""; }; @@ -4673,6 +4678,7 @@ 5C78A7B82E3047DE00CF177D /* Utilities */ = { isa = PBXGroup; children = ( + D77135D22E7B766300E7639F /* DataExtensions.swift */, 4CF0ABEA29844B2F00D66079 /* 
AnyCodable */, D73B74E02D8365B40067BDBC /* ExtraFonts.swift */, D7DB93042D66A43B00DA1EE5 /* Undistractor.swift */, @@ -5925,6 +5931,7 @@ D7100C5C2B77016700C59298 /* IAPProductStateView.swift in Sources */, 4CB9D4A72992D02B00A9A7E4 /* ProfileNameView.swift in Sources */, D733F9E82D92C76100317B11 /* UnownedNdbNote.swift in Sources */, + D77135D42E7B766B00E7639F /* DataExtensions.swift in Sources */, D74EA0902D2E271E002290DD /* ErrorView.swift in Sources */, 4CE4F0F429D779B5005914DB /* PostBox.swift in Sources */, BA37598E2ABCCE500018D73B /* VideoCaptureProcessor.swift in Sources */, @@ -6013,6 +6020,7 @@ 4C36246F2D5EA16A00DD066E /* str.c in Sources */, 4C36246E2D5EA10400DD066E /* hash_u5.c in Sources */, 4C36246C2D5EA0E500DD066E /* bolt11.c in Sources */, + D77135D52E7B766B00E7639F /* DataExtensions.swift in Sources */, 4C36246B2D5EA0D700DD066E /* invoice.c in Sources */, 4C36246A2D5EA0CB00DD066E /* content_parser.c in Sources */, 4C3624692D5EA0C200DD066E /* list.c in Sources */, @@ -6586,6 +6594,7 @@ D73E5E4F2C6A97F4007EB227 /* WebsiteLink.swift in Sources */, D73E5E502C6A97F4007EB227 /* Highlight.swift in Sources */, D73E5E512C6A97F4007EB227 /* CustomPicker.swift in Sources */, + D77135D32E7B766B00E7639F /* DataExtensions.swift in Sources */, D73E5E522C6A97F4007EB227 /* UserView.swift in Sources */, D73E5E532C6A97F4007EB227 /* ZoomableScrollView.swift in Sources */, D73E5E542C6A97F4007EB227 /* NoteZapButton.swift in Sources */, @@ -7180,6 +7189,7 @@ D798D22D2B086DC400234419 /* NostrEvent.swift in Sources */, D798D22E2B086E4800234419 /* NostrResponse.swift in Sources */, D7EDED162B1177840018B19C /* LNUrls.swift in Sources */, + D77135D62E7B78D700E7639F /* DataExtensions.swift in Sources */, D7CCFC132B05887C00323D86 /* ProofOfWork.swift in Sources */, D7CE1B392B0BE719002EDAD4 /* Table.swift in Sources */, D7CE1B452B0BE719002EDAD4 /* Root.swift in Sources */, diff --git a/damus/Core/NIPs/NIP04/NIP04.swift b/damus/Core/NIPs/NIP04/NIP04.swift index d9ec959e4..787027121 100644 --- a/damus/Core/NIPs/NIP04/NIP04.swift +++ b/damus/Core/NIPs/NIP04/NIP04.swift @@ -12,11 +12,11 @@ struct NIP04 {} extension NIP04 { /// Encrypts a message using NIP-04. static func encrypt_message(message: String, privkey: Privkey, to_pk: Pubkey, encoding: EncEncoding = .base64) -> String? { - let iv = random_bytes(count: 16).bytes + let iv = random_bytes(count: 16).byteArray guard let shared_sec = get_shared_secret(privkey: privkey, pubkey: to_pk) else { return nil } - let utf8_message = Data(message.utf8).bytes + let utf8_message = Data(message.utf8).byteArray guard let enc_message = aes_encrypt(data: utf8_message, iv: iv, shared_sec: shared_sec) else { return nil } diff --git a/damus/Core/Nostr/NostrEvent.swift b/damus/Core/Nostr/NostrEvent.swift index d3ea206da..0e6cb3e08 100644 --- a/damus/Core/Nostr/NostrEvent.swift +++ b/damus/Core/Nostr/NostrEvent.swift @@ -321,7 +321,7 @@ func sign_id(privkey: String, id: String) -> String { // Extra params for custom signing - var aux_rand = random_bytes(count: 64).bytes + var aux_rand = random_bytes(count: 64).byteArray var digest = try! 
id.bytes // API allows for signing variable length messages @@ -786,15 +786,15 @@ func validate_event(ev: NostrEvent) -> ValidationResult { let ctx = secp256k1.Context.raw var xonly_pubkey = secp256k1_xonly_pubkey.init() - var ev_pubkey = ev.pubkey.id.bytes + var ev_pubkey = ev.pubkey.id.byteArray var ok = secp256k1_xonly_pubkey_parse(ctx, &xonly_pubkey, &ev_pubkey) != 0 if !ok { return .bad_sig } - var sig = ev.sig.data.bytes - var idbytes = id.id.bytes + var sig = ev.sig.data.byteArray + var idbytes = id.id.byteArray ok = secp256k1_schnorrsig_verify(ctx, &sig, &idbytes, 32, &xonly_pubkey) > 0 return ok ? .ok : .bad_sig diff --git a/damus/Features/Posting/Models/DraftsModel.swift b/damus/Features/Posting/Models/DraftsModel.swift index 14d0b71b3..882e7b5e6 100644 --- a/damus/Features/Posting/Models/DraftsModel.swift +++ b/damus/Features/Posting/Models/DraftsModel.swift @@ -6,7 +6,7 @@ // import Foundation -import SwiftUICore +import SwiftUI import UIKit /// Represents artifacts in a post draft, which is rendered by `PostView` diff --git a/damus/Shared/Media/Video/DamusVideoCoordinator.swift b/damus/Shared/Media/Video/DamusVideoCoordinator.swift index d0de60dfd..3e23948f4 100644 --- a/damus/Shared/Media/Video/DamusVideoCoordinator.swift +++ b/damus/Shared/Media/Video/DamusVideoCoordinator.swift @@ -7,7 +7,7 @@ import Combine import Foundation -import SwiftUICore +import SwiftUI import AVFoundation /// DamusVideoCoordinator is responsible for coordinating the various video players throughout the app, and providing a nicely orchestrated experience. diff --git a/damus/Shared/Utilities/DataExtensions.swift b/damus/Shared/Utilities/DataExtensions.swift new file mode 100644 index 000000000..11bc3ced5 --- /dev/null +++ b/damus/Shared/Utilities/DataExtensions.swift @@ -0,0 +1,17 @@ +// +// DataExtensions.swift +// damus +// +// Created by Daniel D’Aquino on 2025-09-17. 
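+//  Converts Data into a plain [UInt8] array; call sites that previously used `.bytes`
+//  were switched to `.byteArray` as part of these OS 26 build fixes.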
+// +import Foundation + +extension Data { + var byteArray: [UInt8] { + var bytesToReturn: [UInt8] = [] + for i in self.bytes.byteOffsets { + bytesToReturn.append(self[i]) + } + return bytesToReturn + } +} diff --git a/damusTests/NIP44v2EncryptionTests.swift b/damusTests/NIP44v2EncryptionTests.swift index 406d35dc3..bcb59a0f1 100644 --- a/damusTests/NIP44v2EncryptionTests.swift +++ b/damusTests/NIP44v2EncryptionTests.swift @@ -132,7 +132,7 @@ final class NIP44v2EncryptingTests: XCTestCase { try encryptDecryptVectors.forEach { vector in let conversationKey = vector.conversationKey let conversationKeyData = try XCTUnwrap(conversationKey.hexDecoded) - let conversationKeyBytes = conversationKeyData.bytes + let conversationKeyBytes = conversationKeyData.byteArray let nonce = try XCTUnwrap(vector.nonce.hexDecoded) let expectedPlaintextSHA256 = vector.plaintextSHA256 @@ -211,7 +211,7 @@ final class NIP44v2EncryptingTests: XCTestCase { func testInvalidDecrypt() throws { let decryptVectors = try XCTUnwrap(vectors.v2.invalid.decrypt) try decryptVectors.forEach { vector in - let conversationKey = try XCTUnwrap(vector.conversationKey.hexDecoded).bytes + let conversationKey = try XCTUnwrap(vector.conversationKey.hexDecoded).byteArray let payload = vector.payload XCTAssertThrowsError(try NIP44v2Encryption.decrypt(payload: payload, conversationKey: conversationKey), vector.note) } diff --git a/nostrdb/NdbNote.swift b/nostrdb/NdbNote.swift index f6542dce3..25fa9e995 100644 --- a/nostrdb/NdbNote.swift +++ b/nostrdb/NdbNote.swift @@ -291,7 +291,7 @@ class NdbNote: Codable, Equatable, Hashable { return nil } case .manual(_, let signature, _): - var raw_sig = signature.data.bytes + var raw_sig = signature.data.byteArray ndb_builder_set_sig(&builder, &raw_sig) do { From a09e22df246907c50fd92151fc7aff43284b5e06 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Fri, 19 Sep 2025 11:39:07 -0700 Subject: [PATCH 65/91] Improve streaming interfaces and profile loading logic MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Daniel D’Aquino --- damus.xcodeproj/project.pbxproj | 16 ++ damus/ContentView.swift | 2 +- .../NostrNetworkManager.swift | 3 + .../NostrNetworkManager/ProfilesManager.swift | 137 ++++++++++++++++++ .../SubscriptionManager.swift | 105 ++++++++++---- .../UserRelayListManager.swift | 24 ++- damus/Core/Nostr/ProfileObserver.swift | 35 +++++ damus/Core/Storage/DamusState.swift | 2 +- .../Actions/Reposts/Views/Reposted.swift | 2 +- damus/Features/Chat/ChatEventView.swift | 2 +- damus/Features/Chat/Models/ThreadModel.swift | 14 +- damus/Features/Chat/ReplyQuoteView.swift | 2 +- damus/Features/DMs/Views/DMChatView.swift | 2 +- damus/Features/Events/EventProfile.swift | 2 +- .../Features/Events/Models/EventsModel.swift | 4 +- .../FollowPack/Models/FollowPackModel.swift | 29 ++-- .../FollowPack/Views/FollowPackPreview.swift | 2 +- .../FollowPack/Views/FollowPackView.swift | 2 +- .../Follows/Models/FollowersModel.swift | 39 +---- .../Follows/Models/FollowingModel.swift | 2 +- .../NIP05/Models/NIP05DomainEventsModel.swift | 4 +- .../Views/ProfilePicturesView.swift | 2 +- .../Onboarding/SuggestedUsersViewModel.swift | 5 +- damus/Features/Posting/Views/PostView.swift | 2 +- .../CondensedProfilePicturesViewModel.swift | 18 --- .../Profile/Models/ProfileModel.swift | 73 +++------- .../Views/CondensedProfilePicturesView.swift | 6 +- .../Profile/Views/MaybeAnonPfpView.swift | 2 +- .../Views/ProfileActionSheetView.swift | 2 +- 
.../Features/Profile/Views/ProfileName.swift | 2 + .../Profile/Views/ProfilePicView.swift | 8 +- .../Features/Profile/Views/ProfileView.swift | 2 +- .../Purple/Views/DamusPurpleAccountView.swift | 2 +- .../Views/Detail/RelayAdminDetail.swift | 2 +- .../Search/Models/SearchHomeModel.swift | 35 +---- .../Features/Search/Models/SearchModel.swift | 2 - .../Status/Views/UserStatusSheet.swift | 2 +- .../Features/Timeline/Models/HomeModel.swift | 77 +++------- .../Timeline/Views/SideMenuView.swift | 2 +- .../Features/Wallet/Models/WalletModel.swift | 33 ++--- .../Wallet/Views/TransactionsView.swift | 2 +- damus/Features/Zaps/Models/ZapsModel.swift | 17 +-- damus/Shared/Components/QRCodeView.swift | 2 +- damus/Shared/Components/UserView.swift | 2 +- nostrdb/UnownedNdbNote.swift | 2 +- 45 files changed, 380 insertions(+), 352 deletions(-) create mode 100644 damus/Core/Networking/NostrNetworkManager/ProfilesManager.swift create mode 100644 damus/Core/Nostr/ProfileObserver.swift diff --git a/damus.xcodeproj/project.pbxproj b/damus.xcodeproj/project.pbxproj index 8d6275163..d1792c398 100644 --- a/damus.xcodeproj/project.pbxproj +++ b/damus.xcodeproj/project.pbxproj @@ -1161,6 +1161,12 @@ D72A2D022AD9C136002AFF62 /* EventViewTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = D72A2CFF2AD9B66B002AFF62 /* EventViewTests.swift */; }; D72A2D052AD9C1B5002AFF62 /* MockDamusState.swift in Sources */ = {isa = PBXBuildFile; fileRef = D72A2D042AD9C1B5002AFF62 /* MockDamusState.swift */; }; D72A2D072AD9C1FB002AFF62 /* MockProfiles.swift in Sources */ = {isa = PBXBuildFile; fileRef = D72A2D062AD9C1FB002AFF62 /* MockProfiles.swift */; }; + D72B6FA22E7DFB450050CD1D /* ProfilesManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = D72B6FA12E7DFB3F0050CD1D /* ProfilesManager.swift */; }; + D72B6FA32E7DFB450050CD1D /* ProfilesManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = D72B6FA12E7DFB3F0050CD1D /* ProfilesManager.swift */; }; + D72B6FA42E7DFB450050CD1D /* ProfilesManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = D72B6FA12E7DFB3F0050CD1D /* ProfilesManager.swift */; }; + D72B6FA62E7E06AD0050CD1D /* ProfileObserver.swift in Sources */ = {isa = PBXBuildFile; fileRef = D72B6FA52E7E06A40050CD1D /* ProfileObserver.swift */; }; + D72B6FA72E7E06AD0050CD1D /* ProfileObserver.swift in Sources */ = {isa = PBXBuildFile; fileRef = D72B6FA52E7E06A40050CD1D /* ProfileObserver.swift */; }; + D72B6FA92E7E06AD0050CD1D /* ProfileObserver.swift in Sources */ = {isa = PBXBuildFile; fileRef = D72B6FA52E7E06A40050CD1D /* ProfileObserver.swift */; }; D72C01312E78C10500AACB67 /* CondensedProfilePicturesViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = D72C01302E78C0FB00AACB67 /* CondensedProfilePicturesViewModel.swift */; }; D72C01322E78C10500AACB67 /* CondensedProfilePicturesViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = D72C01302E78C0FB00AACB67 /* CondensedProfilePicturesViewModel.swift */; }; D72C01332E78C10500AACB67 /* CondensedProfilePicturesViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = D72C01302E78C0FB00AACB67 /* CondensedProfilePicturesViewModel.swift */; }; @@ -2614,6 +2620,8 @@ D72A2CFF2AD9B66B002AFF62 /* EventViewTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = EventViewTests.swift; sourceTree = ""; }; D72A2D042AD9C1B5002AFF62 /* MockDamusState.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MockDamusState.swift; sourceTree = ""; }; D72A2D062AD9C1FB002AFF62 /* MockProfiles.swift 
*/ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MockProfiles.swift; sourceTree = ""; }; + D72B6FA12E7DFB3F0050CD1D /* ProfilesManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ProfilesManager.swift; sourceTree = ""; }; + D72B6FA52E7E06A40050CD1D /* ProfileObserver.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ProfileObserver.swift; sourceTree = ""; }; D72C01302E78C0FB00AACB67 /* CondensedProfilePicturesViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CondensedProfilePicturesViewModel.swift; sourceTree = ""; }; D72E12772BEED22400F4F781 /* Array.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Array.swift; sourceTree = ""; }; D72E12792BEEEED000F4F781 /* NostrFilterTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NostrFilterTests.swift; sourceTree = ""; }; @@ -3105,6 +3113,7 @@ 4C75EFAB28049CC80006080F /* Nostr */ = { isa = PBXGroup; children = ( + D72B6FA52E7E06A40050CD1D /* ProfileObserver.swift */, 4CE6DF1527F8DEBF00C66700 /* RelayConnection.swift */, 50A60D132A28BEEE00186190 /* RelayLog.swift */, 4C75EFA527FF87A20006080F /* Nostr.swift */, @@ -4907,6 +4916,7 @@ D73BDB122D71212600D69970 /* NostrNetworkManager */ = { isa = PBXGroup; children = ( + D72B6FA12E7DFB3F0050CD1D /* ProfilesManager.swift */, D733F9E02D92C1AA00317B11 /* SubscriptionManager.swift */, D73BDB172D71310C00D69970 /* UserRelayListErrors.swift */, D73BDB132D71215F00D69970 /* UserRelayListManager.swift */, @@ -5712,6 +5722,7 @@ 4C64305C2A945AFF00B0C0E9 /* MusicController.swift in Sources */, 5053ACA72A56DF3B00851AE3 /* DeveloperSettingsView.swift in Sources */, F79C7FAD29D5E9620000F946 /* EditPictureControl.swift in Sources */, + D72B6FA62E7E06AD0050CD1D /* ProfileObserver.swift in Sources */, 4C011B5F2BD0A56A002F2F9B /* ChatroomThreadView.swift in Sources */, 4C9F18E229AA9B6C008C55EC /* CustomizeZapView.swift in Sources */, 4C2859602A12A2BE004746F7 /* SupporterBadge.swift in Sources */, @@ -5764,6 +5775,7 @@ 4C5F9114283D694D0052CD1C /* FollowTarget.swift in Sources */, 5C0567582C8FBC560073F23A /* NDBSearchView.swift in Sources */, D72341192B6864F200E1E135 /* DamusPurpleEnvironment.swift in Sources */, + D72B6FA32E7DFB450050CD1D /* ProfilesManager.swift in Sources */, 4CF0ABD629817F5B00D66079 /* ReportView.swift in Sources */, D71528002E0A3D6900C893D6 /* InterestList.swift in Sources */, 4C1A9A2729DDE31900516EAC /* TranslationSettingsView.swift in Sources */, @@ -6064,6 +6076,7 @@ 82D6FAC12CD99F7900C925F4 /* AsciiCharacter.swift in Sources */, 82D6FAC22CD99F7900C925F4 /* NdbTagElem.swift in Sources */, 82D6FAC32CD99F7900C925F4 /* Ndb.swift in Sources */, + D72B6FA92E7E06AD0050CD1D /* ProfileObserver.swift in Sources */, 82D6FAC42CD99F7900C925F4 /* NdbTagsIterator.swift in Sources */, 82D6FAC52CD99F7900C925F4 /* NdbTxn.swift in Sources */, 82D6FAC72CD99F7900C925F4 /* midl.c in Sources */, @@ -6124,6 +6137,7 @@ 82D6FB052CD99F7900C925F4 /* MusicController.swift in Sources */, 82D6FB062CD99F7900C925F4 /* UserStatusView.swift in Sources */, 82D6FB072CD99F7900C925F4 /* UserStatus.swift in Sources */, + D72B6FA22E7DFB450050CD1D /* ProfilesManager.swift in Sources */, 5CB017262D42C5C400A9ED05 /* TransactionsView.swift in Sources */, 82D6FB082CD99F7900C925F4 /* UserStatusSheet.swift in Sources */, 82D6FB092CD99F7900C925F4 /* SearchHeaderView.swift in Sources */, @@ -6551,6 +6565,7 @@ D73E5E242C6A97F4007EB227 /* 
FollowedNotify.swift in Sources */, D73E5E252C6A97F4007EB227 /* FollowNotify.swift in Sources */, D73E5E262C6A97F4007EB227 /* LikedNotify.swift in Sources */, + D72B6FA42E7DFB450050CD1D /* ProfilesManager.swift in Sources */, D73E5E272C6A97F4007EB227 /* LocalNotificationNotify.swift in Sources */, D73E5F8B2C6AA6A2007EB227 /* UserStatusSheet.swift in Sources */, D73E5E282C6A97F4007EB227 /* LoginNotify.swift in Sources */, @@ -6664,6 +6679,7 @@ D73E5E8A2C6A97F4007EB227 /* PurpleStoreKitManager.swift in Sources */, D733F9E72D92C76100317B11 /* UnownedNdbNote.swift in Sources */, D73E5E8E2C6A97F4007EB227 /* ImageResizer.swift in Sources */, + D72B6FA72E7E06AD0050CD1D /* ProfileObserver.swift in Sources */, D78F080E2D7F78EF00FC6C75 /* Request.swift in Sources */, D73E5E8F2C6A97F4007EB227 /* PhotoCaptureProcessor.swift in Sources */, D773BC602C6D538500349F0A /* CommentItem.swift in Sources */, diff --git a/damus/ContentView.swift b/damus/ContentView.swift index c024145d9..2984943ba 100644 --- a/damus/ContentView.swift +++ b/damus/ContentView.swift @@ -819,7 +819,7 @@ struct TopbarSideMenuButton: View { Button { isSideBarOpened.toggle() } label: { - ProfilePicView(pubkey: damus_state.pubkey, size: 32, highlight: .none, profiles: damus_state.profiles, disable_animation: damus_state.settings.disable_animation) + ProfilePicView(pubkey: damus_state.pubkey, size: 32, highlight: .none, profiles: damus_state.profiles, disable_animation: damus_state.settings.disable_animation, damusState: damus_state) .opacity(isSideBarOpened ? 0 : 1) .animation(isSideBarOpened ? .none : .default, value: isSideBarOpened) .accessibilityHidden(true) // Knowing there is a profile picture here leads to no actionable outcome to VoiceOver users, so it is best not to show it diff --git a/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift b/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift index 80321f8b6..1157314bf 100644 --- a/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift +++ b/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift @@ -33,6 +33,7 @@ class NostrNetworkManager { let postbox: PostBox /// Handles subscriptions and functions to read or consume data from the Nostr network let reader: SubscriptionManager + let profilesManager: ProfilesManager init(delegate: Delegate) { self.delegate = delegate @@ -43,6 +44,7 @@ class NostrNetworkManager { self.reader = reader self.userRelayList = userRelayList self.postbox = PostBox(pool: pool) + self.profilesManager = ProfilesManager(subscriptionManager: reader, ndb: delegate.ndb) } // MARK: - Control functions @@ -51,6 +53,7 @@ class NostrNetworkManager { func connect() { self.userRelayList.connect() self.pool.open = true + Task { await self.profilesManager.load() } } func disconnect() { diff --git a/damus/Core/Networking/NostrNetworkManager/ProfilesManager.swift b/damus/Core/Networking/NostrNetworkManager/ProfilesManager.swift new file mode 100644 index 000000000..2b0f462db --- /dev/null +++ b/damus/Core/Networking/NostrNetworkManager/ProfilesManager.swift @@ -0,0 +1,137 @@ +// +// ProfilesManager.swift +// damus +// +// Created by Daniel D’Aquino on 2025-09-19. +// +import Foundation + +extension NostrNetworkManager { + /// Efficiently manages getting profile metadata from the network and NostrDB without too many relay subscriptions + /// + /// This is necessary because relays have a limit on how many subscriptions can be sent to relays at one given time. 
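+    ///
+    /// A minimal usage sketch, run from an async context (`damusState` and `somePubkey` are stand-ins for values available at the call site):
+    /// ```swift
+    /// let profilesManager = damusState.nostrNetwork.profilesManager
+    /// for await profileRecord in await profilesManager.streamProfile(pubkey: somePubkey) {
+    ///     // React to each profile metadata update here.
+    ///     // The underlying relay subscription is cleaned up automatically once the stream terminates.
+    /// }
+    /// ```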
+ actor ProfilesManager { + private var profileListenerTask: Task? = nil + private var subscriptionSwitcherTask: Task? = nil + private var subscriptionNeedsUpdate: Bool = false + private let subscriptionManager: SubscriptionManager + private let ndb: Ndb + private var streams: [Pubkey: [UUID: ProfileStreamInfo]] + + + // MARK: - Initialization and deinitialization + + init(subscriptionManager: SubscriptionManager, ndb: Ndb) { + self.subscriptionManager = subscriptionManager + self.ndb = ndb + self.streams = [:] + } + + deinit { + self.subscriptionSwitcherTask?.cancel() + self.profileListenerTask?.cancel() + } + + // MARK: - Task management + + func load() { + self.restartProfileListenerTask() + self.subscriptionSwitcherTask?.cancel() + self.subscriptionSwitcherTask = Task { + while true { + try await Task.sleep(for: .seconds(1)) + try Task.checkCancellation() + if subscriptionNeedsUpdate { + self.restartProfileListenerTask() + subscriptionNeedsUpdate = false + } + } + } + } + + func stop() { + self.subscriptionSwitcherTask?.cancel() + self.profileListenerTask?.cancel() + } + + private func restartProfileListenerTask() { + self.profileListenerTask?.cancel() + self.profileListenerTask = Task { + try await self.listenToProfileChanges() + } + } + + + // MARK: - Listening and publishing of profile changes + + private func listenToProfileChanges() async throws { + let pubkeys = Array(streams.keys) + guard pubkeys.count > 0 else { return } + let profileFilter = NostrFilter(kinds: [.metadata], authors: pubkeys) + for await ndbLender in self.subscriptionManager.streamIndefinitely(filters: [profileFilter], streamMode: .ndbFirst) { + try Task.checkCancellation() + try? ndbLender.borrow { ev in + publishProfileUpdates(metadataEvent: ev) + } + try Task.checkCancellation() + } + } + + private func publishProfileUpdates(metadataEvent: borrowing UnownedNdbNote) { + let now = UInt64(Date.now.timeIntervalSince1970) + ndb.write_profile_last_fetched(pubkey: metadataEvent.pubkey, fetched_at: now) + + if let relevantStreams = streams[metadataEvent.pubkey] { + // If we have the user metadata event in ndb, then we should have the profile record as well. 
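+                // lookup_profile below reads that record from NostrDB, and the result is fanned out to every active stream registered for this pubkey.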
+ guard let profile = ndb.lookup_profile(metadataEvent.pubkey) else { return } + for relevantStream in relevantStreams.values { + relevantStream.continuation.yield(profile) + } + } + } + + + // MARK: - Streaming interface + + func streamProfile(pubkey: Pubkey) -> AsyncStream { + return AsyncStream { continuation in + let stream = ProfileStreamInfo(continuation: continuation) + self.add(pubkey: pubkey, stream: stream) + + continuation.onTermination = { @Sendable _ in + Task { await self.removeStream(pubkey: pubkey, id: stream.id) } + } + } + } + + + // MARK: - Stream management + + private func add(pubkey: Pubkey, stream: ProfileStreamInfo) { + if self.streams[pubkey] == nil { + self.streams[pubkey] = [:] + self.subscriptionNeedsUpdate = true + } + self.streams[pubkey]?[stream.id] = stream + } + + func removeStream(pubkey: Pubkey, id: UUID) { + self.streams[pubkey]?[id] = nil + if self.streams[pubkey]?.keys.count == 0 { + // We don't need to subscribe to this profile anymore + self.streams[pubkey] = nil + self.subscriptionNeedsUpdate = true + } + } + + + // MARK: - Helper types + + typealias ProfileStreamItem = NdbTxn + + struct ProfileStreamInfo { + let id: UUID = UUID() + let continuation: AsyncStream.Continuation + } + } +} diff --git a/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift b/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift index 7ffec8e22..0207f1218 100644 --- a/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift +++ b/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift @@ -30,11 +30,11 @@ extension NostrNetworkManager { // MARK: - Subscribing and Streaming data from Nostr /// Streams notes until the EOSE signal - func streamNotesUntilEndOfStoredEvents(filters: [NostrFilter], to desiredRelays: [RelayURL]? = nil, timeout: Duration? = nil, id: UUID? = nil) -> AsyncStream { + func streamExistingEvents(filters: [NostrFilter], to desiredRelays: [RelayURL]? = nil, timeout: Duration? = nil, streamMode: StreamMode? = nil, id: UUID? = nil) -> AsyncStream { let timeout = timeout ?? .seconds(10) return AsyncStream { continuation in let streamingTask = Task { - outerLoop: for await item in self.subscribe(filters: filters, to: desiredRelays, timeout: timeout, id: id) { + outerLoop: for await item in self.advancedStream(filters: filters, to: desiredRelays, timeout: timeout, streamMode: streamMode, id: id) { try Task.checkCancellation() switch item { case .event(let lender): @@ -58,34 +58,55 @@ extension NostrNetworkManager { /// Subscribes to data from user's relays, for a maximum period of time — after which the stream will end. /// /// This is useful when waiting for some specific data from Nostr, but not indefinitely. - func subscribe(filters: [NostrFilter], to desiredRelays: [RelayURL]? = nil, timeout: Duration, id: UUID? = nil) -> AsyncStream { - return AsyncStream { continuation in + func timedStream(filters: [NostrFilter], to desiredRelays: [RelayURL]? = nil, timeout: Duration, streamMode: StreamMode? = nil, id: UUID? 
= nil) -> AsyncStream { + return AsyncStream { continuation in let streamingTask = Task { - for await item in self.subscribe(filters: filters, to: desiredRelays, id: id) { + for await item in self.advancedStream(filters: filters, to: desiredRelays, timeout: timeout, streamMode: streamMode, id: id) { try Task.checkCancellation() - continuation.yield(item) + switch item { + case .event(lender: let lender): + continuation.yield(lender) + case .eose: break + case .ndbEose: break + case .networkEose: break + } } + continuation.finish() } - let timeoutTask = Task { - try await Task.sleep(for: timeout) - continuation.finish() // End the stream due to timeout. + continuation.onTermination = { @Sendable _ in + streamingTask.cancel() + } + } + } + + /// Subscribes to notes indefinitely + /// + /// This is useful when simply streaming all events indefinitely + func streamIndefinitely(filters: [NostrFilter], to desiredRelays: [RelayURL]? = nil, streamMode: StreamMode? = nil, id: UUID? = nil) -> AsyncStream { + return AsyncStream { continuation in + let streamingTask = Task { + for await item in self.advancedStream(filters: filters, to: desiredRelays, streamMode: streamMode, id: id) { + try Task.checkCancellation() + switch item { + case .event(lender: let lender): + continuation.yield(lender) + case .eose: + break + case .ndbEose: + break + case .networkEose: + break + } + } } continuation.onTermination = { @Sendable _ in - timeoutTask.cancel() streamingTask.cancel() } } } /// Subscribes to data from the user's relays - /// - /// ## Implementation notes - /// - /// - When we migrate to the local relay model, we should modify this function to stream directly from NostrDB - /// - /// - Parameter filters: The nostr filters to specify what kind of data to subscribe to - /// - Returns: An async stream of nostr data - func subscribe(filters: [NostrFilter], to desiredRelays: [RelayURL]? = nil, id: UUID? = nil) -> AsyncStream { + func advancedStream(filters: [NostrFilter], to desiredRelays: [RelayURL]? = nil, timeout: Duration? = nil, streamMode: StreamMode? = nil, id: UUID? = nil) -> AsyncStream { return AsyncStream { continuation in let subscriptionId = id ?? UUID() let startTime = CFAbsoluteTimeGetCurrent() @@ -104,7 +125,7 @@ extension NostrNetworkManager { continue } Log.info("%s: Streaming.", for: .subscription_manager, subscriptionId.uuidString) - for await item in self.sessionSubscribe(filters: filters, to: desiredRelays, id: id) { + for await item in self.sessionSubscribe(filters: filters, to: desiredRelays, streamMode: streamMode, id: id) { try Task.checkCancellation() continuation.yield(item) } @@ -117,9 +138,16 @@ extension NostrNetworkManager { } Log.info("%s: Terminated.", for: .subscription_manager, subscriptionId.uuidString) } + let timeoutTask = Task { + if let timeout { + try await Task.sleep(for: timeout) + continuation.finish() // End the stream due to timeout. + } + } continuation.onTermination = { @Sendable _ in Log.info("%s: Cancelled.", for: .subscription_manager, subscriptionId.uuidString) multiSessionStreamingTask.cancel() + timeoutTask.cancel() } } } @@ -134,8 +162,9 @@ extension NostrNetworkManager { /// /// - Parameter filters: The nostr filters to specify what kind of data to subscribe to /// - Returns: An async stream of nostr data - private func sessionSubscribe(filters: [NostrFilter], to desiredRelays: [RelayURL]? = nil, id: UUID? = nil) -> AsyncStream { + private func sessionSubscribe(filters: [NostrFilter], to desiredRelays: [RelayURL]? = nil, streamMode: StreamMode? 
= nil, id: UUID? = nil) -> AsyncStream { let id = id ?? UUID() + let streamMode = streamMode ?? defaultStreamMode() return AsyncStream { continuation in let startTime = CFAbsoluteTimeGetCurrent() Log.debug("Session subscription %s: Started", for: .subscription_manager, id.uuidString) @@ -147,10 +176,10 @@ extension NostrNetworkManager { let connectedToNetwork = self.pool.network_monitor.currentPath.status == .satisfied // In normal mode: Issuing EOSE requires EOSE from both NDB and the network, since they are all considered separate relays // In experimental local relay model mode: Issuing EOSE requires only EOSE from NDB, since that is the only relay that "matters" - let canIssueEOSE = self.experimentalLocalRelayModelSupport ? - (ndbEOSEIssued) - : - (ndbEOSEIssued && (networkEOSEIssued || !connectedToNetwork)) + let canIssueEOSE = switch streamMode { + case .ndbFirst: (ndbEOSEIssued) + case .ndbAndNetworkParallel: (ndbEOSEIssued && (networkEOSEIssued || !connectedToNetwork)) + } if canIssueEOSE { Log.debug("Session subscription %s: Issued EOSE for session. Elapsed: %.2f seconds", for: .subscription_manager, id.uuidString, CFAbsoluteTimeGetCurrent() - startTime) @@ -197,8 +226,10 @@ extension NostrNetworkManager { if EXTRA_VERBOSE_LOGGING { Log.debug("Session subscription %s: Received kind %d event with id %s from the network", for: .subscription_manager, id.uuidString, event.kind, event.id.hex()) } - if !self.experimentalLocalRelayModelSupport { - // In normal mode (non-experimental), we stream from ndb but also directly from the network + switch streamMode { + case .ndbFirst: + break // NO-OP + case .ndbAndNetworkParallel: continuation.yield(.event(lender: NdbNoteLender(ownedNdbNote: event))) } case .eose: @@ -229,6 +260,12 @@ extension NostrNetworkManager { } } + // MARK: - Utility functions + + private func defaultStreamMode() -> StreamMode { + self.experimentalLocalRelayModelSupport ? .ndbFirst : .ndbAndNetworkParallel + } + // MARK: - Finding specific data from Nostr /// Finds a non-replaceable event based on a note ID @@ -255,7 +292,7 @@ extension NostrNetworkManager { func query(filters: [NostrFilter], to: [RelayURL]? = nil, timeout: Duration? = nil) async -> [NostrEvent] { var events: [NostrEvent] = [] - for await noteLender in self.streamNotesUntilEndOfStoredEvents(filters: filters, to: to, timeout: timeout) { + for await noteLender in self.streamExistingEvents(filters: filters, to: to, timeout: timeout) { noteLender.justUseACopy({ events.append($0) }) } return events @@ -270,7 +307,7 @@ extension NostrNetworkManager { let filter = NostrFilter(kinds: nostrKinds, authors: [naddr.author]) - for await noteLender in self.streamNotesUntilEndOfStoredEvents(filters: [filter], to: targetRelays, timeout: timeout) { + for await noteLender in self.streamExistingEvents(filters: [filter], to: targetRelays, timeout: timeout) { // TODO: This can be refactored to borrow the note instead of copying it. But we need to implement `referenced_params` on `UnownedNdbNote` to do so guard let event = noteLender.justGetACopy() else { continue } if event.referenced_params.first?.param.string() == naddr.identifier { @@ -307,7 +344,7 @@ extension NostrNetworkManager { var has_event = false guard let filter else { return nil } - for await noteLender in self.streamNotesUntilEndOfStoredEvents(filters: [filter], to: find_from) { + for await noteLender in self.streamExistingEvents(filters: [filter], to: find_from) { let foundEvent: FoundEvent? = try? 
noteLender.borrow({ event in switch query { case .profile: @@ -363,7 +400,7 @@ extension NostrNetworkManager { enum StreamItem { /// An event which can be borrowed from NostrDB case event(lender: NdbNoteLender) - /// The canonical "end of stored events". See implementations of `subscribe` to see when this event is fired in relation to other EOSEs + /// The canonical generic "end of stored events", which depends on the stream mode. See `StreamMode` to see when this event is fired in relation to other EOSEs case eose /// "End of stored events" from NostrDB. case ndbEose @@ -386,4 +423,12 @@ extension NostrNetworkManager { } } } + + /// The mode of streaming + enum StreamMode { + /// Returns notes exclusively through NostrDB, treating it as the only channel for information in the pipeline. Generic EOSE is fired when EOSE is received from NostrDB + case ndbFirst + /// Returns notes from both NostrDB and the network, in parallel, treating it with similar importance against the network relays. Generic EOSE is fired when EOSE is received from both the network and NostrDB + case ndbAndNetworkParallel + } } diff --git a/damus/Core/Networking/NostrNetworkManager/UserRelayListManager.swift b/damus/Core/Networking/NostrNetworkManager/UserRelayListManager.swift index f0ed0da83..cd4392baf 100644 --- a/damus/Core/Networking/NostrNetworkManager/UserRelayListManager.swift +++ b/damus/Core/Networking/NostrNetworkManager/UserRelayListManager.swift @@ -133,21 +133,15 @@ extension NostrNetworkManager { func listenAndHandleRelayUpdates() async { let filter = NostrFilter(kinds: [.relay_list], authors: [delegate.keypair.pubkey]) - for await item in self.reader.subscribe(filters: [filter]) { - switch item { - case .event(let lender): // Signature validity already ensured at this point - let currentRelayListCreationDate = self.getUserCurrentRelayListCreationDate() - try? lender.borrow({ note in - guard note.pubkey == self.delegate.keypair.pubkey else { return } // Ensure this new list was ours - guard note.createdAt > (currentRelayListCreationDate ?? 0) else { return } // Ensure this is a newer list - guard let relayList = try? NIP65.RelayList(event: note) else { return } // Ensure it is a valid NIP-65 list - - try? self.set(userRelayList: relayList) // Set the validated list - }) - case .eose: continue - case .ndbEose: continue - case .networkEose: continue - } + for await noteLender in self.reader.streamIndefinitely(filters: [filter]) { + let currentRelayListCreationDate = self.getUserCurrentRelayListCreationDate() + try? noteLender.borrow({ note in + guard note.pubkey == self.delegate.keypair.pubkey else { return } // Ensure this new list was ours + guard note.createdAt > (currentRelayListCreationDate ?? 0) else { return } // Ensure this is a newer list + guard let relayList = try? NIP65.RelayList(event: note) else { return } // Ensure it is a valid NIP-65 list + + try? self.set(userRelayList: relayList) // Set the validated list + }) } } diff --git a/damus/Core/Nostr/ProfileObserver.swift b/damus/Core/Nostr/ProfileObserver.swift new file mode 100644 index 000000000..a67f3e221 --- /dev/null +++ b/damus/Core/Nostr/ProfileObserver.swift @@ -0,0 +1,35 @@ +// +// ProfileObserver.swift +// damus +// +// Created by Daniel D’Aquino on 2025-09-19. +// +import Combine +import Foundation + +@MainActor +class ProfileObserver: ObservableObject { + private let pubkey: Pubkey + private var observerTask: Task? 
= nil + private let damusState: DamusState + + init(pubkey: Pubkey, damusState: DamusState) { + self.pubkey = pubkey + self.damusState = damusState + self.watchProfileChanges() + } + + private func watchProfileChanges() { + observerTask?.cancel() + observerTask = Task { + for await _ in await damusState.nostrNetwork.profilesManager.streamProfile(pubkey: self.pubkey) { + try Task.checkCancellation() + DispatchQueue.main.async { self.objectWillChange.send() } + } + } + } + + deinit { + observerTask?.cancel() + } +} diff --git a/damus/Core/Storage/DamusState.swift b/damus/Core/Storage/DamusState.swift index f7170a032..85d7a0528 100644 --- a/damus/Core/Storage/DamusState.swift +++ b/damus/Core/Storage/DamusState.swift @@ -9,7 +9,7 @@ import Foundation import LinkPresentation import EmojiPicker -class DamusState: HeadlessDamusState { +class DamusState: HeadlessDamusState, ObservableObject { let keypair: Keypair let likes: EventCounter let boosts: EventCounter diff --git a/damus/Features/Actions/Reposts/Views/Reposted.swift b/damus/Features/Actions/Reposts/Views/Reposted.swift index 3387e36c5..465a05486 100644 --- a/damus/Features/Actions/Reposts/Views/Reposted.swift +++ b/damus/Features/Actions/Reposts/Views/Reposted.swift @@ -27,7 +27,7 @@ struct Reposted: View { // Show profile picture of the reposter only if the reposter is not the author of the reposted note. if pubkey != target.pubkey { - ProfilePicView(pubkey: pubkey, size: eventview_pfp_size(.small), highlight: .none, profiles: damus.profiles, disable_animation: damus.settings.disable_animation) + ProfilePicView(pubkey: pubkey, size: eventview_pfp_size(.small), highlight: .none, profiles: damus.profiles, disable_animation: damus.settings.disable_animation, damusState: damus) .onTapGesture { show_profile_action_sheet_if_enabled(damus_state: damus, pubkey: pubkey) } diff --git a/damus/Features/Chat/ChatEventView.swift b/damus/Features/Chat/ChatEventView.swift index 6ce5cd355..530e1b6cd 100644 --- a/damus/Features/Chat/ChatEventView.swift +++ b/damus/Features/Chat/ChatEventView.swift @@ -83,7 +83,7 @@ struct ChatEventView: View { var profile_picture_view: some View { VStack { - ProfilePicView(pubkey: event.pubkey, size: 32, highlight: .none, profiles: damus_state.profiles, disable_animation: disable_animation) + ProfilePicView(pubkey: event.pubkey, size: 32, highlight: .none, profiles: damus_state.profiles, disable_animation: disable_animation, damusState: damus_state) .onTapGesture { show_profile_action_sheet_if_enabled(damus_state: damus_state, pubkey: event.pubkey) } diff --git a/damus/Features/Chat/Models/ThreadModel.swift b/damus/Features/Chat/Models/ThreadModel.swift index b414b945a..871aa1266 100644 --- a/damus/Features/Chat/Models/ThreadModel.swift +++ b/damus/Features/Chat/Models/ThreadModel.swift @@ -115,18 +115,8 @@ class ThreadModel: ObservableObject { self.listener?.cancel() self.listener = Task { Log.info("subscribing to thread %s ", for: .render, original_event.id.hex()) - for await item in damus_state.nostrNetwork.reader.subscribe(filters: base_filters + meta_filters) { - switch item { - case .event(let lender): - lender.justUseACopy({ handle_event(ev: $0) }) - case .eose: - guard let txn = NdbTxn(ndb: damus_state.ndb) else { return } - load_profiles(context: "thread", load: .from_events(Array(event_map.events)), damus_state: damus_state, txn: txn) - case .ndbEose: - break - case .networkEose: - break - } + for await event in damus_state.nostrNetwork.reader.streamIndefinitely(filters: base_filters + meta_filters) { + 
event.justUseACopy({ handle_event(ev: $0) }) } } } diff --git a/damus/Features/Chat/ReplyQuoteView.swift b/damus/Features/Chat/ReplyQuoteView.swift index d69bd8fd1..62b60a965 100644 --- a/damus/Features/Chat/ReplyQuoteView.swift +++ b/damus/Features/Chat/ReplyQuoteView.swift @@ -26,7 +26,7 @@ struct ReplyQuoteView: View { VStack(alignment: .leading) { HStack(alignment: .center) { if can_show_event { - ProfilePicView(pubkey: event.pubkey, size: 14, highlight: .reply, profiles: state.profiles, disable_animation: false) + ProfilePicView(pubkey: event.pubkey, size: 14, highlight: .reply, profiles: state.profiles, disable_animation: false, damusState: state) let blur_images = should_blur_images(settings: state.settings, contacts: state.contacts, ev: event, our_pubkey: state.pubkey) NoteContentView(damus_state: state, event: event, blur_images: blur_images, size: .small, options: options) .font(.callout) diff --git a/damus/Features/DMs/Views/DMChatView.swift b/damus/Features/DMs/Views/DMChatView.swift index 6804162b3..39a749368 100644 --- a/damus/Features/DMs/Views/DMChatView.swift +++ b/damus/Features/DMs/Views/DMChatView.swift @@ -63,7 +63,7 @@ struct DMChatView: View, KeyboardReadable { var Header: some View { return NavigationLink(value: Route.ProfileByKey(pubkey: pubkey)) { HStack { - ProfilePicView(pubkey: pubkey, size: 24, highlight: .none, profiles: damus_state.profiles, disable_animation: damus_state.settings.disable_animation) + ProfilePicView(pubkey: pubkey, size: 24, highlight: .none, profiles: damus_state.profiles, disable_animation: damus_state.settings.disable_animation, damusState: damus_state) ProfileName(pubkey: pubkey, damus: damus_state) } diff --git a/damus/Features/Events/EventProfile.swift b/damus/Features/Events/EventProfile.swift index 66c281ddf..d435e626a 100644 --- a/damus/Features/Events/EventProfile.swift +++ b/damus/Features/Events/EventProfile.swift @@ -37,7 +37,7 @@ struct EventProfile: View { var body: some View { HStack(alignment: .center, spacing: 10) { - ProfilePicView(pubkey: pubkey, size: pfp_size, highlight: .none, profiles: damus_state.profiles, disable_animation: disable_animation, show_zappability: true) + ProfilePicView(pubkey: pubkey, size: pfp_size, highlight: .none, profiles: damus_state.profiles, disable_animation: disable_animation, show_zappability: true, damusState: damus_state) .onTapGesture { show_profile_action_sheet_if_enabled(damus_state: damus_state, pubkey: pubkey) } diff --git a/damus/Features/Events/Models/EventsModel.swift b/damus/Features/Events/Models/EventsModel.swift index 9a0fd1ea0..fcd94dc8a 100644 --- a/damus/Features/Events/Models/EventsModel.swift +++ b/damus/Features/Events/Models/EventsModel.swift @@ -71,7 +71,7 @@ class EventsModel: ObservableObject { loadingTask?.cancel() loadingTask = Task { DispatchQueue.main.async { self.loading = true } - outerLoop: for await item in state.nostrNetwork.reader.subscribe(filters: [get_filter()]) { + outerLoop: for await item in state.nostrNetwork.reader.advancedStream(filters: [get_filter()]) { switch item { case .event(let lender): Task { @@ -91,8 +91,6 @@ class EventsModel: ObservableObject { } } DispatchQueue.main.async { self.loading = false } - guard let txn = NdbTxn(ndb: self.state.ndb) else { return } - load_profiles(context: "events_model", load: .from_events(events.all_events), damus_state: state, txn: txn) } } diff --git a/damus/Features/FollowPack/Models/FollowPackModel.swift b/damus/Features/FollowPack/Models/FollowPackModel.swift index 80a66d8ed..a66e669b3 100644 --- 
a/damus/Features/FollowPack/Models/FollowPackModel.swift +++ b/damus/Features/FollowPack/Models/FollowPackModel.swift @@ -43,27 +43,18 @@ class FollowPackModel: ObservableObject { filter.authors = follow_pack_users filter.limit = 500 - for await item in damus_state.nostrNetwork.reader.subscribe(filters: [filter], to: to_relays) { - switch item { - case .event(lender: let lender): - await lender.justUseACopy({ event in - let should_show_event = await should_show_event(state: damus_state, ev: event) - if event.is_textlike && should_show_event && !event.is_reply() - { - if await self.events.insert(event) { - DispatchQueue.main.async { - self.objectWillChange.send() - } + for await event in damus_state.nostrNetwork.reader.streamIndefinitely(filters: [filter], to: to_relays) { + await event.justUseACopy({ event in + let should_show_event = await should_show_event(state: damus_state, ev: event) + if event.is_textlike && should_show_event && !event.is_reply() + { + if await self.events.insert(event) { + DispatchQueue.main.async { + self.objectWillChange.send() } } - }) - case .eose: - continue - case .ndbEose: - continue - case .networkEose: - continue - } + } + }) } } } diff --git a/damus/Features/FollowPack/Views/FollowPackPreview.swift b/damus/Features/FollowPack/Views/FollowPackPreview.swift index 244eea46f..c31dd6a35 100644 --- a/damus/Features/FollowPack/Views/FollowPackPreview.swift +++ b/damus/Features/FollowPack/Views/FollowPackPreview.swift @@ -153,7 +153,7 @@ struct FollowPackPreviewBody: View { } HStack(alignment: .center) { - ProfilePicView(pubkey: event.event.pubkey, size: 25, highlight: .none, profiles: state.profiles, disable_animation: state.settings.disable_animation, show_zappability: true) + ProfilePicView(pubkey: event.event.pubkey, size: 25, highlight: .none, profiles: state.profiles, disable_animation: state.settings.disable_animation, show_zappability: true, damusState: state) .onTapGesture { state.nav.push(route: Route.ProfileByKey(pubkey: event.event.pubkey)) } diff --git a/damus/Features/FollowPack/Views/FollowPackView.swift b/damus/Features/FollowPack/Views/FollowPackView.swift index 301dd30d6..22854a23b 100644 --- a/damus/Features/FollowPack/Views/FollowPackView.swift +++ b/damus/Features/FollowPack/Views/FollowPackView.swift @@ -131,7 +131,7 @@ struct FollowPackView: View { } HStack(alignment: .center) { - ProfilePicView(pubkey: event.event.pubkey, size: 25, highlight: .none, profiles: state.profiles, disable_animation: state.settings.disable_animation, show_zappability: true) + ProfilePicView(pubkey: event.event.pubkey, size: 25, highlight: .none, profiles: state.profiles, disable_animation: state.settings.disable_animation, show_zappability: true, damusState: state) .onTapGesture { state.nav.push(route: Route.ProfileByKey(pubkey: event.event.pubkey)) } diff --git a/damus/Features/Follows/Models/FollowersModel.swift b/damus/Features/Follows/Models/FollowersModel.swift index 082025e06..b1ab15ab3 100644 --- a/damus/Features/Follows/Models/FollowersModel.swift +++ b/damus/Features/Follows/Models/FollowersModel.swift @@ -38,18 +38,8 @@ class FollowersModel: ObservableObject { let filters = [filter] self.listener?.cancel() self.listener = Task { - for await item in damus_state.nostrNetwork.reader.subscribe(filters: filters) { - switch item { - case .event(let lender): - lender.justUseACopy({ self.handle_event(ev: $0) }) - case .eose: - guard let txn = NdbTxn(ndb: self.damus_state.ndb) else { return } - load_profiles(txn: txn) - case .ndbEose: - continue - case 
.networkEose: - continue - } + for await lender in damus_state.nostrNetwork.reader.streamIndefinitely(filters: filters) { + lender.justUseACopy({ self.handle_event(ev: $0) }) } } } @@ -70,31 +60,6 @@ class FollowersModel: ObservableObject { contacts?.append(ev.pubkey) has_contact.insert(ev.pubkey) } - - func load_profiles(txn: NdbTxn) { - let authors = find_profiles_to_fetch_from_keys(profiles: damus_state.profiles, pks: contacts ?? [], txn: txn) - if authors.isEmpty { - return - } - - let filter = NostrFilter(kinds: [.metadata], - authors: authors) - - self.profilesListener?.cancel() - self.profilesListener = Task { - for await item in await damus_state.nostrNetwork.reader.subscribe(filters: [filter]) { - switch item { - case .event(let lender): - lender.justUseACopy({ self.handle_event(ev: $0) }) - case .eose: break - case .ndbEose: - continue - case .networkEose: - continue - } - } - } - } func handle_event(ev: NostrEvent) { if ev.known_kind == .contacts { diff --git a/damus/Features/Follows/Models/FollowingModel.swift b/damus/Features/Follows/Models/FollowingModel.swift index 59a547ac6..39e79b3bf 100644 --- a/damus/Features/Follows/Models/FollowingModel.swift +++ b/damus/Features/Follows/Models/FollowingModel.swift @@ -43,7 +43,7 @@ class FollowingModel { let filters = [filter] self.listener?.cancel() self.listener = Task { - for await item in self.damus_state.nostrNetwork.reader.subscribe(filters: filters) { + for await item in self.damus_state.nostrNetwork.reader.advancedStream(filters: filters) { // don't need to do anything here really continue } diff --git a/damus/Features/NIP05/Models/NIP05DomainEventsModel.swift b/damus/Features/NIP05/Models/NIP05DomainEventsModel.swift index 137cc7870..d5072b36a 100644 --- a/damus/Features/NIP05/Models/NIP05DomainEventsModel.swift +++ b/damus/Features/NIP05/Models/NIP05DomainEventsModel.swift @@ -64,13 +64,11 @@ class NIP05DomainEventsModel: ObservableObject { filter.authors = Array(authors) - for await item in state.nostrNetwork.reader.subscribe(filters: [filter]) { + for await item in state.nostrNetwork.reader.advancedStream(filters: [filter]) { switch item { case .event(let lender): await lender.justUseACopy({ await self.add_event($0) }) case .eose: - guard let txn = NdbTxn(ndb: state.ndb) else { return } - load_profiles(context: "search", load: .from_events(self.events.all_events), damus_state: state, txn: txn) DispatchQueue.main.async { self.loading = false } continue case .ndbEose: diff --git a/damus/Features/Notifications/Views/ProfilePicturesView.swift b/damus/Features/Notifications/Views/ProfilePicturesView.swift index 1c274349a..ad8c23ee0 100644 --- a/damus/Features/Notifications/Views/ProfilePicturesView.swift +++ b/damus/Features/Notifications/Views/ProfilePicturesView.swift @@ -14,7 +14,7 @@ struct ProfilePicturesView: View { var body: some View { HStack { ForEach(pubkeys.prefix(8), id: \.self) { pubkey in - ProfilePicView(pubkey: pubkey, size: 32.0, highlight: .none, profiles: state.profiles, disable_animation: state.settings.disable_animation) + ProfilePicView(pubkey: pubkey, size: 32.0, highlight: .none, profiles: state.profiles, disable_animation: state.settings.disable_animation, damusState: state) .onTapGesture { state.nav.push(route: Route.ProfileByKey(pubkey: pubkey)) } diff --git a/damus/Features/Onboarding/SuggestedUsersViewModel.swift b/damus/Features/Onboarding/SuggestedUsersViewModel.swift index 3945765fc..65ab22fd4 100644 --- a/damus/Features/Onboarding/SuggestedUsersViewModel.swift +++ 
b/damus/Features/Onboarding/SuggestedUsersViewModel.swift @@ -189,7 +189,7 @@ class SuggestedUsersViewModel: ObservableObject { authors: [Constants.ONBOARDING_FOLLOW_PACK_CURATOR_PUBKEY] ) - for await lender in self.damus_state.nostrNetwork.reader.streamNotesUntilEndOfStoredEvents(filters: [filter]) { + for await lender in self.damus_state.nostrNetwork.reader.streamExistingEvents(filters: [filter]) { // Check for cancellation on each iteration guard !Task.isCancelled else { break } @@ -212,6 +212,7 @@ class SuggestedUsersViewModel: ObservableObject { } /// Finds all profiles mentioned in the follow packs, and loads the profile data from the network + // TODO LOCAL_RELAY_PROFILE: Remove this private func loadProfiles(for packs: [FollowPackEvent]) async { var allPubkeys: [Pubkey] = [] @@ -223,7 +224,7 @@ class SuggestedUsersViewModel: ObservableObject { } let profileFilter = NostrFilter(kinds: [.metadata], authors: allPubkeys) - for await _ in damus_state.nostrNetwork.reader.streamNotesUntilEndOfStoredEvents(filters: [profileFilter]) { + for await _ in damus_state.nostrNetwork.reader.streamExistingEvents(filters: [profileFilter]) { // NO-OP. We just need NostrDB to ingest these for them to be available elsewhere, no need to analyze the data } } diff --git a/damus/Features/Posting/Views/PostView.swift b/damus/Features/Posting/Views/PostView.swift index cfec7833c..1f9672a49 100644 --- a/damus/Features/Posting/Views/PostView.swift +++ b/damus/Features/Posting/Views/PostView.swift @@ -388,7 +388,7 @@ struct PostView: View { HStack(alignment: .top, spacing: 0) { VStack(alignment: .leading, spacing: 0) { HStack(alignment: .top) { - ProfilePicView(pubkey: damus_state.pubkey, size: PFP_SIZE, highlight: .none, profiles: damus_state.profiles, disable_animation: damus_state.settings.disable_animation) + ProfilePicView(pubkey: damus_state.pubkey, size: PFP_SIZE, highlight: .none, profiles: damus_state.profiles, disable_animation: damus_state.settings.disable_animation, damusState: damus_state) VStack(alignment: .leading) { if let prompt_view { diff --git a/damus/Features/Profile/Models/CondensedProfilePicturesViewModel.swift b/damus/Features/Profile/Models/CondensedProfilePicturesViewModel.swift index a8c35f079..166f2f1fc 100644 --- a/damus/Features/Profile/Models/CondensedProfilePicturesViewModel.swift +++ b/damus/Features/Profile/Models/CondensedProfilePicturesViewModel.swift @@ -21,22 +21,4 @@ class CondensedProfilePicturesViewModel: ObservableObject { self.pubkeys = pubkeys self.maxPictures = min(maxPictures, pubkeys.count) } - - func load() { - loadingTask?.cancel() - loadingTask = Task { try? await loadingTask() } - } - - func loadingTask() async throws { - let filter = NostrFilter(kinds: [.metadata], authors: shownPubkeys) - let _ = await state.nostrNetwork.reader.query(filters: [filter]) - for await _ in state.nostrNetwork.reader.streamNotesUntilEndOfStoredEvents(filters: [filter]) { - // NO-OP, we just need it to be loaded into NostrDB. 
- try Task.checkCancellation() - } - DispatchQueue.main.async { - // Cause the view to re-render with the newly loaded profiles - self.objectWillChange.send() - } - } } diff --git a/damus/Features/Profile/Models/ProfileModel.swift b/damus/Features/Profile/Models/ProfileModel.swift index eb281c134..dae8ea95e 100644 --- a/damus/Features/Profile/Models/ProfileModel.swift +++ b/damus/Features/Profile/Models/ProfileModel.swift @@ -76,34 +76,21 @@ class ProfileModel: ObservableObject, Equatable { var text_filter = NostrFilter(kinds: [.text, .longform, .highlight]) text_filter.authors = [pubkey] text_filter.limit = 500 - for await item in damus.nostrNetwork.reader.subscribe(filters: [text_filter]) { - switch item { - case .event(let lender): - lender.justUseACopy({ handleNostrEvent($0) }) - case .eose: break - case .ndbEose: break - case .networkEose: break - } - } - guard let txn = NdbTxn(ndb: damus.ndb) else { return } - load_profiles(context: "profile", load: .from_events(events.events), damus_state: damus, txn: txn) await bumpUpProgress() + for await event in damus.nostrNetwork.reader.streamIndefinitely(filters: [text_filter]) { + event.justUseACopy({ handleNostrEvent($0) }) + } } profileListener?.cancel() profileListener = Task { var profile_filter = NostrFilter(kinds: [.contacts, .metadata, .boost]) var relay_list_filter = NostrFilter(kinds: [.relay_list], authors: [pubkey]) profile_filter.authors = [pubkey] - for await item in damus.nostrNetwork.reader.subscribe(filters: [profile_filter, relay_list_filter]) { - switch item { - case .event(let lender): - lender.justUseACopy({ handleNostrEvent($0) }) - case .eose: break - case .ndbEose: break - case .networkEose: break - } - } await bumpUpProgress() + for await event in damus.nostrNetwork.reader.streamIndefinitely(filters: [profile_filter, relay_list_filter]) { + event.justUseACopy({ handleNostrEvent($0) }) + } + } conversationListener?.cancel() conversationListener = Task { @@ -127,25 +114,16 @@ class ProfileModel: ObservableObject, Equatable { let conversations_filter_them = NostrFilter(kinds: conversation_kinds, pubkeys: [damus.pubkey], limit: limit, authors: [pubkey]) let conversations_filter_us = NostrFilter(kinds: conversation_kinds, pubkeys: [pubkey], limit: limit, authors: [damus.pubkey]) print("subscribing to conversation events from and to profile \(pubkey)") - for await item in self.damus.nostrNetwork.reader.subscribe(filters: [conversations_filter_them, conversations_filter_us]) { - switch item { - case .event(let lender): - try? lender.borrow { ev in - if !seen_event.contains(ev.id) { - let event = ev.toOwned() - Task { await self.add_event(event) } - conversation_events.insert(ev.id) - } - else if !conversation_events.contains(ev.id) { - conversation_events.insert(ev.id) - } + for await noteLender in self.damus.nostrNetwork.reader.streamIndefinitely(filters: [conversations_filter_them, conversations_filter_us]) { + try? 
noteLender.borrow { ev in + if !seen_event.contains(ev.id) { + let event = ev.toOwned() + Task { await self.add_event(event) } + conversation_events.insert(ev.id) + } + else if !conversation_events.contains(ev.id) { + conversation_events.insert(ev.id) } - case .eose: - continue - case .ndbEose: - continue - case .networkEose: - continue } } } @@ -212,21 +190,12 @@ class ProfileModel: ObservableObject, Equatable { profile_filter.authors = [pubkey] self.findRelaysListener?.cancel() self.findRelaysListener = Task { - for await item in await damus.nostrNetwork.reader.subscribe(filters: [profile_filter]) { - switch item { - case .event(let lender): - try? lender.borrow { event in - if case .contacts = event.known_kind { - // TODO: Is this correct? - self.legacy_relay_list = decode_json_relays(event.content) - } + for await noteLender in damus.nostrNetwork.reader.streamIndefinitely(filters: [profile_filter]) { + try? noteLender.borrow { event in + if case .contacts = event.known_kind { + // TODO: Is this correct? + self.legacy_relay_list = decode_json_relays(event.content) } - case .eose: - break - case .ndbEose: - break - case .networkEose: - break } } } diff --git a/damus/Features/Profile/Views/CondensedProfilePicturesView.swift b/damus/Features/Profile/Views/CondensedProfilePicturesView.swift index a04666fd9..237286fb8 100644 --- a/damus/Features/Profile/Views/CondensedProfilePicturesView.swift +++ b/damus/Features/Profile/Views/CondensedProfilePicturesView.swift @@ -18,16 +18,12 @@ struct CondensedProfilePicturesView: View { // Using ZStack to make profile pictures floating and stacked on top of each other. ZStack { ForEach((0..(context: String, load: PubkeysToLoad, damus_state: DamusState, txn: NdbTxn) -> Task? { - let authors = find_profiles_to_fetch(profiles: damus_state.profiles, load: load, cache: damus_state.events, txn: txn) - - guard !authors.isEmpty else { - return nil - } - - return Task { - print("load_profiles[\(context)]: requesting \(authors.count) profiles from relay pool") - let filter = NostrFilter(kinds: [.metadata], authors: authors) - - for await noteLender in damus_state.nostrNetwork.reader.streamNotesUntilEndOfStoredEvents(filters: [filter]) { - let now = UInt64(Date.now.timeIntervalSince1970) - try noteLender.borrow { event in - if event.known_kind == .metadata { - damus_state.ndb.write_profile_last_fetched(pubkey: event.pubkey, fetched_at: now) - } - } - } - - print("load_profiles[\(context)]: done loading \(authors.count) profiles from relay pool") - } -} - diff --git a/damus/Features/Search/Models/SearchModel.swift b/damus/Features/Search/Models/SearchModel.swift index 3547f4630..41964f367 100644 --- a/damus/Features/Search/Models/SearchModel.swift +++ b/damus/Features/Search/Models/SearchModel.swift @@ -54,9 +54,7 @@ class SearchModel: ObservableObject { } } - guard let txn = NdbTxn(ndb: state.ndb) else { return } try Task.checkCancellation() - load_profiles(context: "search", load: .from_events(self.events.all_events), damus_state: state, txn: txn) DispatchQueue.main.async { self.loading = false } diff --git a/damus/Features/Status/Views/UserStatusSheet.swift b/damus/Features/Status/Views/UserStatusSheet.swift index f2c2dd883..dbff37bf9 100644 --- a/damus/Features/Status/Views/UserStatusSheet.swift +++ b/damus/Features/Status/Views/UserStatusSheet.swift @@ -129,7 +129,7 @@ struct UserStatusSheet: View { Divider() ZStack(alignment: .top) { - ProfilePicView(pubkey: keypair.pubkey, size: 120.0, highlight: .custom(DamusColors.white, 3.0), profiles: 
damus_state.profiles, disable_animation: damus_state.settings.disable_animation) + ProfilePicView(pubkey: keypair.pubkey, size: 120.0, highlight: .custom(DamusColors.white, 3.0), profiles: damus_state.profiles, disable_animation: damus_state.settings.disable_animation, damusState: damus_state) .padding(.top, 30) VStack(spacing: 0) { diff --git a/damus/Features/Timeline/Models/HomeModel.swift b/damus/Features/Timeline/Models/HomeModel.swift index 1b3223d93..125fffa5f 100644 --- a/damus/Features/Timeline/Models/HomeModel.swift +++ b/damus/Features/Timeline/Models/HomeModel.swift @@ -454,22 +454,12 @@ class HomeModel: ContactsDelegate, ObservableObject { let id = UUID() Log.info("Initial filter task started with ID %s", for: .homeModel, id.uuidString) let filter = NostrFilter(kinds: [.contacts], limit: 1, authors: [damus_state.pubkey]) - for await item in damus_state.nostrNetwork.reader.subscribe(filters: [filter]) { - switch item { - case .event(let lender): - await lender.justUseACopy({ await process_event(ev: $0, context: .initialContactList) }) - continue - case .eose: - if !done_init { - done_init = true - Log.info("Initial filter task %s: Done initialization; Elapsed time: %.2f seconds", for: .homeModel, id.uuidString, CFAbsoluteTimeGetCurrent() - startTime) - send_home_filters() - } - break - case .ndbEose: - break - case .networkEose: - break + for await event in damus_state.nostrNetwork.reader.streamIndefinitely(filters: [filter]) { + await event.justUseACopy({ await process_event(ev: $0, context: .initialContactList) }) + if !done_init { + done_init = true + Log.info("Initial filter task %s: Done initialization; Elapsed time: %.2f seconds", for: .homeModel, id.uuidString, CFAbsoluteTimeGetCurrent() - startTime) + send_home_filters() } } @@ -477,14 +467,8 @@ class HomeModel: ContactsDelegate, ObservableObject { Task { let relayListFilter = NostrFilter(kinds: [.relay_list], limit: 1, authors: [damus_state.pubkey]) - for await item in damus_state.nostrNetwork.reader.subscribe(filters: [relayListFilter]) { - switch item { - case .event(let lender): - await lender.justUseACopy({ await process_event(ev: $0, context: .initialRelayList) }) - case .eose: break - case .ndbEose: break - case .networkEose: break - } + for await event in damus_state.nostrNetwork.reader.streamIndefinitely(filters: [relayListFilter]) { + await event.justUseACopy({ await process_event(ev: $0, context: .initialRelayList) }) } } } @@ -543,41 +527,25 @@ class HomeModel: ContactsDelegate, ObservableObject { self.contactsHandlerTask?.cancel() self.contactsHandlerTask = Task { - for await item in damus_state.nostrNetwork.reader.subscribe(filters: contacts_filters) { - switch item { - case .event(let lender): - await lender.justUseACopy({ await process_event(ev: $0, context: .contacts) }) - case .eose: continue - case .ndbEose: continue - case .networkEose: continue - } + for await event in damus_state.nostrNetwork.reader.streamIndefinitely(filters: contacts_filters) { + await event.justUseACopy({ await process_event(ev: $0, context: .contacts) }) } } self.notificationsHandlerTask?.cancel() self.notificationsHandlerTask = Task { - for await item in damus_state.nostrNetwork.reader.subscribe(filters: notifications_filters) { - switch item { - case .event(let lender): - await lender.justUseACopy({ await process_event(ev: $0, context: .notifications) }) - case .eose: - guard let txn = NdbTxn(ndb: damus_state.ndb) else { return } - load_profiles(context: "notifications", load: .from_keys(notifications.uniq_pubkeys()), 
damus_state: damus_state, txn: txn) - case .ndbEose: break - case .networkEose: break - } + for await event in damus_state.nostrNetwork.reader.streamIndefinitely(filters: notifications_filters) { + await event.justUseACopy({ await process_event(ev: $0, context: .notifications) }) } } self.dmsHandlerTask?.cancel() self.dmsHandlerTask = Task { - for await item in damus_state.nostrNetwork.reader.subscribe(filters: dms_filters) { + for await item in damus_state.nostrNetwork.reader.advancedStream(filters: dms_filters) { switch item { case .event(let lender): await lender.justUseACopy({ await process_event(ev: $0, context: .dms) }) case .eose: - guard let txn = NdbTxn(ndb: damus_state.ndb) else { return } var dms = dms.dms.flatMap { $0.events } dms.append(contentsOf: incoming_dms) - load_profiles(context: "dms", load: .from_events(dms), damus_state: damus_state, txn: txn) case .ndbEose: break case .networkEose: break } @@ -591,14 +559,8 @@ class HomeModel: ContactsDelegate, ObservableObject { var filter = NostrFilter(kinds: [.nwc_response]) filter.authors = [nwc.pubkey] filter.limit = 0 - for await item in damus_state.nostrNetwork.reader.subscribe(filters: [filter], to: [nwc.relay]) { - switch item { - case .event(let lender): - await lender.justUseACopy({ await process_event(ev: $0, context: .nwc) }) - case .eose: continue - case .ndbEose: continue - case .networkEose: continue - } + for await event in damus_state.nostrNetwork.reader.streamIndefinitely(filters: [filter], to: [nwc.relay]) { + await event.justUseACopy({ await process_event(ev: $0, context: .nwc) }) } } @@ -653,7 +615,7 @@ class HomeModel: ContactsDelegate, ObservableObject { DispatchQueue.main.async { self.loading = true } - for await item in damus_state.nostrNetwork.reader.subscribe(filters: home_filters, id: id) { + for await item in damus_state.nostrNetwork.reader.advancedStream(filters: home_filters, id: id) { switch item { case .event(let lender): let currentTime = CFAbsoluteTimeGetCurrent() @@ -664,20 +626,15 @@ class HomeModel: ContactsDelegate, ObservableObject { let eoseTime = CFAbsoluteTimeGetCurrent() Log.info("Home handler task %s: Received general EOSE after %.2f seconds", for: .homeModel, id.uuidString, eoseTime - startTime) - guard let txn = NdbTxn(ndb: damus_state.ndb) else { return } - load_profiles(context: "home", load: .from_events(events.events), damus_state: damus_state, txn: txn) - let finishTime = CFAbsoluteTimeGetCurrent() Log.info("Home handler task %s: Completed initial loading task after %.2f seconds", for: .homeModel, id.uuidString, eoseTime - startTime) case .ndbEose: let eoseTime = CFAbsoluteTimeGetCurrent() Log.info("Home handler task %s: Received NDB EOSE after %.2f seconds", for: .homeModel, id.uuidString, eoseTime - startTime) - - guard let txn = NdbTxn(ndb: damus_state.ndb) else { return } + DispatchQueue.main.async { self.loading = false } - load_profiles(context: "home", load: .from_events(events.events), damus_state: damus_state, txn: txn) let finishTime = CFAbsoluteTimeGetCurrent() Log.info("Home handler task %s: Completed initial NDB loading task after %.2f seconds", for: .homeModel, id.uuidString, eoseTime - startTime) diff --git a/damus/Features/Timeline/Views/SideMenuView.swift b/damus/Features/Timeline/Views/SideMenuView.swift index a6cbe8dcb..d001e9a3a 100644 --- a/damus/Features/Timeline/Views/SideMenuView.swift +++ b/damus/Features/Timeline/Views/SideMenuView.swift @@ -104,7 +104,7 @@ struct SideMenuView: View { return VStack(alignment: .leading) { HStack(spacing: 10) { - 
ProfilePicView(pubkey: damus_state.pubkey, size: 50, highlight: .none, profiles: damus_state.profiles, disable_animation: damus_state.settings.disable_animation) + ProfilePicView(pubkey: damus_state.pubkey, size: 50, highlight: .none, profiles: damus_state.profiles, disable_animation: damus_state.settings.disable_animation, damusState: damus_state) Spacer() diff --git a/damus/Features/Wallet/Models/WalletModel.swift b/damus/Features/Wallet/Models/WalletModel.swift index f80364853..ecde51215 100644 --- a/damus/Features/Wallet/Models/WalletModel.swift +++ b/damus/Features/Wallet/Models/WalletModel.swift @@ -182,27 +182,18 @@ class WalletModel: ObservableObject { ] nostrNetwork.send(event: requestEvent, to: [currentNwcUrl.relay], skipEphemeralRelays: false) - for await item in nostrNetwork.reader.subscribe(filters: responseFilters, to: [currentNwcUrl.relay], timeout: timeout) { - switch item { - case .event(let lender): - guard let responseEvent = try? lender.getCopy() else { throw .internalError } - - let fullWalletResponse: WalletConnect.FullWalletResponse - do { fullWalletResponse = try WalletConnect.FullWalletResponse(from: responseEvent, nwc: currentNwcUrl) } - catch { throw WalletRequestError.walletResponseDecodingError(error) } - - guard fullWalletResponse.req_id == requestEvent.id else { continue } // Our filters may match other responses - if let responseError = fullWalletResponse.response.error { throw .walletResponseError(responseError) } - - guard let result = fullWalletResponse.response.result else { throw .walletEmptyResponse } - return result - case .eose: - continue - case .ndbEose: - continue - case .networkEose: - continue - } + for await event in nostrNetwork.reader.timedStream(filters: responseFilters, to: [currentNwcUrl.relay], timeout: timeout) { + guard let responseEvent = try? event.getCopy() else { throw .internalError } + + let fullWalletResponse: WalletConnect.FullWalletResponse + do { fullWalletResponse = try WalletConnect.FullWalletResponse(from: responseEvent, nwc: currentNwcUrl) } + catch { throw WalletRequestError.walletResponseDecodingError(error) } + + guard fullWalletResponse.req_id == requestEvent.id else { continue } // Our filters may match other responses + if let responseError = fullWalletResponse.response.error { throw .walletResponseError(responseError) } + + guard let result = fullWalletResponse.response.result else { throw .walletEmptyResponse } + return result } do { try Task.checkCancellation() } catch { throw .cancelled } throw .responseTimeout diff --git a/damus/Features/Wallet/Views/TransactionsView.swift b/damus/Features/Wallet/Views/TransactionsView.swift index e85c392e6..c71ccadee 100644 --- a/damus/Features/Wallet/Views/TransactionsView.swift +++ b/damus/Features/Wallet/Views/TransactionsView.swift @@ -30,7 +30,7 @@ struct TransactionView: View { VStack(alignment: .leading) { HStack(alignment: .center) { ZStack { - ProfilePicView(pubkey: pubkey ?? ANON_PUBKEY, size: 45, highlight: .custom(.damusAdaptableBlack, 0.1), profiles: damus_state.profiles, disable_animation: damus_state.settings.disable_animation, privacy_sensitive: true) + ProfilePicView(pubkey: pubkey ?? 
ANON_PUBKEY, size: 45, highlight: .custom(.damusAdaptableBlack, 0.1), profiles: damus_state.profiles, disable_animation: damus_state.settings.disable_animation, privacy_sensitive: true, damusState: damus_state) .onTapGesture { if let pubkey { damus_state.nav.push(route: Route.ProfileByKey(pubkey: pubkey)) diff --git a/damus/Features/Zaps/Models/ZapsModel.swift b/damus/Features/Zaps/Models/ZapsModel.swift index 9eb063c5a..760472ec8 100644 --- a/damus/Features/Zaps/Models/ZapsModel.swift +++ b/damus/Features/Zaps/Models/ZapsModel.swift @@ -33,21 +33,8 @@ class ZapsModel: ObservableObject { } zapCommsListener?.cancel() zapCommsListener = Task { - for await item in state.nostrNetwork.reader.subscribe(filters: [filter]) { - switch item { - case .event(let lender): - await lender.justUseACopy({ event in - await self.handle_event(ev: event) - }) - case .eose: - let events = state.events.lookup_zaps(target: target).map { $0.request.ev } - guard let txn = NdbTxn(ndb: state.ndb) else { return } - load_profiles(context: "zaps_model", load: .from_events(events), damus_state: state, txn: txn) - case .ndbEose: - break - case .networkEose: - break - } + for await event in state.nostrNetwork.reader.streamIndefinitely(filters: [filter]) { + await event.justUseACopy({ await self.handle_event(ev: $0) }) } } } diff --git a/damus/Shared/Components/QRCodeView.swift b/damus/Shared/Components/QRCodeView.swift index 343f1dd93..03aa2a26f 100644 --- a/damus/Shared/Components/QRCodeView.swift +++ b/damus/Shared/Components/QRCodeView.swift @@ -76,7 +76,7 @@ struct QRCodeView: View { let profile_txn = damus_state.profiles.lookup(id: pubkey, txn_name: "qrview-profile") let profile = profile_txn?.unsafeUnownedValue - ProfilePicView(pubkey: pubkey, size: 90.0, highlight: .custom(DamusColors.white, 3.0), profiles: damus_state.profiles, disable_animation: damus_state.settings.disable_animation) + ProfilePicView(pubkey: pubkey, size: 90.0, highlight: .custom(DamusColors.white, 3.0), profiles: damus_state.profiles, disable_animation: damus_state.settings.disable_animation, damusState: damus_state) .padding(.top, 20) if let display_name = profile?.display_name { diff --git a/damus/Shared/Components/UserView.swift b/damus/Shared/Components/UserView.swift index c83b0c0a7..492c377c8 100644 --- a/damus/Shared/Components/UserView.swift +++ b/damus/Shared/Components/UserView.swift @@ -34,7 +34,7 @@ struct UserView: View { var body: some View { VStack { HStack { - ProfilePicView(pubkey: pubkey, size: PFP_SIZE, highlight: .none, profiles: damus_state.profiles, disable_animation: damus_state.settings.disable_animation) + ProfilePicView(pubkey: pubkey, size: PFP_SIZE, highlight: .none, profiles: damus_state.profiles, disable_animation: damus_state.settings.disable_animation, damusState: damus_state) VStack(alignment: .leading) { ProfileName(pubkey: pubkey, damus: damus_state, show_nip5_domain: false) diff --git a/nostrdb/UnownedNdbNote.swift b/nostrdb/UnownedNdbNote.swift index 550c4b71c..79ef237b0 100644 --- a/nostrdb/UnownedNdbNote.swift +++ b/nostrdb/UnownedNdbNote.swift @@ -110,7 +110,7 @@ enum NdbNoteLender: Sendable { return try self.getCopy() } catch { - assertionFailure("Unexpected error while fetching a copy of an NdbNote: \(error.localizedDescription)") +// assertionFailure("Unexpected error while fetching a copy of an NdbNote: \(error.localizedDescription)") Log.error("Unexpected error while fetching a copy of an NdbNote: %s", for: .ndb, error.localizedDescription) } return nil From 798f9ec7b4c8e87d2726168500b7ac4bc367d8e1 
Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Sun, 21 Sep 2025 11:29:46 -0700 Subject: [PATCH 66/91] Improve loading speeds for home timeline and universe view MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Daniel D’Aquino --- damus/ContentView.swift | 2 +- .../Search/Models/SearchHomeModel.swift | 26 ++++++++++++------- 2 files changed, 17 insertions(+), 11 deletions(-) diff --git a/damus/ContentView.swift b/damus/ContentView.swift index 2984943ba..dc415752a 100644 --- a/damus/ContentView.swift +++ b/damus/ContentView.swift @@ -744,7 +744,7 @@ struct ContentView: View { } damus_state.nostrNetwork.connect() // TODO: Move this to a better spot. Not sure what is the best signal to listen to for sending initial filters - DispatchQueue.main.asyncAfter(deadline: .now() + 3, execute: { + DispatchQueue.main.asyncAfter(deadline: .now() + 0.25, execute: { self.home.send_initial_filters() }) } diff --git a/damus/Features/Search/Models/SearchHomeModel.swift b/damus/Features/Search/Models/SearchHomeModel.swift index 8fbb293e8..4c99fbca9 100644 --- a/damus/Features/Search/Models/SearchHomeModel.swift +++ b/damus/Features/Search/Models/SearchHomeModel.swift @@ -62,16 +62,22 @@ class SearchHomeModel: ObservableObject { var follow_list_filter = NostrFilter(kinds: [.follow_list]) follow_list_filter.until = UInt32(Date.now.timeIntervalSince1970) - for await noteLender in damus_state.nostrNetwork.reader.streamExistingEvents(filters: [follow_list_filter], to: to_relays) { - await noteLender.justUseACopy({ await self.handleFollowPackEvent($0) }) - } - - for await noteLender in damus_state.nostrNetwork.reader.streamExistingEvents(filters: [get_base_filter()], to: to_relays) { - await noteLender.justUseACopy({ await self.handleEvent($0) }) - } - - DispatchQueue.main.async { - self.loading = false + for await item in damus_state.nostrNetwork.reader.advancedStream(filters: [get_base_filter(), follow_list_filter], to: to_relays) { + switch item { + case .event(lender: let lender): + await lender.justUseACopy({ event in + await self.handleFollowPackEvent(event) + await self.handleEvent(event) + }) + case .eose: + break + case .ndbEose: + DispatchQueue.main.async { + self.loading = false + } + case .networkEose: + break + } } } From eda4212aa72cfa94ca5e861f64a10d593bfffc6c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Tue, 23 Sep 2025 18:11:20 -0700 Subject: [PATCH 67/91] Disable refreshable on Universe view MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Updates are streamed from the network, removing the need for a refresh action Signed-off-by: Daniel D’Aquino --- damus/Features/Search/Views/SearchHomeView.swift | 6 ------ 1 file changed, 6 deletions(-) diff --git a/damus/Features/Search/Views/SearchHomeView.swift b/damus/Features/Search/Views/SearchHomeView.swift index be619bbc9..58f968357 100644 --- a/damus/Features/Search/Views/SearchHomeView.swift +++ b/damus/Features/Search/Views/SearchHomeView.swift @@ -126,12 +126,6 @@ struct SearchHomeView: View { .onDisappear { loadingTask?.cancel() } - .refreshable { - // Fetch new information by unsubscribing and resubscribing to the relay - loadingTask?.cancel() - loadingTask = Task { await model.reload() } - try? 
await loadingTask?.value - } } } From e55675a3364363a8d83e5a76791ee905333ccb5a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Wed, 24 Sep 2025 16:15:52 -0700 Subject: [PATCH 68/91] Optimize HomeModel subscription usage MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This reduces the overall subscription usage throughout the app, thus reducing issues associated with too many subscriptions being used at once, and the resulting staleness. Signed-off-by: Daniel D’Aquino --- .../Features/Timeline/Models/HomeModel.swift | 45 ++++++------------- 1 file changed, 14 insertions(+), 31 deletions(-) diff --git a/damus/Features/Timeline/Models/HomeModel.swift b/damus/Features/Timeline/Models/HomeModel.swift index 125fffa5f..913cd8a61 100644 --- a/damus/Features/Timeline/Models/HomeModel.swift +++ b/damus/Features/Timeline/Models/HomeModel.swift @@ -66,9 +66,8 @@ class HomeModel: ContactsDelegate, ObservableObject { var should_debounce_dms = true var homeHandlerTask: Task? - var contactsHandlerTask: Task? var notificationsHandlerTask: Task? - var dmsHandlerTask: Task? + var generalHandlerTask: Task? var nwcHandlerTask: Task? @Published var loading: Bool = true @@ -454,8 +453,8 @@ class HomeModel: ContactsDelegate, ObservableObject { let id = UUID() Log.info("Initial filter task started with ID %s", for: .homeModel, id.uuidString) let filter = NostrFilter(kinds: [.contacts], limit: 1, authors: [damus_state.pubkey]) - for await event in damus_state.nostrNetwork.reader.streamIndefinitely(filters: [filter]) { - await event.justUseACopy({ await process_event(ev: $0, context: .initialContactList) }) + for await event in damus_state.nostrNetwork.reader.streamExistingEvents(filters: [filter]) { + await event.justUseACopy({ await process_event(ev: $0, context: .other) }) if !done_init { done_init = true Log.info("Initial filter task %s: Done initialization; Elapsed time: %.2f seconds", for: .homeModel, id.uuidString, CFAbsoluteTimeGetCurrent() - startTime) @@ -464,13 +463,6 @@ class HomeModel: ContactsDelegate, ObservableObject { } } - - Task { - let relayListFilter = NostrFilter(kinds: [.relay_list], limit: 1, authors: [damus_state.pubkey]) - for await event in damus_state.nostrNetwork.reader.streamIndefinitely(filters: [relayListFilter]) { - await event.justUseACopy({ await process_event(ev: $0, context: .initialRelayList) }) - } - } } /// After initial connection or reconnect, send subscription filters for the home timeline, DMs, and notifications @@ -517,36 +509,31 @@ class HomeModel: ContactsDelegate, ObservableObject { var notifications_filters = [notifications_filter] let contacts_filter_chunks = contacts_filter.chunked(on: .authors, into: MAX_CONTACTS_ON_FILTER) - var contacts_filters = contacts_filter_chunks + [our_contacts_filter, our_blocklist_filter, our_old_blocklist_filter] + var contacts_filters = [our_contacts_filter, our_blocklist_filter, our_old_blocklist_filter] + contacts_filter_chunks var dms_filters = [dms_filter, our_dms_filter] //print_filters(relay_id: relay_id, filters: [home_filters, contacts_filters, notifications_filters, dms_filters]) subscribe_to_home_filters() - - self.contactsHandlerTask?.cancel() - self.contactsHandlerTask = Task { - for await event in damus_state.nostrNetwork.reader.streamIndefinitely(filters: contacts_filters) { - await event.justUseACopy({ await process_event(ev: $0, context: .contacts) }) - } - } self.notificationsHandlerTask?.cancel() self.notificationsHandlerTask = Task { for await event 
in damus_state.nostrNetwork.reader.streamIndefinitely(filters: notifications_filters) { await event.justUseACopy({ await process_event(ev: $0, context: .notifications) }) } } - self.dmsHandlerTask?.cancel() - self.dmsHandlerTask = Task { - for await item in damus_state.nostrNetwork.reader.advancedStream(filters: dms_filters) { + self.generalHandlerTask?.cancel() + self.generalHandlerTask = Task { + for await item in damus_state.nostrNetwork.reader.advancedStream(filters: dms_filters + contacts_filters) { switch item { case .event(let lender): - await lender.justUseACopy({ await process_event(ev: $0, context: .dms) }) + await lender.justUseACopy({ await process_event(ev: $0, context: .other) }) case .eose: var dms = dms.dms.flatMap { $0.events } dms.append(contentsOf: incoming_dms) - case .ndbEose: break + case .ndbEose: + var dms = dms.dms.flatMap { $0.events } + dms.append(contentsOf: incoming_dms) case .networkEose: break } } @@ -560,7 +547,7 @@ class HomeModel: ContactsDelegate, ObservableObject { filter.authors = [nwc.pubkey] filter.limit = 0 for await event in damus_state.nostrNetwork.reader.streamIndefinitely(filters: [filter], to: [nwc.relay]) { - await event.justUseACopy({ await process_event(ev: $0, context: .nwc) }) + await event.justUseACopy({ await process_event(ev: $0, context: .other) }) } } @@ -647,13 +634,9 @@ class HomeModel: ContactsDelegate, ObservableObject { /// Adapter pattern to make migration easier enum SubscriptionContext { - case initialContactList - case initialRelayList case home case notifications - case dms - case contacts - case nwc + case other } @MainActor @@ -780,7 +763,7 @@ class HomeModel: ContactsDelegate, ObservableObject { Task { await insert_home_event(ev) } case .notifications: handle_notification(ev: ev) - case .dms, .contacts, .initialRelayList, .initialContactList, .nwc: + case .other: break } } From fe491bf69467350498cd5dc1e58507ed21dcf43a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Wed, 24 Sep 2025 17:52:38 -0700 Subject: [PATCH 69/91] Increase MAX_CONCURRENT_SUBSCRIPTION_LIMIT MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Through some local experimentation, it seems that network relays can support higher subscription limits. Increase internal limits to avoid hitting issues with subscriptions waiting on subscription pool to clear and appearing stale. Signed-off-by: Daniel D’Aquino --- damus/Core/Nostr/RelayPool.swift | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/damus/Core/Nostr/RelayPool.swift b/damus/Core/Nostr/RelayPool.swift index ffa38c0bb..0d5f0b7da 100644 --- a/damus/Core/Nostr/RelayPool.swift +++ b/damus/Core/Nostr/RelayPool.swift @@ -45,8 +45,8 @@ class RelayPool { private var last_network_status: NWPath.Status = .unsatisfied /// The limit of maximum concurrent subscriptions. Any subscriptions beyond this limit will be paused until subscriptions clear - /// This is to avoid error states and undefined behaviour related to hitting subscription limits on the relays, by letting those wait instead — with the principle that slower is better than broken. - static let MAX_CONCURRENT_SUBSCRIPTION_LIMIT = 10 // This number is only an educated guess at this point. + /// This is to avoid error states and undefined behaviour related to hitting subscription limits on the relays, by letting those wait instead — with the principle that although slower is not ideal, it is better than completely broken. 
+ static let MAX_CONCURRENT_SUBSCRIPTION_LIMIT = 14 // This number is only an educated guess based on some local experiments. func close() { disconnect() From 9153a912b0c5951b33c69067ef54644ab8c15bf6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Wed, 24 Sep 2025 17:55:35 -0700 Subject: [PATCH 70/91] Cancel timeout task on stream cancellation MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Daniel D’Aquino --- damus/Core/Nostr/RelayPool.swift | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/damus/Core/Nostr/RelayPool.swift b/damus/Core/Nostr/RelayPool.swift index 0d5f0b7da..7be726d6a 100644 --- a/damus/Core/Nostr/RelayPool.swift +++ b/damus/Core/Nostr/RelayPool.swift @@ -269,13 +269,22 @@ class RelayPool { } } }, to: desiredRelays) - Task { + let timeoutTask = Task { try? await Task.sleep(for: eoseTimeout) if !eoseSent { continuation.yield(with: .success(.eose)) } } - continuation.onTermination = { @Sendable _ in + continuation.onTermination = { @Sendable termination in + switch termination { + case .finished: + Log.debug("RelayPool subscription %s finished. Closing.", for: .networking, sub_id) + case .cancelled: + Log.debug("RelayPool subscription %s cancelled. Closing.", for: .networking, sub_id) + @unknown default: + break + } self.unsubscribe(sub_id: sub_id, to: desiredRelays) self.remove_handler(sub_id: sub_id) + timeoutTask.cancel() } } } From 258d08723fd3cf5afa2010a0051390b5d0b452ba Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Fri, 26 Sep 2025 12:04:51 -0700 Subject: [PATCH 71/91] Check if Ndb is closed before running subscribe and query operations MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This should prevent background crashes caused by race conditions between usages of Ndb and the Ndb/app lifecycle operations. 
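Roughly, the guard pattern being added is "check a closed flag before touching the handle, and fail with a dedicated error instead of crashing". A minimal sketch of that idea, with simplified, hypothetical names (the actual change guards Ndb.query/subscribe with is_closed and a new ndbClosed error case, as shown in the diff that follows):

    // Minimal illustration of guarding storage access against a closed handle.
    enum StoreError: Error {
        case closed       // surfaced instead of touching torn-down resources
        case queryFailed
    }

    final class Store {
        private(set) var isClosed = false

        func close() {
            // Lifecycle code (e.g. app backgrounding) flips the flag before tearing down.
            isClosed = true
        }

        func query(_ filter: String) throws -> [String] {
            // Fail fast if a late caller races with shutdown.
            guard !isClosed else { throw StoreError.closed }
            return [] // ... perform the real lookup here
        }
    }
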
Signed-off-by: Daniel D’Aquino --- nostrdb/Ndb.swift | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/nostrdb/Ndb.swift b/nostrdb/Ndb.swift index 134d4f0d4..0dabf95de 100644 --- a/nostrdb/Ndb.swift +++ b/nostrdb/Ndb.swift @@ -623,6 +623,7 @@ class Ndb { /// - Returns: Array of note keys matching the filters /// - Throws: NdbStreamError if the query fails func query(with txn: NdbTxn, filters: [NdbFilter], maxResults: Int) throws(NdbStreamError) -> [NoteKey] { + guard !self.is_closed else { throw .ndbClosed } let filtersPointer = UnsafeMutablePointer.allocate(capacity: filters.count) defer { filtersPointer.deallocate() } @@ -636,6 +637,7 @@ class Ndb { let results = UnsafeMutablePointer.allocate(capacity: maxResults) defer { results.deallocate() } + guard !self.is_closed else { throw .ndbClosed } guard ndb_query(&txn.txn, filtersPointer, Int32(filters.count), results, Int32(maxResults), count) == 1 else { throw NdbStreamError.initialQueryFailed } @@ -707,6 +709,7 @@ class Ndb { } func subscribe(filters: [NdbFilter], maxSimultaneousResults: Int = 1000) throws(NdbStreamError) -> AsyncStream { + guard !self.is_closed else { throw .ndbClosed } // Fetch initial results guard let txn = NdbTxn(ndb: self) else { throw .cannotOpenTransaction } @@ -914,6 +917,7 @@ extension Ndb { case initialQueryFailed case timeout case cancelled + case ndbClosed } /// An error that may happen when looking something up From fe62aea08af4f3aaeacc101a4498195efa3dd5ab Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Fri, 26 Sep 2025 13:01:05 -0700 Subject: [PATCH 72/91] Stop ProfileManager when app is being backgrounded MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This should prevent RUNNINGBOARD 0xdead10cc crashes related to ProfileManager and app background states. 
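The shutdown pattern here is essentially "cancel, then await", so that no streaming task is still touching the database when the app gets suspended. A minimal sketch of that idea, with simplified, hypothetical names (the actual changes to NostrNetworkManager.close() and ProfilesManager.stop() are in the diff below):

    // Cancel a long-running listener task and wait for it to actually finish.
    actor Listener {
        private var task: Task<Void, Never>? = nil

        func start() {
            task = Task {
                while !Task.isCancelled {
                    // ... stream and process events here
                    try? await Task.sleep(for: .seconds(1))
                }
            }
        }

        func stop() async {
            task?.cancel()
            await task?.value   // suspend until the loop has really exited
            task = nil
        }
    }
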
Signed-off-by: Daniel D’Aquino --- .../NostrNetworkManager.swift | 16 +++++++--------- .../NostrNetworkManager/ProfilesManager.swift | 4 +++- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift b/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift index 1157314bf..4010e998d 100644 --- a/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift +++ b/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift @@ -47,7 +47,7 @@ class NostrNetworkManager { self.profilesManager = ProfilesManager(subscriptionManager: reader, ndb: delegate.ndb) } - // MARK: - Control functions + // MARK: - Control and lifecycle functions /// Connects the app to the Nostr network func connect() { @@ -60,6 +60,12 @@ class NostrNetworkManager { self.pool.disconnect() } + func close() async { + await self.reader.cancelAllTasks() + await self.profilesManager.stop() + pool.close() + } + func ping() { self.pool.ping() } @@ -137,14 +143,6 @@ class NostrNetworkManager { print("damus-donation donating...") WalletConnect.pay(url: nwc, pool: self.pool, post: self.postbox, invoice: invoice, zap_request: nil, delay: nil) } - - - // MARK: - App lifecycle functions - - func close() async { - await self.reader.cancelAllTasks() - pool.close() - } } diff --git a/damus/Core/Networking/NostrNetworkManager/ProfilesManager.swift b/damus/Core/Networking/NostrNetworkManager/ProfilesManager.swift index 2b0f462db..3d528994b 100644 --- a/damus/Core/Networking/NostrNetworkManager/ProfilesManager.swift +++ b/damus/Core/Networking/NostrNetworkManager/ProfilesManager.swift @@ -49,9 +49,11 @@ extension NostrNetworkManager { } } - func stop() { + func stop() async { self.subscriptionSwitcherTask?.cancel() self.profileListenerTask?.cancel() + try? await self.subscriptionSwitcherTask?.value + try? await self.profileListenerTask?.value } private func restartProfileListenerTask() { From 1b5f107ac6b88bea26af617b81b87dce0e5e059b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Mon, 29 Sep 2025 16:39:14 -0700 Subject: [PATCH 73/91] Add more safeguards to prevent RUNNINGBOARD 0xdead10cc crashes MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This commit adds more safeguards to prevent RUNNINGBOARD 0xdead10cc crashes, by: 1. Using the `beginBackgroundTask(withName:expirationHandler:)` to request additional background execution time before completely suspending the app. See https://developer.apple.com/documentation/xcode/sigkill 2. Reorganizing app closing/cleanup tasks to be done in parallel when possible to decrease time needed to cleanup resources. Signed-off-by: Daniel D’Aquino --- damus/ContentView.swift | 12 ++++++++++++ .../NostrNetworkManager.swift | 15 ++++++++++++--- .../NostrNetworkManager/ProfilesManager.swift | 19 +++++++++++++++---- .../SubscriptionManager.swift | 19 ++++++++++++++----- damus/Shared/Utilities/Log.swift | 1 + 5 files changed, 54 insertions(+), 12 deletions(-) diff --git a/damus/ContentView.swift b/damus/ContentView.swift index dc415752a..1eb19bffa 100644 --- a/damus/ContentView.swift +++ b/damus/ContentView.swift @@ -513,9 +513,21 @@ struct ContentView: View { switch phase { case .background: print("txn: 📙 DAMUS BACKGROUNDED") + let bgTask = this_app.beginBackgroundTask(withName: "Closing things down gracefully", expirationHandler: { [weak damus_state] in + Log.error("App background signal handling: RUNNING OUT OF TIME! 
JUST CLOSE NDB DIRECTLY!", for: .app_lifecycle) + // Background time about to expire, so close ndb directly. + // This may still cause a memory error crash if subscription tasks have not been properly closed yet, but that is less likely than a 0xdead10cc crash if we don't do anything here. + damus_state?.ndb.close() + }) + damusClosingTask = Task { @MainActor in + Log.debug("App background signal handling: App being backgrounded", for: .app_lifecycle) + let startTime = CFAbsoluteTimeGetCurrent() await damus_state.nostrNetwork.close() // Close ndb streaming tasks before closing ndb to avoid memory errors + Log.debug("App background signal handling: Nostr network closed after %.2f seconds", for: .app_lifecycle, CFAbsoluteTimeGetCurrent() - startTime) damus_state.ndb.close() + Log.debug("App background signal handling: Ndb closed after %.2f seconds", for: .app_lifecycle, CFAbsoluteTimeGetCurrent() - startTime) + this_app.endBackgroundTask(bgTask) } break case .inactive: diff --git a/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift b/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift index 4010e998d..5b27699f0 100644 --- a/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift +++ b/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift @@ -61,9 +61,18 @@ class NostrNetworkManager { } func close() async { - await self.reader.cancelAllTasks() - await self.profilesManager.stop() - pool.close() + await withTaskGroup { group in + // Spawn each cancellation task in parallel for faster execution speed + group.addTask { + await self.reader.cancelAllTasks() + } + group.addTask { + await self.profilesManager.stop() + } + pool.close() + // But await on each one to prevent race conditions + for await value in group { continue } + } } func ping() { diff --git a/damus/Core/Networking/NostrNetworkManager/ProfilesManager.swift b/damus/Core/Networking/NostrNetworkManager/ProfilesManager.swift index 3d528994b..43535efba 100644 --- a/damus/Core/Networking/NostrNetworkManager/ProfilesManager.swift +++ b/damus/Core/Networking/NostrNetworkManager/ProfilesManager.swift @@ -42,6 +42,7 @@ extension NostrNetworkManager { try await Task.sleep(for: .seconds(1)) try Task.checkCancellation() if subscriptionNeedsUpdate { + try Task.checkCancellation() self.restartProfileListenerTask() subscriptionNeedsUpdate = false } @@ -50,10 +51,19 @@ extension NostrNetworkManager { } func stop() async { - self.subscriptionSwitcherTask?.cancel() - self.profileListenerTask?.cancel() - try? await self.subscriptionSwitcherTask?.value - try? await self.profileListenerTask?.value + await withTaskGroup { group in + // Spawn each cancellation in parallel for better execution speed + group.addTask { + await self.subscriptionSwitcherTask?.cancel() + try? await self.subscriptionSwitcherTask?.value + } + group.addTask { + await self.profileListenerTask?.cancel() + try? await self.profileListenerTask?.value + } + // But await for all of them to be done before returning to avoid race conditions + for await value in group { continue } + } } private func restartProfileListenerTask() { @@ -70,6 +80,7 @@ extension NostrNetworkManager { let pubkeys = Array(streams.keys) guard pubkeys.count > 0 else { return } let profileFilter = NostrFilter(kinds: [.metadata], authors: pubkeys) + try Task.checkCancellation() for await ndbLender in self.subscriptionManager.streamIndefinitely(filters: [profileFilter], streamMode: .ndbFirst) { try Task.checkCancellation() try? 
ndbLender.borrow { ev in diff --git a/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift b/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift index 0207f1218..86fa8b79b 100644 --- a/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift +++ b/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift @@ -387,12 +387,21 @@ extension NostrNetworkManager { } func cancelAllTasks() async { - Log.info("Cancelling all SubscriptionManager tasks", for: .subscription_manager) - for (taskId, _) in self.tasks { - Log.info("Cancelling SubscriptionManager task %s", for: .subscription_manager, taskId.uuidString) - await cancelAndCleanUp(taskId: taskId) + await withTaskGroup { group in + Log.info("Cancelling all SubscriptionManager tasks", for: .subscription_manager) + // Start each task cancellation in parallel for faster execution + for (taskId, _) in self.tasks { + Log.info("Cancelling SubscriptionManager task %s", for: .subscription_manager, taskId.uuidString) + group.addTask { + await self.cancelAndCleanUp(taskId: taskId) + } + } + // However, wait until all cancellations are complete to avoid race conditions. + for await value in group { + continue + } + Log.info("Cancelled all SubscriptionManager tasks", for: .subscription_manager) } - Log.info("Cancelled all SubscriptionManager tasks", for: .subscription_manager) } } } diff --git a/damus/Shared/Utilities/Log.swift b/damus/Shared/Utilities/Log.swift index 216e7e530..81a8d3ce5 100644 --- a/damus/Shared/Utilities/Log.swift +++ b/damus/Shared/Utilities/Log.swift @@ -14,6 +14,7 @@ enum LogCategory: String { case render case storage case networking + case app_lifecycle case subscription_manager case timeline /// Logs related to Nostr Wallet Connect components From 32e8c1b6e1af40732ce2bd80aec5b50a11303023 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Wed, 1 Oct 2025 11:18:15 -0700 Subject: [PATCH 74/91] Improve logging in SubscriptionManager MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Use Apple's unified logging system, and specify proper privacy levels for each piece of information. Signed-off-by: Daniel D’Aquino --- .../SubscriptionManager.swift | 50 ++++++++++++------- 1 file changed, 31 insertions(+), 19 deletions(-) diff --git a/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift b/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift index 86fa8b79b..4a7eaf986 100644 --- a/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift +++ b/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift @@ -5,6 +5,8 @@ // Created by Daniel D’Aquino on 2025-03-25. // import Foundation +import os + extension NostrNetworkManager { /// Reads or fetches information from RelayPool and NostrDB, and provides an easier and unified higher-level interface. @@ -18,6 +20,11 @@ extension NostrNetworkManager { private var taskManager: TaskManager private let experimentalLocalRelayModelSupport: Bool + private static let logger = Logger( + subsystem: Constants.MAIN_APP_BUNDLE_IDENTIFIER, + category: "subscription_manager" + ) + let EXTRA_VERBOSE_LOGGING: Bool = false init(pool: RelayPool, ndb: Ndb, experimentalLocalRelayModelSupport: Bool) { @@ -110,33 +117,33 @@ extension NostrNetworkManager { return AsyncStream { continuation in let subscriptionId = id ?? 
UUID() let startTime = CFAbsoluteTimeGetCurrent() - Log.info("Starting subscription %s: %s", for: .subscription_manager, subscriptionId.uuidString, filters.debugDescription) + Self.logger.info("Starting subscription \(subscriptionId.uuidString, privacy: .public): \(filters.debugDescription, privacy: .private)") let multiSessionStreamingTask = Task { while !Task.isCancelled { do { guard !self.ndb.is_closed else { - Log.info("%s: Ndb closed. Sleeping for 1 second before resuming.", for: .subscription_manager, subscriptionId.uuidString) + Self.logger.info("\(subscriptionId.uuidString, privacy: .public): Ndb closed. Sleeping for 1 second before resuming.") try await Task.sleep(nanoseconds: 1_000_000_000) continue } guard self.pool.open else { - Log.info("%s: RelayPool closed. Sleeping for 1 second before resuming.", for: .subscription_manager, subscriptionId.uuidString) + Self.logger.info("\(subscriptionId.uuidString, privacy: .public): RelayPool closed. Sleeping for 1 second before resuming.") try await Task.sleep(nanoseconds: 1_000_000_000) continue } - Log.info("%s: Streaming.", for: .subscription_manager, subscriptionId.uuidString) + Self.logger.info("\(subscriptionId.uuidString, privacy: .public): Streaming.") for await item in self.sessionSubscribe(filters: filters, to: desiredRelays, streamMode: streamMode, id: id) { try Task.checkCancellation() continuation.yield(item) } - Log.info("%s: Session subscription ended. Sleeping for 1 second before resuming.", for: .subscription_manager, subscriptionId.uuidString) + Self.logger.info("\(subscriptionId.uuidString, privacy: .public): Session subscription ended. Sleeping for 1 second before resuming.") try await Task.sleep(nanoseconds: 1_000_000_000) } catch { - Log.error("%s: Error: %s", for: .subscription_manager, subscriptionId.uuidString, error.localizedDescription) + Self.logger.error("Session subscription \(subscriptionId.uuidString, privacy: .public): Error: \(error.localizedDescription, privacy: .public)") } } - Log.info("%s: Terminated.", for: .subscription_manager, subscriptionId.uuidString) + Self.logger.info("\(subscriptionId.uuidString, privacy: .public): Terminated.") } let timeoutTask = Task { if let timeout { @@ -145,7 +152,7 @@ extension NostrNetworkManager { } } continuation.onTermination = { @Sendable _ in - Log.info("%s: Cancelled.", for: .subscription_manager, subscriptionId.uuidString) + Self.logger.info("\(subscriptionId.uuidString, privacy: .public): Cancelled.") multiSessionStreamingTask.cancel() timeoutTask.cancel() } @@ -167,7 +174,7 @@ extension NostrNetworkManager { let streamMode = streamMode ?? defaultStreamMode() return AsyncStream { continuation in let startTime = CFAbsoluteTimeGetCurrent() - Log.debug("Session subscription %s: Started", for: .subscription_manager, id.uuidString) + Self.logger.debug("Session subscription \(id.uuidString, privacy: .public): Started") var ndbEOSEIssued = false var networkEOSEIssued = false @@ -182,7 +189,7 @@ extension NostrNetworkManager { } if canIssueEOSE { - Log.debug("Session subscription %s: Issued EOSE for session. Elapsed: %.2f seconds", for: .subscription_manager, id.uuidString, CFAbsoluteTimeGetCurrent() - startTime) + Self.logger.debug("Session subscription \(id.uuidString, privacy: .public): Issued EOSE for session. 
Elapsed: \(CFAbsoluteTimeGetCurrent() - startTime, format: .fixed(precision: 2), privacy: .public) seconds") continuation.yield(.eose) } } @@ -193,7 +200,7 @@ extension NostrNetworkManager { try Task.checkCancellation() switch item { case .eose: - Log.debug("Session subscription %s: Received EOSE from nostrdb. Elapsed: %.2f seconds", for: .subscription_manager, id.uuidString, CFAbsoluteTimeGetCurrent() - startTime) + Self.logger.debug("Session subscription \(id.uuidString, privacy: .public): Received EOSE from nostrdb. Elapsed: \(CFAbsoluteTimeGetCurrent() - startTime, format: .fixed(precision: 2), privacy: .public) seconds") continuation.yield(.ndbEose) ndbEOSEIssued = true yieldEOSEIfReady() @@ -211,7 +218,7 @@ extension NostrNetworkManager { } } catch { - Log.error("Session subscription %s: NDB streaming error: %s", for: .subscription_manager, id.uuidString, error.localizedDescription) + Self.logger.error("Session subscription \(id.uuidString, privacy: .public): NDB streaming error: \(error.localizedDescription, privacy: .public)") } continuation.finish() } @@ -224,7 +231,7 @@ extension NostrNetworkManager { switch item { case .event(let event): if EXTRA_VERBOSE_LOGGING { - Log.debug("Session subscription %s: Received kind %d event with id %s from the network", for: .subscription_manager, id.uuidString, event.kind, event.id.hex()) + Self.logger.debug("Session subscription \(id.uuidString, privacy: .public): Received kind \(event.kind, privacy: .public) event with id \(event.id.hex(), privacy: .private) from the network") } switch streamMode { case .ndbFirst: @@ -233,7 +240,7 @@ extension NostrNetworkManager { continuation.yield(.event(lender: NdbNoteLender(ownedNdbNote: event))) } case .eose: - Log.debug("Session subscription %s: Received EOSE from the network. Elapsed: %.2f seconds", for: .subscription_manager, id.uuidString, CFAbsoluteTimeGetCurrent() - startTime) + Self.logger.debug("Session subscription \(id.uuidString, privacy: .public): Received EOSE from the network. 
Elapsed: \(CFAbsoluteTimeGetCurrent() - startTime, format: .fixed(precision: 2), privacy: .public) seconds") continuation.yield(.networkEose) networkEOSEIssued = true yieldEOSEIfReady() @@ -241,7 +248,7 @@ extension NostrNetworkManager { } } catch { - Log.error("Session subscription %s: Network streaming error: %s", for: .subscription_manager, id.uuidString, error.localizedDescription) + Self.logger.error("Session subscription \(id.uuidString, privacy: .public): Network streaming error: \(error.localizedDescription, privacy: .public)") } continuation.finish() } @@ -373,6 +380,11 @@ extension NostrNetworkManager { actor TaskManager { private var tasks: [UUID: Task] = [:] + private static let logger = Logger( + subsystem: "com.jb55.damus", + category: "subscription_manager.task_manager" + ) + func add(task: Task) -> UUID { let taskId = UUID() self.tasks[taskId] = task @@ -387,11 +399,11 @@ extension NostrNetworkManager { } func cancelAllTasks() async { - await withTaskGroup { group in - Log.info("Cancelling all SubscriptionManager tasks", for: .subscription_manager) + await withTaskGroup { group in + Self.logger.info("Cancelling all SubscriptionManager tasks") // Start each task cancellation in parallel for faster execution for (taskId, _) in self.tasks { - Log.info("Cancelling SubscriptionManager task %s", for: .subscription_manager, taskId.uuidString) + Self.logger.info("Cancelling SubscriptionManager task \(taskId.uuidString, privacy: .public)") group.addTask { await self.cancelAndCleanUp(taskId: taskId) } @@ -400,7 +412,7 @@ extension NostrNetworkManager { for await value in group { continue } - Log.info("Cancelled all SubscriptionManager tasks", for: .subscription_manager) + Self.logger.info("Cancelled all SubscriptionManager tasks") } } } From 84c4594d30eb3e18b8ba3174f42b97035bc95b9a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Wed, 1 Oct 2025 14:23:57 -0700 Subject: [PATCH 75/91] Fix timeline staleness MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Daniel D’Aquino --- damus/ContentView.swift | 7 +++-- .../NostrNetworkManager.swift | 11 ++++--- .../SubscriptionManager.swift | 4 +-- .../UserRelayListManager.swift | 7 ++++- damus/Core/Nostr/RelayPool.swift | 29 ++++++++++++------- 5 files changed, 37 insertions(+), 21 deletions(-) diff --git a/damus/ContentView.swift b/damus/ContentView.swift index 1eb19bffa..3dddfc963 100644 --- a/damus/ContentView.swift +++ b/damus/ContentView.swift @@ -475,7 +475,7 @@ struct ContentView: View { } } .onReceive(handle_notify(.disconnect_relays)) { () in - damus_state.nostrNetwork.disconnect() + damus_state.nostrNetwork.disconnectRelays() } .onReceive(NotificationCenter.default.publisher(for: UIApplication.willEnterForegroundNotification)) { obj in print("txn: 📙 DAMUS ACTIVE NOTIFY") @@ -523,7 +523,7 @@ struct ContentView: View { damusClosingTask = Task { @MainActor in Log.debug("App background signal handling: App being backgrounded", for: .app_lifecycle) let startTime = CFAbsoluteTimeGetCurrent() - await damus_state.nostrNetwork.close() // Close ndb streaming tasks before closing ndb to avoid memory errors + await damus_state.nostrNetwork.handleAppBackgroundRequest() // Close ndb streaming tasks before closing ndb to avoid memory errors Log.debug("App background signal handling: Nostr network closed after %.2f seconds", for: .app_lifecycle, CFAbsoluteTimeGetCurrent() - startTime) damus_state.ndb.close() Log.debug("App background signal handling: Ndb closed after 
%.2f seconds", for: .app_lifecycle, CFAbsoluteTimeGetCurrent() - startTime) @@ -537,7 +537,8 @@ struct ContentView: View { print("txn: 📙 DAMUS ACTIVE") Task { await damusClosingTask?.value // Wait for the closing task to finish before reopening things, to avoid race conditions - damus_state.nostrNetwork.connect() + damusClosingTask = nil + // Pinging the network will automatically reconnect any dead websocket connections damus_state.nostrNetwork.ping() } @unknown default: diff --git a/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift b/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift index 5b27699f0..5e17c200c 100644 --- a/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift +++ b/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift @@ -51,15 +51,18 @@ class NostrNetworkManager { /// Connects the app to the Nostr network func connect() { - self.userRelayList.connect() - self.pool.open = true + self.userRelayList.connect() // Will load the user's list, apply it, and get RelayPool to connect to it. Task { await self.profilesManager.load() } } - func disconnect() { + func disconnectRelays() { self.pool.disconnect() } + func handleAppBackgroundRequest() async { + await self.reader.cancelAllTasks() + } + func close() async { await withTaskGroup { group in // Spawn each cancellation task in parallel for faster execution speed @@ -69,9 +72,9 @@ class NostrNetworkManager { group.addTask { await self.profilesManager.stop() } - pool.close() // But await on each one to prevent race conditions for await value in group { continue } + pool.close() } } diff --git a/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift b/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift index 4a7eaf986..13420840b 100644 --- a/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift +++ b/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift @@ -254,13 +254,13 @@ extension NostrNetworkManager { } Task { + // Add the ndb streaming task to the task manager so that it can be cancelled when the app is backgrounded let ndbStreamTaskId = await self.taskManager.add(task: ndbStreamTask) - let streamTaskId = await self.taskManager.add(task: streamTask) continuation.onTermination = { @Sendable _ in Task { await self.taskManager.cancelAndCleanUp(taskId: ndbStreamTaskId) - await self.taskManager.cancelAndCleanUp(taskId: streamTaskId) + streamTask.cancel() } } } diff --git a/damus/Core/Networking/NostrNetworkManager/UserRelayListManager.swift b/damus/Core/Networking/NostrNetworkManager/UserRelayListManager.swift index cd4392baf..104b2a925 100644 --- a/damus/Core/Networking/NostrNetworkManager/UserRelayListManager.swift +++ b/damus/Core/Networking/NostrNetworkManager/UserRelayListManager.swift @@ -236,9 +236,14 @@ extension NostrNetworkManager { ) changed = true } + + // Always tell RelayPool to connect whether or not we are already connected. + // This is because: + // 1. Internally it won't redo the connection because of internal checks + // 2. 
Even if the relay list has not changed, relays may have been disconnected from app lifecycle or other events + pool.connect() if changed { - pool.connect() notify(.relays_changed) } } diff --git a/damus/Core/Nostr/RelayPool.swift b/damus/Core/Nostr/RelayPool.swift index 7be726d6a..7a0d2eca2 100644 --- a/damus/Core/Nostr/RelayPool.swift +++ b/damus/Core/Nostr/RelayPool.swift @@ -10,7 +10,7 @@ import Network struct RelayHandler { let sub_id: String - let callback: (RelayURL, NostrConnectionEvent) -> () + var callback: (RelayURL, NostrConnectionEvent) -> () } struct QueuedRequest { @@ -95,7 +95,7 @@ class RelayPool { func remove_handler(sub_id: String) { self.handlers = handlers.filter { $0.sub_id != sub_id } - print("removing \(sub_id) handler, current: \(handlers.count)") + Log.debug("Removing %s handler, current: %d", for: .networking, sub_id, handlers.count) } func ping() { @@ -112,16 +112,17 @@ class RelayPool { try? await Task.sleep(for: .seconds(1)) } Log.debug("%s: Subscription pool cleared", for: .networking, sub_id) - for handler in handlers { - // don't add duplicate handlers + handlers = handlers.filter({ handler in if handler.sub_id == sub_id { - assertionFailure("Duplicate handlers are not allowed. Proper error handling for this has not been built yet.") - Log.error("Duplicate handlers are not allowed. Error handling for this has not been built yet.", for: .networking) - return + Log.error("Duplicate handler detected for the same subscription ID. Overriding.", for: .networking) + return false } - } + else { + return true + } + }) self.handlers.append(RelayHandler(sub_id: sub_id, callback: handler)) - print("registering \(sub_id) handler, current: \(self.handlers.count)") + Log.debug("Registering %s handler, current: %d", for: .networking, sub_id, self.handlers.count) } func remove_relay(_ relay_id: RelayURL) { @@ -194,14 +195,17 @@ class RelayPool { } func connect(to: [RelayURL]? = nil) { - open = true let relays = to.map{ get_relays($0) } ?? self.relays for relay in relays { relay.connection.connect() } + // Mark as open last, to prevent other classes from pulling data before the relays are actually connected + open = true } func disconnect(to: [RelayURL]? = nil) { + // Mark as closed first, to prevent other classes from pulling data while the relays are being disconnected + open = false let relays = to.map{ get_relays($0) } ?? self.relays for relay in relays { relay.connection.disconnect() @@ -218,9 +222,11 @@ class RelayPool { func subscribe(sub_id: String, filters: [NostrFilter], handler: @escaping (RelayURL, NostrConnectionEvent) -> (), to: [RelayURL]? = nil) { Task { await register_handler(sub_id: sub_id, handler: handler) + // When the caller specifies no relays, it is implied that the user wants to use the ones in the user relay list. Skip ephemeral relays in that case. // When the caller specifies specific relays, do not skip ephemeral relays to respect the exact list given by the caller. let shouldSkipEphemeralRelays = to == nil ? 
true : false + send(.subscribe(.init(filters: filters, sub_id: sub_id)), to: to, skip_ephemeral: shouldSkipEphemeralRelays) } } @@ -299,6 +305,7 @@ class RelayPool { func subscribe_to(sub_id: String, filters: [NostrFilter], to: [RelayURL]?, handler: @escaping (RelayURL, NostrConnectionEvent) -> ()) { Task { await register_handler(sub_id: sub_id, handler: handler) + send(.subscribe(.init(filters: filters, sub_id: sub_id)), to: to) } } @@ -313,7 +320,7 @@ class RelayPool { return c } - + @MainActor func queue_req(r: NostrRequestType, relay: RelayURL, skip_ephemeral: Bool) { let count = count_queued(relay: relay) From 667a228e1ab8c4a2b485fb43af7cc25c692571de Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Fri, 3 Oct 2025 10:29:00 -0700 Subject: [PATCH 76/91] Ensure to publish object changes on the main thread MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Daniel D’Aquino --- damus/Features/Settings/Models/UserSettingsStore.swift | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/damus/Features/Settings/Models/UserSettingsStore.swift b/damus/Features/Settings/Models/UserSettingsStore.swift index eb01f7e60..92a15adef 100644 --- a/damus/Features/Settings/Models/UserSettingsStore.swift +++ b/damus/Features/Settings/Models/UserSettingsStore.swift @@ -32,7 +32,9 @@ func setting_get_property_value(key: String, scoped_key: String, default_valu func setting_set_property_value(scoped_key: String, old_value: T, new_value: T) -> T? { guard old_value != new_value else { return nil } DamusUserDefaults.standard.set(new_value, forKey: scoped_key) - UserSettingsStore.shared?.objectWillChange.send() + DispatchQueue.main.async { + UserSettingsStore.shared?.objectWillChange.send() + } return new_value } From 3437cf5347bb73695b7a30032440cd070f6564dc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Sun, 5 Oct 2025 13:18:59 -0700 Subject: [PATCH 77/91] Further improvements to app lifecycle handling MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Resend subscription requests to relays when websocket connection is re-established - More safeguard checks on whether Ndb is opened before accessing its memory - Cancel queued unsubscribe requests on app backgrounding to avoid race conditions with subscribe requests when app enters the foreground - Call Ndb re-open when Damus is active (not only on active notify), as experimentally there have been instances where active notify code has not been run. The operation is idempotent, so there should be no risk of it being called twice. 
Signed-off-by: Daniel D’Aquino --- damus/ContentView.swift | 1 + .../NostrNetworkManager.swift | 1 + .../SubscriptionManager.swift | 5 +- damus/Core/Nostr/RelayPool.swift | 49 +++++++++++++++++-- .../Onboarding/Views/SaveKeysView.swift | 2 +- damus/Features/Posting/Models/PostBox.swift | 2 +- nostrdb/Ndb.swift | 3 +- 7 files changed, 54 insertions(+), 9 deletions(-) diff --git a/damus/ContentView.swift b/damus/ContentView.swift index 3dddfc963..46c468c1f 100644 --- a/damus/ContentView.swift +++ b/damus/ContentView.swift @@ -538,6 +538,7 @@ struct ContentView: View { Task { await damusClosingTask?.value // Wait for the closing task to finish before reopening things, to avoid race conditions damusClosingTask = nil + damus_state.ndb.reopen() // Pinging the network will automatically reconnect any dead websocket connections damus_state.nostrNetwork.ping() } diff --git a/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift b/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift index 5e17c200c..c9b3a393a 100644 --- a/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift +++ b/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift @@ -61,6 +61,7 @@ class NostrNetworkManager { func handleAppBackgroundRequest() async { await self.reader.cancelAllTasks() + self.pool.cleanQueuedRequestForSessionEnd() } func close() async { diff --git a/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift b/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift index 13420840b..aaf0d0c7c 100644 --- a/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift +++ b/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift @@ -220,6 +220,7 @@ extension NostrNetworkManager { catch { Self.logger.error("Session subscription \(id.uuidString, privacy: .public): NDB streaming error: \(error.localizedDescription, privacy: .public)") } + Self.logger.debug("Session subscription \(id.uuidString, privacy: .public): NDB streaming ended") continuation.finish() } let streamTask = Task { @@ -250,17 +251,19 @@ extension NostrNetworkManager { catch { Self.logger.error("Session subscription \(id.uuidString, privacy: .public): Network streaming error: \(error.localizedDescription, privacy: .public)") } + Self.logger.debug("Session subscription \(id.uuidString, privacy: .public): Network streaming ended") continuation.finish() } Task { // Add the ndb streaming task to the task manager so that it can be cancelled when the app is backgrounded let ndbStreamTaskId = await self.taskManager.add(task: ndbStreamTask) + let streamTaskId = await self.taskManager.add(task: streamTask) continuation.onTermination = { @Sendable _ in Task { await self.taskManager.cancelAndCleanUp(taskId: ndbStreamTaskId) - streamTask.cancel() + await self.taskManager.cancelAndCleanUp(taskId: streamTaskId) } } } diff --git a/damus/Core/Nostr/RelayPool.swift b/damus/Core/Nostr/RelayPool.swift index 7a0d2eca2..31a0f65f8 100644 --- a/damus/Core/Nostr/RelayPool.swift +++ b/damus/Core/Nostr/RelayPool.swift @@ -10,6 +10,8 @@ import Network struct RelayHandler { let sub_id: String + let filters: [NostrFilter]? + let to: [RelayURL]? var callback: (RelayURL, NostrConnectionEvent) -> () } @@ -106,7 +108,7 @@ class RelayPool { } @MainActor - func register_handler(sub_id: String, handler: @escaping (RelayURL, NostrConnectionEvent) -> ()) async { + func register_handler(sub_id: String, filters: [NostrFilter]?, to relays: [RelayURL]? 
= nil, handler: @escaping (RelayURL, NostrConnectionEvent) -> ()) async { while handlers.count > Self.MAX_CONCURRENT_SUBSCRIPTION_LIMIT { Log.debug("%s: Too many subscriptions, waiting for subscription pool to clear", for: .networking, sub_id) try? await Task.sleep(for: .seconds(1)) @@ -121,7 +123,7 @@ class RelayPool { return true } }) - self.handlers.append(RelayHandler(sub_id: sub_id, callback: handler)) + self.handlers.append(RelayHandler(sub_id: sub_id, filters: filters, to: relays, callback: handler)) Log.debug("Registering %s handler, current: %d", for: .networking, sub_id, self.handlers.count) } @@ -211,6 +213,23 @@ class RelayPool { relay.connection.disconnect() } } + + /// Deletes queued up requests that should not persist between app sessions (i.e. when the app goes to background then back to foreground) + func cleanQueuedRequestForSessionEnd() { + request_queue = request_queue.filter { request in + guard case .typical(let typicalRequest) = request.req else { return true } + switch typicalRequest { + case .subscribe(_): + return true + case .unsubscribe(_): + return false // Do not persist unsubscribe requests to prevent them to race against subscribe requests when we come back to the foreground. + case .event(_): + return true + case .auth(_): + return true + } + } + } func unsubscribe(sub_id: String, to: [RelayURL]? = nil) { if to == nil { @@ -221,7 +240,7 @@ class RelayPool { func subscribe(sub_id: String, filters: [NostrFilter], handler: @escaping (RelayURL, NostrConnectionEvent) -> (), to: [RelayURL]? = nil) { Task { - await register_handler(sub_id: sub_id, handler: handler) + await register_handler(sub_id: sub_id, filters: filters, to: to, handler: handler) // When the caller specifies no relays, it is implied that the user wants to use the ones in the user relay list. Skip ephemeral relays in that case. // When the caller specifies specific relays, do not skip ephemeral relays to respect the exact list given by the caller. @@ -304,7 +323,7 @@ class RelayPool { func subscribe_to(sub_id: String, filters: [NostrFilter], to: [RelayURL]?, handler: @escaping (RelayURL, NostrConnectionEvent) -> ()) { Task { - await register_handler(sub_id: sub_id, handler: handler) + await register_handler(sub_id: sub_id, filters: filters, to: to, handler: handler) send(.subscribe(.init(filters: filters, sub_id: sub_id)), to: to) } @@ -411,14 +430,34 @@ class RelayPool { } } } + + func resubscribeAll(relayId: RelayURL) { + for handler in self.handlers { + guard let filters = handler.filters else { continue } + // When the caller specifies no relays, it is implied that the user wants to use the ones in the user relay list. Skip ephemeral relays in that case. + // When the caller specifies specific relays, do not skip ephemeral relays to respect the exact list given by the caller. + let shouldSkipEphemeralRelays = handler.to == nil ? 
true : false + + if let handlerTargetRelays = handler.to, + !handlerTargetRelays.contains(where: { $0 == relayId }) { + // Not part of the target relays, skip + continue + } + + send(.subscribe(.init(filters: filters, sub_id: handler.sub_id)), to: [relayId], skip_ephemeral: shouldSkipEphemeralRelays) + } + } func handle_event(relay_id: RelayURL, event: NostrConnectionEvent) { record_seen(relay_id: relay_id, event: event) - // run req queue when we reconnect + // When we reconnect, do two things + // - Send messages that were stored in the queue + // - Re-subscribe to filters we had subscribed before if case .ws_connection_event(let ws) = event { if case .connected = ws { run_queue(relay_id) + self.resubscribeAll(relayId: relay_id) } } diff --git a/damus/Features/Onboarding/Views/SaveKeysView.swift b/damus/Features/Onboarding/Views/SaveKeysView.swift index 4a2bf947b..9939d778e 100644 --- a/damus/Features/Onboarding/Views/SaveKeysView.swift +++ b/damus/Features/Onboarding/Views/SaveKeysView.swift @@ -142,7 +142,7 @@ struct SaveKeysView: View { add_rw_relay(self.pool, relay) } - Task { await self.pool.register_handler(sub_id: "signup", handler: handle_event) } + Task { await self.pool.register_handler(sub_id: "signup", filters: nil, handler: handle_event) } self.loading = true diff --git a/damus/Features/Posting/Models/PostBox.swift b/damus/Features/Posting/Models/PostBox.swift index 0f84038c0..db5bb3b87 100644 --- a/damus/Features/Posting/Models/PostBox.swift +++ b/damus/Features/Posting/Models/PostBox.swift @@ -60,7 +60,7 @@ class PostBox { init(pool: RelayPool) { self.pool = pool self.events = [:] - Task { await pool.register_handler(sub_id: "postbox", handler: handle_event) } + Task { await pool.register_handler(sub_id: "postbox", filters: nil, to: nil, handler: handle_event) } } // only works reliably on delay-sent events diff --git a/nostrdb/Ndb.swift b/nostrdb/Ndb.swift index 0dabf95de..ea3e6ab6f 100644 --- a/nostrdb/Ndb.swift +++ b/nostrdb/Ndb.swift @@ -701,9 +701,10 @@ class Ndb { terminationStarted = true Log.debug("ndb_wait: stream: Terminated early", for: .ndb) streaming = false - ndb_unsubscribe(self.ndb.ndb, subid) Task { await self.unsetCallback(subscriptionId: subid) } filtersPointer.deallocate() + guard !self.is_closed else { return } // Double-check Ndb is open before sending unsubscribe + ndb_unsubscribe(self.ndb.ndb, subid) } } } From d9306d415323abaa0107dfd98cf2e521b03a67dc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Sun, 5 Oct 2025 15:21:57 -0700 Subject: [PATCH 78/91] Modify NostrNetworkManager pipeline architecture MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Previously, we combined the ndb and network stream within a "session subscription" stream, which was teared down and rebuilt every time the app went into the background and back to the foreground (This was done to prevent crashes related to access to Ndb memory when Ndb is closed). However, this caused complications and instability on the network stream, leading to timeline staleness. To address this, the pipeline was modified to merge the ndb and network streams further upstream, on the multi-session stage, allowing the session subscription streams to be completely split between Ndb and the network. For the ndb stream, we still tear it down and bring it up along the app foreground state, to prevent memory crashes. 
However, the network stream is kept intact between sessions, since RelayPool will now automatically handle resubscription on websocket reconnection. This prevents complexity and potential race conditions that could lead to timeline staleness. Signed-off-by: Daniel D’Aquino --- .../SubscriptionManager.swift | 208 +++++++++++------- damus/Core/Nostr/RelayPool.swift | 1 + 2 files changed, 127 insertions(+), 82 deletions(-) diff --git a/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift b/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift index aaf0d0c7c..1fdc7ba5b 100644 --- a/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift +++ b/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift @@ -112,64 +112,7 @@ extension NostrNetworkManager { } } - /// Subscribes to data from the user's relays func advancedStream(filters: [NostrFilter], to desiredRelays: [RelayURL]? = nil, timeout: Duration? = nil, streamMode: StreamMode? = nil, id: UUID? = nil) -> AsyncStream { - return AsyncStream { continuation in - let subscriptionId = id ?? UUID() - let startTime = CFAbsoluteTimeGetCurrent() - Self.logger.info("Starting subscription \(subscriptionId.uuidString, privacy: .public): \(filters.debugDescription, privacy: .private)") - let multiSessionStreamingTask = Task { - while !Task.isCancelled { - do { - guard !self.ndb.is_closed else { - Self.logger.info("\(subscriptionId.uuidString, privacy: .public): Ndb closed. Sleeping for 1 second before resuming.") - try await Task.sleep(nanoseconds: 1_000_000_000) - continue - } - guard self.pool.open else { - Self.logger.info("\(subscriptionId.uuidString, privacy: .public): RelayPool closed. Sleeping for 1 second before resuming.") - try await Task.sleep(nanoseconds: 1_000_000_000) - continue - } - Self.logger.info("\(subscriptionId.uuidString, privacy: .public): Streaming.") - for await item in self.sessionSubscribe(filters: filters, to: desiredRelays, streamMode: streamMode, id: id) { - try Task.checkCancellation() - continuation.yield(item) - } - Self.logger.info("\(subscriptionId.uuidString, privacy: .public): Session subscription ended. Sleeping for 1 second before resuming.") - try await Task.sleep(nanoseconds: 1_000_000_000) - } - catch { - Self.logger.error("Session subscription \(subscriptionId.uuidString, privacy: .public): Error: \(error.localizedDescription, privacy: .public)") - } - } - Self.logger.info("\(subscriptionId.uuidString, privacy: .public): Terminated.") - } - let timeoutTask = Task { - if let timeout { - try await Task.sleep(for: timeout) - continuation.finish() // End the stream due to timeout. - } - } - continuation.onTermination = { @Sendable _ in - Self.logger.info("\(subscriptionId.uuidString, privacy: .public): Cancelled.") - multiSessionStreamingTask.cancel() - timeoutTask.cancel() - } - } - } - - /// Subscribes to data from the user's relays - /// - /// Only survives for a single session. This exits after the app is backgrounded - /// - /// ## Implementation notes - /// - /// - When we migrate to the local relay model, we should modify this function to stream directly from NostrDB - /// - /// - Parameter filters: The nostr filters to specify what kind of data to subscribe to - /// - Returns: An async stream of nostr data - private func sessionSubscribe(filters: [NostrFilter], to desiredRelays: [RelayURL]? = nil, streamMode: StreamMode? = nil, id: UUID? = nil) -> AsyncStream { let id = id ?? UUID() let streamMode = streamMode ?? 
defaultStreamMode() return AsyncStream { continuation in @@ -194,36 +137,66 @@ extension NostrNetworkManager { } } + let streamTask = Task { + while !Task.isCancelled { + for await item in self.multiSessionNetworkStream(filters: filters, to: desiredRelays, streamMode: streamMode, id: id) { + try Task.checkCancellation() + switch item { + case .event(let lender): + continuation.yield(item) + case .eose: + break // Should not happen + case .ndbEose: + break // Should not happen + case .networkEose: + continuation.yield(item) + networkEOSEIssued = true + yieldEOSEIfReady() + } + } + } + } + let ndbStreamTask = Task { - do { - for await item in try self.ndb.subscribe(filters: try filters.map({ try NdbFilter(from: $0) })) { + while !Task.isCancelled { + for await item in self.multiSessionNdbStream(filters: filters, to: desiredRelays, streamMode: streamMode, id: id) { try Task.checkCancellation() switch item { + case .event(let lender): + continuation.yield(item) case .eose: - Self.logger.debug("Session subscription \(id.uuidString, privacy: .public): Received EOSE from nostrdb. Elapsed: \(CFAbsoluteTimeGetCurrent() - startTime, format: .fixed(precision: 2), privacy: .public) seconds") - continuation.yield(.ndbEose) + break // Should not happen + case .ndbEose: + continuation.yield(item) ndbEOSEIssued = true yieldEOSEIfReady() - case .event(let noteKey): - let lender = NdbNoteLender(ndb: self.ndb, noteKey: noteKey) - try Task.checkCancellation() - guard let desiredRelays else { - continuation.yield(.event(lender: lender)) // If no desired relays are specified, return all notes we see. - break - } - if try ndb.was(noteKey: noteKey, seenOnAnyOf: desiredRelays) { - continuation.yield(.event(lender: lender)) // If desired relays were specified and this note was seen there, return it. - } + case .networkEose: + break // Should not happen } } } - catch { - Self.logger.error("Session subscription \(id.uuidString, privacy: .public): NDB streaming error: \(error.localizedDescription, privacy: .public)") - } - Self.logger.debug("Session subscription \(id.uuidString, privacy: .public): NDB streaming ended") - continuation.finish() } + + continuation.onTermination = { @Sendable _ in + streamTask.cancel() + ndbStreamTask.cancel() + } + } + } + + private func multiSessionNetworkStream(filters: [NostrFilter], to desiredRelays: [RelayURL]? = nil, streamMode: StreamMode? = nil, id: UUID? = nil) -> AsyncStream { + let id = id ?? UUID() + let streamMode = streamMode ?? defaultStreamMode() + return AsyncStream { continuation in + let startTime = CFAbsoluteTimeGetCurrent() + Self.logger.debug("Network subscription \(id.uuidString, privacy: .public): Started") + let streamTask = Task { + while !self.pool.open { + Self.logger.info("\(id.uuidString, privacy: .public): RelayPool closed. Sleeping for 1 second before resuming.") + try await Task.sleep(nanoseconds: 1_000_000_000) + continue + } do { for await item in self.pool.subscribe(filters: filters, to: desiredRelays, id: id) { // NO-OP. Notes will be automatically ingested by NostrDB @@ -243,27 +216,98 @@ extension NostrNetworkManager { case .eose: Self.logger.debug("Session subscription \(id.uuidString, privacy: .public): Received EOSE from the network. 
Elapsed: \(CFAbsoluteTimeGetCurrent() - startTime, format: .fixed(precision: 2), privacy: .public) seconds") continuation.yield(.networkEose) - networkEOSEIssued = true - yieldEOSEIfReady() } } } catch { - Self.logger.error("Session subscription \(id.uuidString, privacy: .public): Network streaming error: \(error.localizedDescription, privacy: .public)") + Self.logger.error("Network subscription \(id.uuidString, privacy: .public): Streaming error: \(error.localizedDescription, privacy: .public)") + } + Self.logger.debug("Network subscription \(id.uuidString, privacy: .public): Network streaming ended") + continuation.finish() + } + + continuation.onTermination = { @Sendable _ in + streamTask.cancel() + } + } + } + + private func multiSessionNdbStream(filters: [NostrFilter], to desiredRelays: [RelayURL]? = nil, streamMode: StreamMode? = nil, id: UUID? = nil) -> AsyncStream { + return AsyncStream { continuation in + let subscriptionId = id ?? UUID() + let startTime = CFAbsoluteTimeGetCurrent() + Self.logger.info("Starting multi-session NDB subscription \(subscriptionId.uuidString, privacy: .public): \(filters.debugDescription, privacy: .private)") + let multiSessionStreamingTask = Task { + while !Task.isCancelled { + do { + guard !self.ndb.is_closed else { + Self.logger.info("\(subscriptionId.uuidString, privacy: .public): Ndb closed. Sleeping for 1 second before resuming.") + try await Task.sleep(nanoseconds: 1_000_000_000) + continue + } + Self.logger.info("\(subscriptionId.uuidString, privacy: .public): Streaming from NDB.") + for await item in self.sessionNdbStream(filters: filters, to: desiredRelays, streamMode: streamMode, id: id) { + try Task.checkCancellation() + continuation.yield(item) + } + Self.logger.info("\(subscriptionId.uuidString, privacy: .public): Session subscription ended. Sleeping for 1 second before resuming.") + try await Task.sleep(nanoseconds: 1_000_000_000) + } + catch { + Self.logger.error("Session subscription \(subscriptionId.uuidString, privacy: .public): Error: \(error.localizedDescription, privacy: .public)") + } + } + Self.logger.info("\(subscriptionId.uuidString, privacy: .public): Terminated.") + } + continuation.onTermination = { @Sendable _ in + Self.logger.info("\(subscriptionId.uuidString, privacy: .public): Cancelled multi-session NDB stream.") + multiSessionStreamingTask.cancel() + } + } + } + + private func sessionNdbStream(filters: [NostrFilter], to desiredRelays: [RelayURL]? = nil, streamMode: StreamMode? = nil, id: UUID? = nil) -> AsyncStream { + let id = id ?? UUID() + //let streamMode = streamMode ?? defaultStreamMode() + return AsyncStream { continuation in + let startTime = CFAbsoluteTimeGetCurrent() + Self.logger.debug("Session subscription \(id.uuidString, privacy: .public): Started") + + let ndbStreamTask = Task { + do { + for await item in try self.ndb.subscribe(filters: try filters.map({ try NdbFilter(from: $0) })) { + try Task.checkCancellation() + switch item { + case .eose: + Self.logger.debug("Session subscription \(id.uuidString, privacy: .public): Received EOSE from nostrdb. Elapsed: \(CFAbsoluteTimeGetCurrent() - startTime, format: .fixed(precision: 2), privacy: .public) seconds") + continuation.yield(.ndbEose) + case .event(let noteKey): + let lender = NdbNoteLender(ndb: self.ndb, noteKey: noteKey) + try Task.checkCancellation() + guard let desiredRelays else { + continuation.yield(.event(lender: lender)) // If no desired relays are specified, return all notes we see. 
+ break + } + if try ndb.was(noteKey: noteKey, seenOnAnyOf: desiredRelays) { + continuation.yield(.event(lender: lender)) // If desired relays were specified and this note was seen there, return it. + } + } + } + } + catch { + Self.logger.error("Session subscription \(id.uuidString, privacy: .public): NDB streaming error: \(error.localizedDescription, privacy: .public)") } - Self.logger.debug("Session subscription \(id.uuidString, privacy: .public): Network streaming ended") + Self.logger.debug("Session subscription \(id.uuidString, privacy: .public): NDB streaming ended") continuation.finish() } Task { // Add the ndb streaming task to the task manager so that it can be cancelled when the app is backgrounded let ndbStreamTaskId = await self.taskManager.add(task: ndbStreamTask) - let streamTaskId = await self.taskManager.add(task: streamTask) continuation.onTermination = { @Sendable _ in Task { await self.taskManager.cancelAndCleanUp(taskId: ndbStreamTaskId) - await self.taskManager.cancelAndCleanUp(taskId: streamTaskId) } } } diff --git a/damus/Core/Nostr/RelayPool.swift b/damus/Core/Nostr/RelayPool.swift index 31a0f65f8..96226d164 100644 --- a/damus/Core/Nostr/RelayPool.swift +++ b/damus/Core/Nostr/RelayPool.swift @@ -444,6 +444,7 @@ class RelayPool { continue } + Log.debug("%s: Sending resubscribe request to %s", for: .networking, handler.sub_id, relayId.absoluteString) send(.subscribe(.init(filters: filters, sub_id: handler.sub_id)), to: [relayId], skip_ephemeral: shouldSkipEphemeralRelays) } } From 61eb83323976ad1e4f41a750e7adac312efc9540 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Sun, 5 Oct 2025 16:52:56 -0700 Subject: [PATCH 79/91] Add temporary experimental delay to check hypothesis on occasional init timeline staleness MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Daniel D’Aquino --- .../NostrNetworkManager/SubscriptionManager.swift | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift b/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift index 1fdc7ba5b..aac1d9228 100644 --- a/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift +++ b/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift @@ -197,6 +197,14 @@ extension NostrNetworkManager { try await Task.sleep(nanoseconds: 1_000_000_000) continue } + + // FIXME: The delay below is to prevent race conditions when the RelayPool is initializing during the app start. + // Without this, occasionally there is a race condition that causes the subscribe call to be missed somehow + // despite mechanisms in place to queue up requests when relays are disconnected, as well as mechanisms to send subscribe requests when the relay is already connected. + // This is difficult to fix as it will require a big refactor in `RelayPool` to implement proper async/await mechanisms, instead of the current "fire and forget" interfaces. + // If this delay fixes the occasional timeline staleness when starting the app, it helps prove the hypothesis above. + try await Task.sleep(nanoseconds: 2_000_000_000) + do { for await item in self.pool.subscribe(filters: filters, to: desiredRelays, id: id) { // NO-OP. 
Notes will be automatically ingested by NostrDB From 01ec05ab32ac528b2eebed9f5c8ecefc462f6090 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Mon, 6 Oct 2025 11:33:45 -0700 Subject: [PATCH 80/91] Fix test build error MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Daniel D’Aquino --- .../NostrNetworkManagerTests/NostrNetworkManagerTests.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/damusTests/NostrNetworkManagerTests/NostrNetworkManagerTests.swift b/damusTests/NostrNetworkManagerTests/NostrNetworkManagerTests.swift index ab0b3400a..7ea5dc193 100644 --- a/damusTests/NostrNetworkManagerTests/NostrNetworkManagerTests.swift +++ b/damusTests/NostrNetworkManagerTests/NostrNetworkManagerTests.swift @@ -45,7 +45,7 @@ class NostrNetworkManagerTests: XCTestCase { var receivedCount = 0 var eventIds: Set = [] Task { - for await item in self.damusState!.nostrNetwork.reader.subscribe(filters: [filter]) { + for await item in self.damusState!.nostrNetwork.reader.advancedStream(filters: [filter], streamMode: .ndbFirst) { switch item { case .event(let lender): try? lender.borrow { event in From 7691b48fb677e5a313188ffab5634e040e4275b9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Mon, 6 Oct 2025 11:48:58 -0700 Subject: [PATCH 81/91] Fix testDecodingPayInvoiceRequest test failure MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Daniel D’Aquino --- nostrdb/NdbNote.swift | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/nostrdb/NdbNote.swift b/nostrdb/NdbNote.swift index 25fa9e995..080048e16 100644 --- a/nostrdb/NdbNote.swift +++ b/nostrdb/NdbNote.swift @@ -290,9 +290,11 @@ class NdbNote: Codable, Equatable, Hashable { free(buf) return nil } - case .manual(_, let signature, _): + case .manual(_, let signature, let noteId): var raw_sig = signature.data.byteArray + var raw_id = noteId.id.byteArray ndb_builder_set_sig(&builder, &raw_sig) + ndb_builder_set_id(&builder, &raw_id) do { // Finalize note, save length, and ensure it is higher than zero (which signals finalization has succeeded) From 4f479d0280b32d554976416d6f454aa30e5c3f04 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Mon, 6 Oct 2025 15:10:57 -0700 Subject: [PATCH 82/91] Fix RelayPool connection race condition without time delays MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This improves upon a temporary fix we had for the RelayPool race condition that would cause timeline staleness. The root cause was that during app launch, the HomeModel would subscribe to some filters, and the subscribe function would filter out any relays not yet connected to avoid unnecessary waiting for EOSEs from disconnected relays. However, that filtering would cause the subscribe request to not be queued up or sent back to the relays once connected, causing the relays to never receive those subscription requests and causing timeline staleness. This was fixed by separating the relay list used for the subcription request from the relay list used for waiting for network EOSEs. This allows other mechanisms to ensure the subscription will go through even when the app is initializing and relays are not yet fully connected. 
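
A minimal sketch of that separation, with stand-in types rather than the real RelayPool API: the subscription request targets every desired relay (connected or not, so it can be queued and replayed on reconnect), while EOSE completion is judged only against the relays that are actually connected when an EOSE arrives:

    import Foundation

    struct Relay {
        let url: String
        var isConnected: Bool
    }

    /// EOSE is considered complete once every currently connected relay in the
    /// desired set has reported its initial results; disconnected relays are not
    /// waited on, but they still receive the queued subscription when they connect.
    func haveAllConnectedRelaysFinished(desired: [Relay], finished: Set<String>) -> Bool {
        let connectedNow = Set(desired.filter { $0.isConnected }.map(\.url))
        return !connectedNow.isEmpty && connectedNow.isSubset(of: finished)
    }
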
Fixes: 61eb83323976ad1e4f41a750e7adac312efc9540 Signed-off-by: Daniel D’Aquino --- .../NostrNetworkManager/SubscriptionManager.swift | 7 ------- damus/Core/Nostr/RelayPool.swift | 7 ++++--- 2 files changed, 4 insertions(+), 10 deletions(-) diff --git a/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift b/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift index aac1d9228..7ea4cfb6f 100644 --- a/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift +++ b/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift @@ -198,13 +198,6 @@ extension NostrNetworkManager { continue } - // FIXME: The delay below is to prevent race conditions when the RelayPool is initializing during the app start. - // Without this, occasionally there is a race condition that causes the subscribe call to be missed somehow - // despite mechanisms in place to queue up requests when relays are disconnected, as well as mechanisms to send subscribe requests when the relay is already connected. - // This is difficult to fix as it will require a big refactor in `RelayPool` to implement proper async/await mechanisms, instead of the current "fire and forget" interfaces. - // If this delay fixes the occasional timeline staleness when starting the app, it helps prove the hypothesis above. - try await Task.sleep(nanoseconds: 2_000_000_000) - do { for await item in self.pool.subscribe(filters: filters, to: desiredRelays, id: id) { // NO-OP. Notes will be automatically ingested by NostrDB diff --git a/damus/Core/Nostr/RelayPool.swift b/damus/Core/Nostr/RelayPool.swift index 96226d164..867057448 100644 --- a/damus/Core/Nostr/RelayPool.swift +++ b/damus/Core/Nostr/RelayPool.swift @@ -259,7 +259,7 @@ class RelayPool { /// - Returns: Returns an async stream that callers can easily consume via a for-loop func subscribe(filters: [NostrFilter], to desiredRelays: [RelayURL]? = nil, eoseTimeout: Duration? = nil, id: UUID? = nil) -> AsyncStream { let eoseTimeout = eoseTimeout ?? .seconds(5) - let desiredRelays = desiredRelays ?? self.relays.filter({ $0.connection.isConnected }).map({ $0.descriptor.url }) + let desiredRelays = desiredRelays ?? self.relays.map({ $0.descriptor.url }) let startTime = CFAbsoluteTimeGetCurrent() return AsyncStream { continuation in let id = id ?? UUID() @@ -284,8 +284,9 @@ class RelayPool { break // We do not support handling these yet case .eose(_): relaysWhoFinishedInitialResults.insert(relayUrl) - Log.debug("RelayPool subscription %s: EOSE from %s. EOSE count: %d/%d. Elapsed: %.2f seconds.", for: .networking, id.uuidString, relayUrl.absoluteString, relaysWhoFinishedInitialResults.count, Set(desiredRelays).count, CFAbsoluteTimeGetCurrent() - startTime) - if relaysWhoFinishedInitialResults == Set(desiredRelays) { + let desiredAndConnectedRelays = desiredRelays ?? self.relays.filter({ $0.connection.isConnected }).map({ $0.descriptor.url }) + Log.debug("RelayPool subscription %s: EOSE from %s. EOSE count: %d/%d. 
Elapsed: %.2f seconds.", for: .networking, id.uuidString, relayUrl.absoluteString, relaysWhoFinishedInitialResults.count, Set(desiredAndConnectedRelays).count, CFAbsoluteTimeGetCurrent() - startTime) + if relaysWhoFinishedInitialResults == Set(desiredAndConnectedRelays) { continuation.yield(with: .success(.eose)) eoseSent = true } From 588ef46402211afc9050c12eb0f25ae43c0408ad Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Wed, 8 Oct 2025 10:20:10 -0700 Subject: [PATCH 83/91] Hide "Load new content" behind feature flag MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This feature is not production-ready, and is not essential for the current scope of work, so descoping it and hiding it behind a feature flag until it is ready. Changelog-Removed: Removed "Load new content" button Signed-off-by: Daniel D’Aquino --- damus/Features/Settings/Models/UserSettingsStore.swift | 4 ++++ damus/Features/Timeline/Views/PostingTimelineView.swift | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/damus/Features/Settings/Models/UserSettingsStore.swift b/damus/Features/Settings/Models/UserSettingsStore.swift index 92a15adef..9ebb62db0 100644 --- a/damus/Features/Settings/Models/UserSettingsStore.swift +++ b/damus/Features/Settings/Models/UserSettingsStore.swift @@ -249,6 +249,10 @@ class UserSettingsStore: ObservableObject { @Setting(key: "enable_experimental_local_relay_model", default_value: false) var enable_experimental_local_relay_model: Bool + /// Whether the app should present the experimental floating "Load new content" button + @Setting(key: "enable_experimental_load_new_content_button", default_value: false) + var enable_experimental_load_new_content_button: Bool + @StringSetting(key: "purple_environment", default_value: .production) var purple_enviroment: DamusPurpleEnvironment diff --git a/damus/Features/Timeline/Views/PostingTimelineView.swift b/damus/Features/Timeline/Views/PostingTimelineView.swift index 42985e611..fe116858c 100644 --- a/damus/Features/Timeline/Views/PostingTimelineView.swift +++ b/damus/Features/Timeline/Views/PostingTimelineView.swift @@ -96,7 +96,7 @@ struct PostingTimelineView: View { .ignoresSafeArea() } - if homeEvents.incoming.count > 0 { + if damus_state.settings.enable_experimental_load_new_content_button && homeEvents.incoming.count > 0 { Button( action: { notify(.scroll_to_top) From 9311a767c84290ace68b6dd12f702f6ade93ac91 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Wed, 8 Oct 2025 10:38:08 -0700 Subject: [PATCH 84/91] Speed up quote reposts view loading MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes: https://github.com/damus-io/damus/issues/3252 Signed-off-by: Daniel D’Aquino --- damus/Features/Events/Models/EventsModel.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/damus/Features/Events/Models/EventsModel.swift b/damus/Features/Events/Models/EventsModel.swift index fcd94dc8a..c5fdc8b68 100644 --- a/damus/Features/Events/Models/EventsModel.swift +++ b/damus/Features/Events/Models/EventsModel.swift @@ -82,9 +82,9 @@ class EventsModel: ObservableObject { }) } case .eose: - DispatchQueue.main.async { self.loading = false } break outerLoop case .ndbEose: + DispatchQueue.main.async { self.loading = false } break case .networkEose: break From c80d4f146cc2526044c0e6d85f8b400cb1f2d566 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Wed, 8 Oct 2025 13:21:25 
-0700 Subject: [PATCH 85/91] Unpublish incoming notes to prevent unnecessary redraws MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Daniel D’Aquino --- damus/Shared/Utilities/EventHolder.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/damus/Shared/Utilities/EventHolder.swift b/damus/Shared/Utilities/EventHolder.swift index f31e6c2b9..995a6c6ee 100644 --- a/damus/Shared/Utilities/EventHolder.swift +++ b/damus/Shared/Utilities/EventHolder.swift @@ -11,7 +11,7 @@ import Foundation class EventHolder: ObservableObject, ScrollQueue { private var has_event = Set() @Published var events: [NostrEvent] - @Published var incoming: [NostrEvent] + var incoming: [NostrEvent] private(set) var should_queue = false var on_queue: ((NostrEvent) -> Void)? From 70d0d9dacfdf41a03cf63c09ed6e12e44ab3f74d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Wed, 8 Oct 2025 15:10:47 -0700 Subject: [PATCH 86/91] Offload note filtering computations from the view body render function MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This attempts to improve the performance of InnerTimelineView by performing event filtering computations on "EventHolder.insert" instead of on each view body re-render, to improve SwiftUI performance. Signed-off-by: Daniel D’Aquino --- .../Timeline/Views/InnerTimelineView.swift | 18 ++++--- damus/Shared/Utilities/EventHolder.swift | 48 +++++++++++++++++++ 2 files changed, 60 insertions(+), 6 deletions(-) diff --git a/damus/Features/Timeline/Views/InnerTimelineView.swift b/damus/Features/Timeline/Views/InnerTimelineView.swift index 0e572f8fd..51c8f82d9 100644 --- a/damus/Features/Timeline/Views/InnerTimelineView.swift +++ b/damus/Features/Timeline/Views/InnerTimelineView.swift @@ -9,14 +9,18 @@ import SwiftUI struct InnerTimelineView: View { - @ObservedObject var events: EventHolder + var events: EventHolder + @ObservedObject var filteredEvents: EventHolder.FilteredHolder + var filteredEventHolderId: UUID let state: DamusState - let filter: (NostrEvent) -> Bool init(events: EventHolder, damus: DamusState, filter: @escaping (NostrEvent) -> Bool, apply_mute_rules: Bool = true) { self.events = events self.state = damus - self.filter = apply_mute_rules ? { filter($0) && !damus.mutelist_manager.is_event_muted($0) } : filter + let filter = apply_mute_rules ? 
{ filter($0) && !damus.mutelist_manager.is_event_muted($0) } : filter + let filteredEvents = EventHolder.FilteredHolder(filter: filter) + self.filteredEvents = filteredEvents + self.filteredEventHolderId = events.add(filteredHolder: filteredEvents) } var event_options: EventViewOptions { @@ -29,12 +33,11 @@ struct InnerTimelineView: View { var body: some View { LazyVStack(spacing: 0) { - let events = self.events.events + let events = self.filteredEvents.events if events.isEmpty { EmptyTimelineView() } else { - let evs = events.filter(filter) - let indexed = Array(zip(evs, 0...)) + let indexed = Array(zip(events, 0...)) ForEach(indexed, id: \.0.id) { tup in let ev = tup.0 let ind = tup.1 @@ -62,6 +65,9 @@ struct InnerTimelineView: View { } } } + .onDisappear { + self.events.removeFilteredHolder(id: self.filteredEventHolderId) + } //.padding(.horizontal) } diff --git a/damus/Shared/Utilities/EventHolder.swift b/damus/Shared/Utilities/EventHolder.swift index 995a6c6ee..240261674 100644 --- a/damus/Shared/Utilities/EventHolder.swift +++ b/damus/Shared/Utilities/EventHolder.swift @@ -11,6 +11,7 @@ import Foundation class EventHolder: ObservableObject, ScrollQueue { private var has_event = Set() @Published var events: [NostrEvent] + var filteredHolders: [UUID: FilteredHolder] = [:] var incoming: [NostrEvent] private(set) var should_queue = false var on_queue: ((NostrEvent) -> Void)? @@ -58,6 +59,9 @@ class EventHolder: ObservableObject, ScrollQueue { if insert_uniq_sorted_event_created(events: &self.events, new_ev: ev) { return true } + for (id, filteredView) in self.filteredHolders { + filteredView.insert(event: ev) + } return false } @@ -85,6 +89,9 @@ class EventHolder: ObservableObject, ScrollQueue { if insert_uniq_sorted_event_created(events: &events, new_ev: event) { changed = true } + for (id, filteredHolder) in self.filteredHolders { + filteredHolder.insert(event: event) + } } if changed { @@ -100,5 +107,46 @@ class EventHolder: ObservableObject, ScrollQueue { func reset() { self.incoming = [] self.events = [] + for (id, filteredHolder) in filteredHolders { + filteredHolder.update(events: []) + } + } + + @MainActor + func add(filteredHolder: FilteredHolder) -> UUID { + let id = UUID() + self.filteredHolders[id] = filteredHolder + filteredHolder.update(events: self.events) + return id + } + + @MainActor + func removeFilteredHolder(id: UUID) { + self.filteredHolders[id] = nil + } + + class FilteredHolder: ObservableObject { + @Published private(set) var events: [NostrEvent] + let filter: (NostrEvent) -> Bool + + init(filter: @escaping (NostrEvent) -> Bool) { + self.events = [] + self.filter = filter + } + + func update(events: [NostrEvent]) { + self.events = events.filter(self.filter) + } + + func insert(event: NostrEvent) { + guard self.filter(event) else { return } + var changed = false + if insert_uniq_sorted_event_created(events: &events, new_ev: event) { + changed = true + } + if changed { + self.objectWillChange.send() + } + } } } From 05c02f7dc4047f6676ce044cf19a1ca2f2db9130 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Wed, 8 Oct 2025 15:51:59 -0700 Subject: [PATCH 87/91] Initialize AVPlayerItem on the background to avoid hitches MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Daniel D’Aquino --- .../Shared/Media/Video/DamusVideoPlayer.swift | 27 ++++++++++++++----- 1 file changed, 20 insertions(+), 7 deletions(-) diff --git a/damus/Shared/Media/Video/DamusVideoPlayer.swift 
b/damus/Shared/Media/Video/DamusVideoPlayer.swift index b7da39867..31043adc5 100644 --- a/damus/Shared/Media/Video/DamusVideoPlayer.swift +++ b/damus/Shared/Media/Video/DamusVideoPlayer.swift @@ -108,10 +108,23 @@ import SwiftUI public init(url: URL) { self.url = url - self.player = AVPlayer(playerItem: AVPlayerItem(url: url)) + // Initialize with an empty player first + self.player = AVPlayer() self.video_size = nil - Task { await self.load() } + // Creating the player item is an expensive action. Create it on a background thread to avoid performance issues. + Task.detached(priority: TaskPriority.userInitiated) { + self.loadPlayerItem(url: url) + } + } + + nonisolated private func loadPlayerItem(url: URL) { + let playerItem = AVPlayerItem(url: url) + + DispatchQueue.main.async { + self.player.replaceCurrentItem(with: playerItem) + Task { await self.load() } + } } func reinitializePlayer() { @@ -122,12 +135,12 @@ import SwiftUI videoDurationObserver?.invalidate() videoIsPlayingObserver?.invalidate() - // Reset player - self.player = AVPlayer(playerItem: AVPlayerItem(url: url)) + // Initialize player with nil item first + self.player.replaceCurrentItem(with: nil) - // Load once again - Task { - await load() + // Creating the player item is an expensive action. Create it on a background thread to avoid performance issues. + Task.detached(priority: TaskPriority.userInitiated) { + self.loadPlayerItem(url: self.url) } } From 7c1594107f8a9feb6f4332b83525fa103ef1cb3f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Wed, 8 Oct 2025 17:23:59 -0700 Subject: [PATCH 88/91] Perform LNURL computation on the background in EventActionBar MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This is to reduce the amount of computation it takes to create the EventActionBar view Signed-off-by: Daniel D’Aquino --- .../Actions/ActionBar/Views/EventActionBar.swift | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/damus/Features/Actions/ActionBar/Views/EventActionBar.swift b/damus/Features/Actions/ActionBar/Views/EventActionBar.swift index 4b7c6eed2..7973b67d9 100644 --- a/damus/Features/Actions/ActionBar/Views/EventActionBar.swift +++ b/damus/Features/Actions/ActionBar/Views/EventActionBar.swift @@ -36,10 +36,17 @@ struct EventActionBar: View { self.swipe_context = swipe_context } - var lnurl: String? { - damus_state.profiles.lookup_with_timestamp(event.pubkey)?.map({ pr in + @State var lnurl: String? = nil + + // Fetching an LNURL is expensive enough that it can cause a hitch. Use a special backgroundable function to fetch the value. 
+ // Fetch on `.onAppear` + nonisolated func fetchLNURL() { + let lnurl = damus_state.profiles.lookup_with_timestamp(event.pubkey)?.map({ pr in pr?.lnurl }).value + DispatchQueue.main.async { + self.lnurl = lnurl + } } var show_like: Bool { @@ -231,6 +238,9 @@ struct EventActionBar: View { self.content .onAppear { self.bar.update(damus: damus_state, evid: self.event.id) + Task.detached(priority: .background, operation: { + self.fetchLNURL() + }) } .sheet(isPresented: $show_share_action, onDismiss: { self.show_share_action = false }) { if #available(iOS 16.0, *) { From 991a4a86e61bc2aa1c921a279e11b27bcfab3852 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Fri, 10 Oct 2025 14:12:30 -0700 Subject: [PATCH 89/91] Move most of RelayPool away from the Main Thread MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This is a large refactor that aims to improve performance by offloading RelayPool computations into a separate actor outside the main thread. This should reduce congestion on the main thread and thus improve UI performance. Also, the internal subscription callback mechanism was changed to use AsyncStreams to prevent race conditions newly found in that area of the code. Changelog-Fixed: Added performance improvements to timeline scrolling Signed-off-by: Daniel D’Aquino --- .../xcshareddata/xcschemes/damus.xcscheme | 1 - damus/ContentView.swift | 212 ++++++++-------- .../NostrNetworkManager.swift | 46 ++-- .../SubscriptionManager.swift | 6 +- .../UserRelayListManager.swift | 83 ++++--- damus/Core/Nostr/RelayConnection.swift | 28 +-- damus/Core/Nostr/RelayPool.swift | 226 +++++++++++------- .../ActionBar/Models/ActionBarModel.swift | 5 +- .../ActionBar/Views/EventActionBar.swift | 33 ++- .../ActionBar/Views/EventDetailBar.swift | 14 +- .../Actions/ActionBar/Views/ShareAction.swift | 16 +- .../Actions/Reports/Views/ReportView.swift | 6 +- .../Actions/Reposts/Views/RepostAction.swift | 14 +- damus/Features/Chat/ChatEventView.swift | 12 +- damus/Features/DMs/Views/DMChatView.swift | 6 +- damus/Features/Events/EventMenu.swift | 8 +- damus/Features/Events/EventView.swift | 2 +- damus/Features/Events/SelectedEventView.swift | 2 +- .../FollowPack/Models/FollowPackModel.swift | 2 +- damus/Features/Follows/Models/Contacts+.swift | 8 +- .../Muting/Models/MutedThreadsManager.swift | 2 +- .../Muting/Views/AddMuteItemView.swift | 2 +- .../Features/Muting/Views/MutelistView.swift | 6 +- .../Views/OnboardingSuggestionsView.swift | 2 +- .../Onboarding/Views/SaveKeysView.swift | 31 ++- .../Features/Posting/Models/DraftsModel.swift | 14 +- damus/Features/Posting/Models/PostBox.swift | 31 ++- damus/Features/Posting/Views/PostView.swift | 20 +- .../Profile/Views/EditMetadataView.swift | 10 +- .../Features/Profile/Views/ProfileView.swift | 2 +- .../Features/Relays/Views/AddRelayView.swift | 48 ++-- .../Search/Models/SearchHomeModel.swift | 2 +- .../Search/Views/SearchHeaderView.swift | 4 +- damus/Features/Search/Views/SearchView.swift | 4 +- .../Features/Settings/Views/ConfigView.swift | 6 +- .../Settings/Views/FirstAidSettingsView.swift | 4 +- .../Status/Views/UserStatusSheet.swift | 20 +- .../Features/Timeline/Models/HomeModel.swift | 12 +- .../Models/WalletConnect/WalletConnect+.swift | 12 +- .../Features/Wallet/Models/WalletModel.swift | 2 +- damus/Features/Wallet/Views/NWCSettings.swift | 2 +- .../Wallet/Views/SendPaymentView.swift | 22 +- damus/Features/Zaps/Models/Zaps.swift | 2 +- damus/Features/Zaps/Views/NoteZapButton.swift | 4 +- 
damus/Notify/Notify.swift | 4 +- .../Notify/PresentFullScreenItemNotify.swift | 2 +- .../ActionViewController.swift | 16 +- nostrdb/UnownedNdbNote.swift | 14 +- nostrscript/NostrScript.swift | 13 +- share extension/ShareViewController.swift | 10 +- 50 files changed, 602 insertions(+), 451 deletions(-) diff --git a/damus.xcodeproj/xcshareddata/xcschemes/damus.xcscheme b/damus.xcodeproj/xcshareddata/xcschemes/damus.xcscheme index b59a98036..5ff8e186e 100644 --- a/damus.xcodeproj/xcshareddata/xcschemes/damus.xcscheme +++ b/damus.xcodeproj/xcshareddata/xcschemes/damus.xcscheme @@ -55,7 +55,6 @@ buildConfiguration = "Debug" selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB" selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB" - enableAddressSanitizer = "YES" launchStyle = "0" useCustomWorkingDirectory = "NO" ignoresPersistentStateOnLaunch = "NO" diff --git a/damus/ContentView.swift b/damus/ContentView.swift index 46c468c1f..7799fb837 100644 --- a/damus/ContentView.swift +++ b/damus/ContentView.swift @@ -300,16 +300,18 @@ struct ContentView: View { .ignoresSafeArea(.keyboard) .edgesIgnoringSafeArea(hide_bar ? [.bottom] : []) .onAppear() { - self.connect() - try? AVAudioSession.sharedInstance().setCategory(AVAudioSession.Category.playback, mode: .default, options: .mixWithOthers) - setup_notifications() - if !hasSeenOnboardingSuggestions || damus_state!.settings.always_show_onboarding_suggestions { - active_sheet = .onboardingSuggestions - hasSeenOnboardingSuggestions = true - } - self.appDelegate?.state = damus_state - Task { // We probably don't need this to be a detached task. According to https://docs.swift.org/swift-book/documentation/the-swift-programming-language/concurrency/#Defining-and-Calling-Asynchronous-Functions, awaits are only suspension points that do not block the thread. - await self.listenAndHandleLocalNotifications() + Task { + await self.connect() + try? AVAudioSession.sharedInstance().setCategory(AVAudioSession.Category.playback, mode: .default, options: .mixWithOthers) + setup_notifications() + if !hasSeenOnboardingSuggestions || damus_state!.settings.always_show_onboarding_suggestions { + active_sheet = .onboardingSuggestions + hasSeenOnboardingSuggestions = true + } + self.appDelegate?.state = damus_state + Task { // We probably don't need this to be a detached task. According to https://docs.swift.org/swift-book/documentation/the-swift-programming-language/concurrency/#Defining-and-Calling-Asynchronous-Functions, awaits are only suspension points that do not block the thread. + await self.listenAndHandleLocalNotifications() + } } } .sheet(item: $active_sheet) { item in @@ -371,7 +373,7 @@ struct ContentView: View { self.hide_bar = !show } .onReceive(timer) { n in - self.damus_state?.nostrNetwork.postbox.try_flushing_events() + Task{ await self.damus_state?.nostrNetwork.postbox.try_flushing_events() } self.damus_state!.profiles.profile_data(self.damus_state!.pubkey).status.try_expire() } .onReceive(handle_notify(.report)) { target in @@ -382,45 +384,47 @@ struct ContentView: View { self.confirm_mute = true } .onReceive(handle_notify(.attached_wallet)) { nwc in - try? 
damus_state.nostrNetwork.userRelayList.load() // Reload relay list to apply changes - - // update the lightning address on our profile when we attach a - // wallet with an associated - guard let ds = self.damus_state, - let lud16 = nwc.lud16, - let keypair = ds.keypair.to_full(), - let profile_txn = ds.profiles.lookup(id: ds.pubkey), - let profile = profile_txn.unsafeUnownedValue, - lud16 != profile.lud16 else { - return - } - - // clear zapper cache for old lud16 - if profile.lud16 != nil { - // TODO: should this be somewhere else, where we process profile events!? - invalidate_zapper_cache(pubkey: keypair.pubkey, profiles: ds.profiles, lnurl: ds.lnurls) + Task { + try? await damus_state.nostrNetwork.userRelayList.load() // Reload relay list to apply changes + + // update the lightning address on our profile when we attach a + // wallet with an associated + guard let ds = self.damus_state, + let lud16 = nwc.lud16, + let keypair = ds.keypair.to_full(), + let profile_txn = ds.profiles.lookup(id: ds.pubkey), + let profile = profile_txn.unsafeUnownedValue, + lud16 != profile.lud16 else { + return + } + + // clear zapper cache for old lud16 + if profile.lud16 != nil { + // TODO: should this be somewhere else, where we process profile events!? + invalidate_zapper_cache(pubkey: keypair.pubkey, profiles: ds.profiles, lnurl: ds.lnurls) + } + + let prof = Profile(name: profile.name, display_name: profile.display_name, about: profile.about, picture: profile.picture, banner: profile.banner, website: profile.website, lud06: profile.lud06, lud16: lud16, nip05: profile.nip05, damus_donation: profile.damus_donation, reactions: profile.reactions) + + guard let ev = make_metadata_event(keypair: keypair, metadata: prof) else { return } + await ds.nostrNetwork.postbox.send(ev) } - - let prof = Profile(name: profile.name, display_name: profile.display_name, about: profile.about, picture: profile.picture, banner: profile.banner, website: profile.website, lud06: profile.lud06, lud16: lud16, nip05: profile.nip05, damus_donation: profile.damus_donation, reactions: profile.reactions) - - guard let ev = make_metadata_event(keypair: keypair, metadata: prof) else { return } - ds.nostrNetwork.postbox.send(ev) } .onReceive(handle_notify(.broadcast)) { ev in guard let ds = self.damus_state else { return } - ds.nostrNetwork.postbox.send(ev) + Task { await ds.nostrNetwork.postbox.send(ev) } } .onReceive(handle_notify(.unfollow)) { target in guard let state = self.damus_state else { return } - _ = handle_unfollow(state: state, unfollow: target.follow_ref) + Task { _ = await handle_unfollow(state: state, unfollow: target.follow_ref) } } .onReceive(handle_notify(.unfollowed)) { unfollow in home.resubscribe(.unfollowing(unfollow)) } .onReceive(handle_notify(.follow)) { target in guard let state = self.damus_state else { return } - handle_follow_notif(state: state, target: target) + Task { await handle_follow_notif(state: state, target: target) } } .onReceive(handle_notify(.followed)) { _ in home.resubscribe(.following) @@ -431,8 +435,10 @@ struct ContentView: View { return } - if !handle_post_notification(keypair: keypair, postbox: state.nostrNetwork.postbox, events: state.events, post: post) { - self.active_sheet = nil + Task { + if await !handle_post_notification(keypair: keypair, postbox: state.nostrNetwork.postbox, events: state.events, post: post) { + self.active_sheet = nil + } } } .onReceive(handle_notify(.new_mutes)) { _ in @@ -475,7 +481,7 @@ struct ContentView: View { } } 
.onReceive(handle_notify(.disconnect_relays)) { () in - damus_state.nostrNetwork.disconnectRelays() + Task { await damus_state.nostrNetwork.disconnectRelays() } } .onReceive(NotificationCenter.default.publisher(for: UIApplication.willEnterForegroundNotification)) { obj in print("txn: 📙 DAMUS ACTIVE NOTIFY") @@ -540,27 +546,29 @@ struct ContentView: View { damusClosingTask = nil damus_state.ndb.reopen() // Pinging the network will automatically reconnect any dead websocket connections - damus_state.nostrNetwork.ping() + await damus_state.nostrNetwork.ping() } @unknown default: break } } .onReceive(handle_notify(.onlyzaps_mode)) { hide in - home.filter_events() - - guard let ds = damus_state, - let profile_txn = ds.profiles.lookup(id: ds.pubkey), - let profile = profile_txn.unsafeUnownedValue, - let keypair = ds.keypair.to_full() - else { - return + Task { + home.filter_events() + + guard let ds = damus_state, + let profile_txn = ds.profiles.lookup(id: ds.pubkey), + let profile = profile_txn.unsafeUnownedValue, + let keypair = ds.keypair.to_full() + else { + return + } + + let prof = Profile(name: profile.name, display_name: profile.display_name, about: profile.about, picture: profile.picture, banner: profile.banner, website: profile.website, lud06: profile.lud06, lud16: profile.lud16, nip05: profile.nip05, damus_donation: profile.damus_donation, reactions: !hide) + + guard let profile_ev = make_metadata_event(keypair: keypair, metadata: prof) else { return } + await ds.nostrNetwork.postbox.send(profile_ev) } - - let prof = Profile(name: profile.name, display_name: profile.display_name, about: profile.about, picture: profile.picture, banner: profile.banner, website: profile.website, lud06: profile.lud06, lud16: profile.lud16, nip05: profile.nip05, damus_donation: profile.damus_donation, reactions: !hide) - - guard let profile_ev = make_metadata_event(keypair: keypair, metadata: prof) else { return } - ds.nostrNetwork.postbox.send(profile_ev) } .alert(NSLocalizedString("User muted", comment: "Alert message to indicate the user has been muted"), isPresented: $user_muted_confirm, actions: { Button(NSLocalizedString("Thanks!", comment: "Button to close out of alert that informs that the action to muted a user was successful.")) { @@ -583,20 +591,22 @@ struct ContentView: View { } Button(NSLocalizedString("Yes, Overwrite", comment: "Text of button that confirms to overwrite the existing mutelist.")) { - guard let ds = damus_state, - let keypair = ds.keypair.to_full(), - let muting, - let mutelist = create_or_update_mutelist(keypair: keypair, mprev: nil, to_add: muting) - else { - return + Task { + guard let ds = damus_state, + let keypair = ds.keypair.to_full(), + let muting, + let mutelist = create_or_update_mutelist(keypair: keypair, mprev: nil, to_add: muting) + else { + return + } + + ds.mutelist_manager.set_mutelist(mutelist) + await ds.nostrNetwork.postbox.send(mutelist) + + confirm_overwrite_mutelist = false + confirm_mute = false + user_muted_confirm = true } - - ds.mutelist_manager.set_mutelist(mutelist) - ds.nostrNetwork.postbox.send(mutelist) - - confirm_overwrite_mutelist = false - confirm_mute = false - user_muted_confirm = true } }, message: { Text("No mute list found, create a new one? 
This will overwrite any previous mute lists.", comment: "Alert message prompt that asks if the user wants to create a new mute list, overwriting previous mute lists.") @@ -624,7 +634,7 @@ struct ContentView: View { } ds.mutelist_manager.set_mutelist(ev) - ds.nostrNetwork.postbox.send(ev) + Task { await ds.nostrNetwork.postbox.send(ev) } } } }, message: { @@ -676,7 +686,7 @@ struct ContentView: View { self.execute_open_action(openAction) } - func connect() { + func connect() async { // nostrdb var mndb = Ndb() if mndb == nil { @@ -698,7 +708,7 @@ struct ContentView: View { let settings = UserSettingsStore.globally_load_for(pubkey: pubkey) - let new_relay_filters = load_relay_filters(pubkey) == nil + let new_relay_filters = await load_relay_filters(pubkey) == nil self.damus_state = DamusState(keypair: keypair, likes: EventCounter(our_pubkey: pubkey), @@ -756,7 +766,7 @@ struct ContentView: View { Log.error("Failed to configure tips: %s", for: .tips, error.localizedDescription) } } - damus_state.nostrNetwork.connect() + await damus_state.nostrNetwork.connect() // TODO: Move this to a better spot. Not sure what is the best signal to listen to for sending initial filters DispatchQueue.main.asyncAfter(deadline: .now() + 0.25, execute: { self.home.send_initial_filters() @@ -764,26 +774,28 @@ struct ContentView: View { } func music_changed(_ state: MusicState) { - guard let damus_state else { return } - switch state { - case .playback_state: - break - case .song(let song): - guard let song, let kp = damus_state.keypair.to_full() else { return } - - let pdata = damus_state.profiles.profile_data(damus_state.pubkey) - - let desc = "\(song.title ?? "Unknown") - \(song.artist ?? "Unknown")" - let encodedDesc = desc.addingPercentEncoding(withAllowedCharacters: .urlQueryAllowed) - let url = encodedDesc.flatMap { enc in - URL(string: "spotify:search:\(enc)") + Task { + guard let damus_state else { return } + switch state { + case .playback_state: + break + case .song(let song): + guard let song, let kp = damus_state.keypair.to_full() else { return } + + let pdata = damus_state.profiles.profile_data(damus_state.pubkey) + + let desc = "\(song.title ?? "Unknown") - \(song.artist ?? "Unknown")" + let encodedDesc = desc.addingPercentEncoding(withAllowedCharacters: .urlQueryAllowed) + let url = encodedDesc.flatMap { enc in + URL(string: "spotify:search:\(enc)") + } + let music = UserStatus(type: .music, expires_at: Date.now.addingTimeInterval(song.playbackDuration), content: desc, created_at: UInt32(Date.now.timeIntervalSince1970), url: url) + + pdata.status.music = music + + guard let ev = music.to_note(keypair: kp) else { return } + await damus_state.nostrNetwork.postbox.send(ev) } - let music = UserStatus(type: .music, expires_at: Date.now.addingTimeInterval(song.playbackDuration), content: desc, created_at: UInt32(Date.now.timeIntervalSince1970), url: url) - - pdata.status.music = music - - guard let ev = music.to_note(keypair: kp) else { return } - damus_state.nostrNetwork.postbox.send(ev) } } @@ -935,7 +947,7 @@ func update_filters_with_since(last_of_kind: [UInt32: NostrEvent], filters: [Nos } } - +@MainActor func setup_notifications() { this_app.registerForRemoteNotifications() let center = UNUserNotificationCenter.current() @@ -992,14 +1004,14 @@ func timeline_name(_ timeline: Timeline?) 
-> String { } @discardableResult -func handle_unfollow(state: DamusState, unfollow: FollowRef) -> Bool { +func handle_unfollow(state: DamusState, unfollow: FollowRef) async -> Bool { guard let keypair = state.keypair.to_full() else { return false } let old_contacts = state.contacts.event - guard let ev = unfollow_reference(postbox: state.nostrNetwork.postbox, our_contacts: old_contacts, keypair: keypair, unfollow: unfollow) + guard let ev = await unfollow_reference(postbox: state.nostrNetwork.postbox, our_contacts: old_contacts, keypair: keypair, unfollow: unfollow) else { return false } @@ -1020,12 +1032,12 @@ func handle_unfollow(state: DamusState, unfollow: FollowRef) -> Bool { } @discardableResult -func handle_follow(state: DamusState, follow: FollowRef) -> Bool { +func handle_follow(state: DamusState, follow: FollowRef) async -> Bool { guard let keypair = state.keypair.to_full() else { return false } - guard let ev = follow_reference(box: state.nostrNetwork.postbox, our_contacts: state.contacts.event, keypair: keypair, follow: follow) + guard let ev = await follow_reference(box: state.nostrNetwork.postbox, our_contacts: state.contacts.event, keypair: keypair, follow: follow) else { return false } @@ -1045,7 +1057,7 @@ func handle_follow(state: DamusState, follow: FollowRef) -> Bool { } @discardableResult -func handle_follow_notif(state: DamusState, target: FollowTarget) -> Bool { +func handle_follow_notif(state: DamusState, target: FollowTarget) async -> Bool { switch target { case .pubkey(let pk): state.contacts.add_friend_pubkey(pk) @@ -1053,10 +1065,10 @@ func handle_follow_notif(state: DamusState, target: FollowTarget) -> Bool { state.contacts.add_friend_contact(ev) } - return handle_follow(state: state, follow: target.follow_ref) + return await handle_follow(state: state, follow: target.follow_ref) } -func handle_post_notification(keypair: FullKeypair, postbox: PostBox, events: EventCache, post: NostrPostResult) -> Bool { +func handle_post_notification(keypair: FullKeypair, postbox: PostBox, events: EventCache, post: NostrPostResult) async -> Bool { switch post { case .post(let post): //let post = tup.0 @@ -1065,17 +1077,17 @@ func handle_post_notification(keypair: FullKeypair, postbox: PostBox, events: Ev guard let new_ev = post.to_event(keypair: keypair) else { return false } - postbox.send(new_ev) + await postbox.send(new_ev) for eref in new_ev.referenced_ids.prefix(3) { // also broadcast at most 3 referenced events if let ev = events.lookup(eref) { - postbox.send(ev) + await postbox.send(ev) } } for qref in new_ev.referenced_quote_ids.prefix(3) { // also broadcast at most 3 referenced quoted events if let ev = events.lookup(qref.note_id) { - postbox.send(ev) + await postbox.send(ev) } } return true diff --git a/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift b/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift index c9b3a393a..287693a2e 100644 --- a/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift +++ b/damus/Core/Networking/NostrNetworkManager/NostrNetworkManager.swift @@ -50,18 +50,18 @@ class NostrNetworkManager { // MARK: - Control and lifecycle functions /// Connects the app to the Nostr network - func connect() { - self.userRelayList.connect() // Will load the user's list, apply it, and get RelayPool to connect to it. - Task { await self.profilesManager.load() } + func connect() async { + await self.userRelayList.connect() // Will load the user's list, apply it, and get RelayPool to connect to it. 
+ await self.profilesManager.load() } - func disconnectRelays() { - self.pool.disconnect() + func disconnectRelays() async { + await self.pool.disconnect() } func handleAppBackgroundRequest() async { await self.reader.cancelAllTasks() - self.pool.cleanQueuedRequestForSessionEnd() + await self.pool.cleanQueuedRequestForSessionEnd() } func close() async { @@ -75,18 +75,19 @@ class NostrNetworkManager { } // But await on each one to prevent race conditions for await value in group { continue } - pool.close() + await pool.close() } } - func ping() { - self.pool.ping() + func ping() async { + await self.pool.ping() } - func relaysForEvent(event: NostrEvent) -> [RelayURL] { + @MainActor + func relaysForEvent(event: NostrEvent) async -> [RelayURL] { // TODO(tyiu) Ideally this list would be sorted by the event author's outbox relay preferences // and reliability of relays to maximize chances of others finding this event. - if let relays = pool.seen[event.id] { + if let relays = await pool.seen[event.id] { return Array(relays) } @@ -103,30 +104,35 @@ class NostrNetworkManager { /// - This is also to help us migrate to the relay model. // TODO: Define a better interface. This is a temporary scaffold to replace direct relay pool access. After that is done, we can refactor this interface to be cleaner and reduce non-sense. - func sendToNostrDB(event: NostrEvent) { - self.pool.send_raw_to_local_ndb(.typical(.event(event))) + func sendToNostrDB(event: NostrEvent) async { + await self.pool.send_raw_to_local_ndb(.typical(.event(event))) } - func send(event: NostrEvent, to targetRelays: [RelayURL]? = nil, skipEphemeralRelays: Bool = true) { - self.pool.send(.event(event), to: targetRelays, skip_ephemeral: skipEphemeralRelays) + func send(event: NostrEvent, to targetRelays: [RelayURL]? = nil, skipEphemeralRelays: Bool = true) async { + await self.pool.send(.event(event), to: targetRelays, skip_ephemeral: skipEphemeralRelays) } + @MainActor func getRelay(_ id: RelayURL) -> RelayPool.Relay? { pool.get_relay(id) } + @MainActor var connectedRelays: [RelayPool.Relay] { self.pool.relays } + @MainActor var ourRelayDescriptors: [RelayPool.RelayDescriptor] { self.pool.our_descriptors } - func relayURLsThatSawNote(id: NoteId) -> Set? { - return self.pool.seen[id] + @MainActor + func relayURLsThatSawNote(id: NoteId) async -> Set? { + return await self.pool.seen[id] } + @MainActor func determineToRelays(filters: RelayFilters) -> [RelayURL] { return self.pool.our_descriptors .map { $0.url } @@ -137,8 +143,8 @@ class NostrNetworkManager { // TODO: Move this to NWCManager @discardableResult - func nwcPay(url: WalletConnectURL, post: PostBox, invoice: String, delay: TimeInterval? = 5.0, on_flush: OnFlush? = nil, zap_request: NostrEvent? = nil) -> NostrEvent? { - WalletConnect.pay(url: url, pool: self.pool, post: post, invoice: invoice, zap_request: nil) + func nwcPay(url: WalletConnectURL, post: PostBox, invoice: String, delay: TimeInterval? = 5.0, on_flush: OnFlush? = nil, zap_request: NostrEvent? = nil) async -> NostrEvent? 
{ + await WalletConnect.pay(url: url, pool: self.pool, post: post, invoice: invoice, zap_request: nil) } /// Send a donation zap to the Damus team @@ -154,7 +160,7 @@ class NostrNetworkManager { } print("damus-donation donating...") - WalletConnect.pay(url: nwc, pool: self.pool, post: self.postbox, invoice: invoice, zap_request: nil, delay: nil) + await WalletConnect.pay(url: nwc, pool: self.pool, post: self.postbox, invoice: invoice, zap_request: nil, delay: nil) } } diff --git a/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift b/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift index 7ea4cfb6f..d08de04fb 100644 --- a/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift +++ b/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift @@ -192,14 +192,14 @@ extension NostrNetworkManager { Self.logger.debug("Network subscription \(id.uuidString, privacy: .public): Started") let streamTask = Task { - while !self.pool.open { + while await !self.pool.open { Self.logger.info("\(id.uuidString, privacy: .public): RelayPool closed. Sleeping for 1 second before resuming.") try await Task.sleep(nanoseconds: 1_000_000_000) continue } do { - for await item in self.pool.subscribe(filters: filters, to: desiredRelays, id: id) { + for await item in await self.pool.subscribe(filters: filters, to: desiredRelays, id: id) { // NO-OP. Notes will be automatically ingested by NostrDB // TODO: Improve efficiency of subscriptions? try Task.checkCancellation() @@ -333,7 +333,7 @@ extension NostrNetworkManager { } // Not available in local ndb, stream from network - outerLoop: for await item in self.pool.subscribe(filters: [NostrFilter(ids: [noteId], limit: 1)], to: targetRelays, eoseTimeout: timeout) { + outerLoop: for await item in await self.pool.subscribe(filters: [NostrFilter(ids: [noteId], limit: 1)], to: targetRelays, eoseTimeout: timeout) { switch item { case .event(let event): return NdbNoteLender(ownedNdbNote: event) diff --git a/damus/Core/Networking/NostrNetworkManager/UserRelayListManager.swift b/damus/Core/Networking/NostrNetworkManager/UserRelayListManager.swift index 104b2a925..83fc96cad 100644 --- a/damus/Core/Networking/NostrNetworkManager/UserRelayListManager.swift +++ b/damus/Core/Networking/NostrNetworkManager/UserRelayListManager.swift @@ -122,68 +122,68 @@ extension NostrNetworkManager { // MARK: - Listening to and handling relay updates from the network - func connect() { - self.load() + func connect() async { + await self.load() self.relayListObserverTask?.cancel() self.relayListObserverTask = Task { await self.listenAndHandleRelayUpdates() } self.walletUpdatesObserverTask?.cancel() - self.walletUpdatesObserverTask = handle_notify(.attached_wallet).sink { _ in self.load() } + self.walletUpdatesObserverTask = handle_notify(.attached_wallet).sink { _ in Task { await self.load() } } } func listenAndHandleRelayUpdates() async { let filter = NostrFilter(kinds: [.relay_list], authors: [delegate.keypair.pubkey]) for await noteLender in self.reader.streamIndefinitely(filters: [filter]) { let currentRelayListCreationDate = self.getUserCurrentRelayListCreationDate() - try? noteLender.borrow({ note in + try? await noteLender.borrow({ note in guard note.pubkey == self.delegate.keypair.pubkey else { return } // Ensure this new list was ours guard note.createdAt > (currentRelayListCreationDate ?? 0) else { return } // Ensure this is a newer list guard let relayList = try? 
NIP65.RelayList(event: note) else { return } // Ensure it is a valid NIP-65 list - try? self.set(userRelayList: relayList) // Set the validated list + try? await self.set(userRelayList: relayList) // Set the validated list }) } } // MARK: - Editing the user's relay list - func upsert(relay: NIP65.RelayList.RelayItem, force: Bool = false, overwriteExisting: Bool = false) throws(UpdateError) { + func upsert(relay: NIP65.RelayList.RelayItem, force: Bool = false, overwriteExisting: Bool = false) async throws(UpdateError) { guard let currentUserRelayList = force ? self.getBestEffortRelayList() : self.getUserCurrentRelayList() else { throw .noInitialRelayList } guard !currentUserRelayList.relays.keys.contains(relay.url) || overwriteExisting else { throw .relayAlreadyExists } var newList = currentUserRelayList.relays newList[relay.url] = relay - try self.set(userRelayList: NIP65.RelayList(relays: Array(newList.values))) + try await self.set(userRelayList: NIP65.RelayList(relays: Array(newList.values))) } - func insert(relay: NIP65.RelayList.RelayItem, force: Bool = false) throws(UpdateError) { + func insert(relay: NIP65.RelayList.RelayItem, force: Bool = false) async throws(UpdateError) { guard let currentUserRelayList = force ? self.getBestEffortRelayList() : self.getUserCurrentRelayList() else { throw .noInitialRelayList } guard currentUserRelayList.relays[relay.url] == nil else { throw .relayAlreadyExists } - try self.upsert(relay: relay, force: force) + try await self.upsert(relay: relay, force: force) } - func remove(relayURL: RelayURL, force: Bool = false) throws(UpdateError) { + func remove(relayURL: RelayURL, force: Bool = false) async throws(UpdateError) { guard let currentUserRelayList = force ? self.getBestEffortRelayList() : self.getUserCurrentRelayList() else { throw .noInitialRelayList } guard currentUserRelayList.relays.keys.contains(relayURL) || force else { throw .noSuchRelay } var newList = currentUserRelayList.relays newList[relayURL] = nil - try self.set(userRelayList: NIP65.RelayList(relays: Array(newList.values))) + try await self.set(userRelayList: NIP65.RelayList(relays: Array(newList.values))) } - func set(userRelayList: NIP65.RelayList) throws(UpdateError) { + func set(userRelayList: NIP65.RelayList) async throws(UpdateError) { guard let fullKeypair = delegate.keypair.to_full() else { throw .notAuthorizedToChangeRelayList } guard let relayListEvent = userRelayList.toNostrEvent(keypair: fullKeypair) else { throw .cannotFormRelayListEvent } - self.apply(newRelayList: self.computeRelaysToConnectTo(with: userRelayList)) + await self.apply(newRelayList: self.computeRelaysToConnectTo(with: userRelayList)) - self.pool.send(.event(relayListEvent)) // This will send to NostrDB as well, which will locally save that NIP-65 event + await self.pool.send(.event(relayListEvent)) // This will send to NostrDB as well, which will locally save that NIP-65 event self.delegate.latestRelayListEventIdHex = relayListEvent.id.hex() // Make sure we are able to recall this event from NostrDB } // MARK: - Syncing our saved user relay list with the active `RelayPool` /// Loads the current user relay list - func load() { - self.apply(newRelayList: self.relaysToConnectTo()) + func load() async { + await self.apply(newRelayList: self.relaysToConnectTo()) } /// Loads a new relay list into the active relay pool, making sure it matches the specified relay list. 
@@ -197,7 +197,8 @@ extension NostrNetworkManager { /// /// - This is `private` because syncing the user's saved relay list with the relay pool is `NostrNetworkManager`'s responsibility, /// so we do not want other classes to forcibly load this. - private func apply(newRelayList: [RelayPool.RelayDescriptor]) { + @MainActor + private func apply(newRelayList: [RelayPool.RelayDescriptor]) async { let currentRelayList = self.pool.relays.map({ $0.descriptor }) var changed = false @@ -217,31 +218,37 @@ extension NostrNetworkManager { let relaysToRemove = currentRelayURLs.subtracting(newRelayURLs) let relaysToAdd = newRelayURLs.subtracting(currentRelayURLs) - // Remove relays not in the new list - relaysToRemove.forEach { url in - pool.remove_relay(url) - changed = true - } + await withTaskGroup { taskGroup in + // Remove relays not in the new list + relaysToRemove.forEach { url in + taskGroup.addTask(operation: { await self.pool.remove_relay(url) }) + changed = true + } - // Add new relays from the new list - relaysToAdd.forEach { url in - guard let descriptor = newRelayList.first(where: { $0.url == url }) else { return } - add_new_relay( - model_cache: delegate.relayModelCache, - relay_filters: delegate.relayFilters, - pool: pool, - descriptor: descriptor, - new_relay_filters: new_relay_filters, - logging_enabled: delegate.developerMode - ) - changed = true + // Add new relays from the new list + relaysToAdd.forEach { url in + guard let descriptor = newRelayList.first(where: { $0.url == url }) else { return } + taskGroup.addTask(operation: { + await add_new_relay( + model_cache: self.delegate.relayModelCache, + relay_filters: self.delegate.relayFilters, + pool: self.pool, + descriptor: descriptor, + new_relay_filters: new_relay_filters, + logging_enabled: self.delegate.developerMode + ) + }) + changed = true + } + + for await value in taskGroup { continue } } // Always tell RelayPool to connect whether or not we are already connected. // This is because: // 1. Internally it won't redo the connection because of internal checks // 2. Even if the relay list has not changed, relays may have been disconnected from app lifecycle or other events - pool.connect() + await pool.connect() if changed { notify(.relays_changed) @@ -281,8 +288,8 @@ fileprivate extension NIP65.RelayList { /// - descriptor: The description of the relay being added /// - new_relay_filters: Whether to insert new relay filters /// - logging_enabled: Whether logging is enabled -fileprivate func add_new_relay(model_cache: RelayModelCache, relay_filters: RelayFilters, pool: RelayPool, descriptor: RelayPool.RelayDescriptor, new_relay_filters: Bool, logging_enabled: Bool) { - try? pool.add_relay(descriptor) +fileprivate func add_new_relay(model_cache: RelayModelCache, relay_filters: RelayFilters, pool: RelayPool, descriptor: RelayPool.RelayDescriptor, new_relay_filters: Bool, logging_enabled: Bool) async { + try? 
await pool.add_relay(descriptor) let url = descriptor.url let relay_id = url @@ -300,7 +307,7 @@ fileprivate func add_new_relay(model_cache: RelayModelCache, relay_filters: Rela model_cache.insert(model: model) if logging_enabled { - pool.setLog(model.log, for: relay_id) + Task { await pool.setLog(model.log, for: relay_id) } } // if this is the first time adding filters, we should filter non-paid relays diff --git a/damus/Core/Nostr/RelayConnection.swift b/damus/Core/Nostr/RelayConnection.swift index 987e1bd5a..608c9ea72 100644 --- a/damus/Core/Nostr/RelayConnection.swift +++ b/damus/Core/Nostr/RelayConnection.swift @@ -48,13 +48,13 @@ final class RelayConnection: ObservableObject { private lazy var socket = WebSocket(relay_url.url) private var subscriptionToken: AnyCancellable? - private var handleEvent: (NostrConnectionEvent) -> () + private var handleEvent: (NostrConnectionEvent) async -> () private var processEvent: (WebSocketEvent) -> () private let relay_url: RelayURL var log: RelayLog? init(url: RelayURL, - handleEvent: @escaping (NostrConnectionEvent) -> (), + handleEvent: @escaping (NostrConnectionEvent) async -> (), processUnverifiedWSEvent: @escaping (WebSocketEvent) -> ()) { self.relay_url = url @@ -95,12 +95,12 @@ final class RelayConnection: ObservableObject { .sink { [weak self] completion in switch completion { case .failure(let error): - self?.receive(event: .error(error)) + Task { await self?.receive(event: .error(error)) } case .finished: - self?.receive(event: .disconnected(.normalClosure, nil)) + Task { await self?.receive(event: .disconnected(.normalClosure, nil)) } } } receiveValue: { [weak self] event in - self?.receive(event: event) + Task { await self?.receive(event: event) } } socket.connect() @@ -138,7 +138,7 @@ final class RelayConnection: ObservableObject { } } - private func receive(event: WebSocketEvent) { + private func receive(event: WebSocketEvent) async { assert(!Thread.isMainThread, "This code must not be executed on the main thread") processEvent(event) switch event { @@ -149,7 +149,7 @@ final class RelayConnection: ObservableObject { self.isConnecting = false } case .message(let message): - self.receive(message: message) + await self.receive(message: message) case .disconnected(let closeCode, let reason): if closeCode != .normalClosure { Log.error("⚠️ Warning: RelayConnection (%d) closed with code: %s", for: .networking, String(describing: closeCode), String(describing: reason)) @@ -176,10 +176,8 @@ final class RelayConnection: ObservableObject { self.reconnect_with_backoff() } } - DispatchQueue.main.async { - guard let ws_connection_event = NostrConnectionEvent.WSConnectionEvent.from(full_ws_event: event) else { return } - self.handleEvent(.ws_connection_event(ws_connection_event)) - } + guard let ws_connection_event = NostrConnectionEvent.WSConnectionEvent.from(full_ws_event: event) else { return } + await self.handleEvent(.ws_connection_event(ws_connection_event)) if let description = event.description { log?.add(description) @@ -213,21 +211,19 @@ final class RelayConnection: ObservableObject { } } - private func receive(message: URLSessionWebSocketTask.Message) { + private func receive(message: URLSessionWebSocketTask.Message) async { switch message { case .string(let messageString): // NOTE: Once we switch to the local relay model, // we will not need to verify nostr events at this point. 
if let ev = decode_and_verify_nostr_response(txt: messageString) { - DispatchQueue.main.async { - self.handleEvent(.nostr_event(ev)) - } + await self.handleEvent(.nostr_event(ev)) return } print("failed to decode event \(messageString)") case .data(let messageData): if let messageString = String(data: messageData, encoding: .utf8) { - receive(message: .string(messageString)) + await receive(message: .string(messageString)) } @unknown default: print("An unexpected URLSessionWebSocketTask.Message was received.") diff --git a/damus/Core/Nostr/RelayPool.swift b/damus/Core/Nostr/RelayPool.swift index 867057448..8cf8e4343 100644 --- a/damus/Core/Nostr/RelayPool.swift +++ b/damus/Core/Nostr/RelayPool.swift @@ -12,7 +12,7 @@ struct RelayHandler { let sub_id: String let filters: [NostrFilter]? let to: [RelayURL]? - var callback: (RelayURL, NostrConnectionEvent) -> () + var handler: AsyncStream<(RelayURL, NostrConnectionEvent)>.Continuation } struct QueuedRequest { @@ -27,7 +27,8 @@ struct SeenEvent: Hashable { } /// Establishes and manages connections and subscriptions to a list of relays. -class RelayPool { +actor RelayPool { + @MainActor private(set) var relays: [Relay] = [] var open: Bool = false var handlers: [RelayHandler] = [] @@ -50,65 +51,86 @@ class RelayPool { /// This is to avoid error states and undefined behaviour related to hitting subscription limits on the relays, by letting those wait instead — with the principle that although slower is not ideal, it is better than completely broken. static let MAX_CONCURRENT_SUBSCRIPTION_LIMIT = 14 // This number is only an educated guess based on some local experiments. - func close() { - disconnect() - relays = [] + func close() async { + await disconnect() + await clearRelays() open = false handlers = [] request_queue = [] - seen.removeAll() + await clearSeen() counts = [:] keypair = nil } + + @MainActor + private func clearRelays() { + relays = [] + } + + private func clearSeen() { + seen.removeAll() + } init(ndb: Ndb, keypair: Keypair? = nil) { self.ndb = ndb self.keypair = keypair network_monitor.pathUpdateHandler = { [weak self] path in - if (path.status == .satisfied || path.status == .requiresConnection) && self?.last_network_status != path.status { - DispatchQueue.main.async { - self?.connect_to_disconnected() - } - } - - if let self, path.status != self.last_network_status { - for relay in self.relays { - relay.connection.log?.add("Network state: \(path.status)") - } - } - - self?.last_network_status = path.status + Task { await self?.pathUpdateHandler(path: path) } } network_monitor.start(queue: network_monitor_queue) } + private func pathUpdateHandler(path: NWPath) async { + if (path.status == .satisfied || path.status == .requiresConnection) && self.last_network_status != path.status { + await self.connect_to_disconnected() + } + + if path.status != self.last_network_status { + for relay in await self.relays { + relay.connection.log?.add("Network state: \(path.status)") + } + } + + self.last_network_status = path.status + } + + @MainActor var our_descriptors: [RelayDescriptor] { return all_descriptors.filter { d in !d.ephemeral } } + @MainActor var all_descriptors: [RelayDescriptor] { relays.map { r in r.descriptor } } + @MainActor var num_connected: Int { return relays.reduce(0) { n, r in n + (r.connection.isConnected ? 
1 : 0) } } func remove_handler(sub_id: String) { - self.handlers = handlers.filter { $0.sub_id != sub_id } + self.handlers = handlers.filter { + if $0.sub_id != sub_id { + return true + } + else { + $0.handler.finish() + return false + } + } Log.debug("Removing %s handler, current: %d", for: .networking, sub_id, handlers.count) } - func ping() { - Log.info("Pinging %d relays", for: .networking, relays.count) - for relay in relays { + func ping() async { + Log.info("Pinging %d relays", for: .networking, await relays.count) + for relay in await relays { relay.connection.ping() } } - @MainActor - func register_handler(sub_id: String, filters: [NostrFilter]?, to relays: [RelayURL]? = nil, handler: @escaping (RelayURL, NostrConnectionEvent) -> ()) async { + func register_handler(sub_id: String, filters: [NostrFilter]?, to relays: [RelayURL]? = nil, handler: AsyncStream<(RelayURL, NostrConnectionEvent)>.Continuation) async { while handlers.count > Self.MAX_CONCURRENT_SUBSCRIPTION_LIMIT { Log.debug("%s: Too many subscriptions, waiting for subscription pool to clear", for: .networking, sub_id) try? await Task.sleep(for: .seconds(1)) @@ -117,20 +139,22 @@ class RelayPool { handlers = handlers.filter({ handler in if handler.sub_id == sub_id { Log.error("Duplicate handler detected for the same subscription ID. Overriding.", for: .networking) + handler.handler.finish() return false } else { return true } }) - self.handlers.append(RelayHandler(sub_id: sub_id, filters: filters, to: relays, callback: handler)) + self.handlers.append(RelayHandler(sub_id: sub_id, filters: filters, to: relays, handler: handler)) Log.debug("Registering %s handler, current: %d", for: .networking, sub_id, self.handlers.count) } - func remove_relay(_ relay_id: RelayURL) { + @MainActor + func remove_relay(_ relay_id: RelayURL) async { var i: Int = 0 - self.disconnect(to: [relay_id]) + await self.disconnect(to: [relay_id]) for relay in relays { if relay.id == relay_id { @@ -143,13 +167,13 @@ class RelayPool { } } - func add_relay(_ desc: RelayDescriptor) throws(RelayError) { + func add_relay(_ desc: RelayDescriptor) async throws(RelayError) { let relay_id = desc.url - if get_relay(relay_id) != nil { + if await get_relay(relay_id) != nil { throw RelayError.RelayAlreadyExists } let conn = RelayConnection(url: desc.url, handleEvent: { event in - self.handle_event(relay_id: relay_id, event: event) + await self.handle_event(relay_id: relay_id, event: event) }, processUnverifiedWSEvent: { wsev in guard case .message(let msg) = wsev, case .string(let str) = msg @@ -159,19 +183,24 @@ class RelayPool { self.message_received_function?((str, desc)) }) let relay = Relay(descriptor: desc, connection: conn) + await self.appendRelayToList(relay: relay) + } + + @MainActor + private func appendRelayToList(relay: Relay) { self.relays.append(relay) } - func setLog(_ log: RelayLog, for relay_id: RelayURL) { + func setLog(_ log: RelayLog, for relay_id: RelayURL) async { // add the current network state to the log log.add("Network state: \(network_monitor.currentPath.status)") - get_relay(relay_id)?.connection.log = log + await get_relay(relay_id)?.connection.log = log } /// This is used to retry dead connections - func connect_to_disconnected() { - for relay in relays { + func connect_to_disconnected() async { + for relay in await relays { let c = relay.connection let is_connecting = c.isConnecting @@ -188,16 +217,16 @@ class RelayPool { } } - func reconnect(to: [RelayURL]? = nil) { - let relays = to.map{ get_relays($0) } ?? 
self.relays + func reconnect(to targetRelays: [RelayURL]? = nil) async { + let relays = await getRelays(targetRelays: targetRelays) for relay in relays { // don't try to reconnect to broken relays relay.connection.reconnect() } } - func connect(to: [RelayURL]? = nil) { - let relays = to.map{ get_relays($0) } ?? self.relays + func connect(to targetRelays: [RelayURL]? = nil) async { + let relays = await getRelays(targetRelays: targetRelays) for relay in relays { relay.connection.connect() } @@ -205,15 +234,20 @@ class RelayPool { open = true } - func disconnect(to: [RelayURL]? = nil) { + func disconnect(to targetRelays: [RelayURL]? = nil) async { // Mark as closed first, to prevent other classes from pulling data while the relays are being disconnected open = false - let relays = to.map{ get_relays($0) } ?? self.relays + let relays = await getRelays(targetRelays: targetRelays) for relay in relays { relay.connection.disconnect() } } + @MainActor + func getRelays(targetRelays: [RelayURL]? = nil) -> [Relay] { + targetRelays.map{ get_relays($0) } ?? self.relays + } + /// Deletes queued up requests that should not persist between app sessions (i.e. when the app goes to background then back to foreground) func cleanQueuedRequestForSessionEnd() { request_queue = request_queue.filter { request in @@ -231,14 +265,14 @@ class RelayPool { } } - func unsubscribe(sub_id: String, to: [RelayURL]? = nil) { + func unsubscribe(sub_id: String, to: [RelayURL]? = nil) async { if to == nil { self.remove_handler(sub_id: sub_id) } - self.send(.unsubscribe(sub_id), to: to) + await self.send(.unsubscribe(sub_id), to: to) } - func subscribe(sub_id: String, filters: [NostrFilter], handler: @escaping (RelayURL, NostrConnectionEvent) -> (), to: [RelayURL]? = nil) { + func subscribe(sub_id: String, filters: [NostrFilter], handler: AsyncStream<(RelayURL, NostrConnectionEvent)>.Continuation, to: [RelayURL]? = nil) { Task { await register_handler(sub_id: sub_id, filters: filters, to: to, handler: handler) @@ -246,7 +280,7 @@ class RelayPool { // When the caller specifies specific relays, do not skip ephemeral relays to respect the exact list given by the caller. let shouldSkipEphemeralRelays = to == nil ? true : false - send(.subscribe(.init(filters: filters, sub_id: sub_id)), to: to, skip_ephemeral: shouldSkipEphemeralRelays) + await send(.subscribe(.init(filters: filters, sub_id: sub_id)), to: to, skip_ephemeral: shouldSkipEphemeralRelays) } } @@ -257,9 +291,9 @@ class RelayPool { /// - desiredRelays: The desired relays which to subsctibe to. If `nil`, it defaults to the `RelayPool`'s default list /// - eoseTimeout: The maximum timeout which to give up waiting for the eoseSignal /// - Returns: Returns an async stream that callers can easily consume via a for-loop - func subscribe(filters: [NostrFilter], to desiredRelays: [RelayURL]? = nil, eoseTimeout: Duration? = nil, id: UUID? = nil) -> AsyncStream { + func subscribe(filters: [NostrFilter], to desiredRelays: [RelayURL]? = nil, eoseTimeout: Duration? = nil, id: UUID? = nil) async -> AsyncStream { let eoseTimeout = eoseTimeout ?? .seconds(5) - let desiredRelays = desiredRelays ?? self.relays.map({ $0.descriptor.url }) + let desiredRelays = await getRelays(targetRelays: desiredRelays) let startTime = CFAbsoluteTimeGetCurrent() return AsyncStream { continuation in let id = id ?? 
UUID() @@ -267,34 +301,40 @@ class RelayPool { var seenEvents: Set = [] var relaysWhoFinishedInitialResults: Set = [] var eoseSent = false - self.subscribe(sub_id: sub_id, filters: filters, handler: { (relayUrl, connectionEvent) in - switch connectionEvent { - case .ws_connection_event(let ev): - // Websocket events such as connect/disconnect/error are already handled in `RelayConnection`. Do not perform any handling here. - // For the future, perhaps we should abstract away `.ws_connection_event` in `RelayPool`? Seems like something to be handled on the `RelayConnection` layer. - break - case .nostr_event(let nostrResponse): - guard nostrResponse.subid == sub_id else { return } // Do not stream items that do not belong in this subscription - switch nostrResponse { - case .event(_, let nostrEvent): - if seenEvents.contains(nostrEvent.id) { break } // Don't send two of the same events. - continuation.yield(with: .success(.event(nostrEvent))) - seenEvents.insert(nostrEvent.id) - case .notice(let note): - break // We do not support handling these yet - case .eose(_): - relaysWhoFinishedInitialResults.insert(relayUrl) - let desiredAndConnectedRelays = desiredRelays ?? self.relays.filter({ $0.connection.isConnected }).map({ $0.descriptor.url }) - Log.debug("RelayPool subscription %s: EOSE from %s. EOSE count: %d/%d. Elapsed: %.2f seconds.", for: .networking, id.uuidString, relayUrl.absoluteString, relaysWhoFinishedInitialResults.count, Set(desiredAndConnectedRelays).count, CFAbsoluteTimeGetCurrent() - startTime) - if relaysWhoFinishedInitialResults == Set(desiredAndConnectedRelays) { - continuation.yield(with: .success(.eose)) - eoseSent = true + let upstreamStream = AsyncStream<(RelayURL, NostrConnectionEvent)> { upstreamContinuation in + self.subscribe(sub_id: sub_id, filters: filters, handler: upstreamContinuation, to: desiredRelays.map({ $0.descriptor.url })) + } + let upstreamStreamingTask = Task { + for await (relayUrl, connectionEvent) in upstreamStream { + try Task.checkCancellation() + switch connectionEvent { + case .ws_connection_event(let ev): + // Websocket events such as connect/disconnect/error are already handled in `RelayConnection`. Do not perform any handling here. + // For the future, perhaps we should abstract away `.ws_connection_event` in `RelayPool`? Seems like something to be handled on the `RelayConnection` layer. + break + case .nostr_event(let nostrResponse): + guard nostrResponse.subid == sub_id else { return } // Do not stream items that do not belong in this subscription + switch nostrResponse { + case .event(_, let nostrEvent): + if seenEvents.contains(nostrEvent.id) { break } // Don't send two of the same events. + continuation.yield(with: .success(.event(nostrEvent))) + seenEvents.insert(nostrEvent.id) + case .notice(let note): + break // We do not support handling these yet + case .eose(_): + relaysWhoFinishedInitialResults.insert(relayUrl) + let desiredAndConnectedRelays = desiredRelays.filter({ $0.connection.isConnected }).map({ $0.descriptor.url }) + Log.debug("RelayPool subscription %s: EOSE from %s. EOSE count: %d/%d. 
Elapsed: %.2f seconds.", for: .networking, id.uuidString, relayUrl.absoluteString, relaysWhoFinishedInitialResults.count, Set(desiredAndConnectedRelays).count, CFAbsoluteTimeGetCurrent() - startTime) + if relaysWhoFinishedInitialResults == Set(desiredAndConnectedRelays) { + continuation.yield(with: .success(.eose)) + eoseSent = true + } + case .ok(_): break // No need to handle this, we are not sending an event to the relay + case .auth(_): break // Handled in a separate function in RelayPool } - case .ok(_): break // No need to handle this, we are not sending an event to the relay - case .auth(_): break // Handled in a separate function in RelayPool } } - }, to: desiredRelays) + } let timeoutTask = Task { try? await Task.sleep(for: eoseTimeout) if !eoseSent { continuation.yield(with: .success(.eose)) } @@ -308,9 +348,12 @@ class RelayPool { @unknown default: break } - self.unsubscribe(sub_id: sub_id, to: desiredRelays) - self.remove_handler(sub_id: sub_id) + Task { + await self.unsubscribe(sub_id: sub_id, to: desiredRelays.map({ $0.descriptor.url })) + await self.remove_handler(sub_id: sub_id) + } timeoutTask.cancel() + upstreamStreamingTask.cancel() } } } @@ -322,11 +365,11 @@ class RelayPool { case eose } - func subscribe_to(sub_id: String, filters: [NostrFilter], to: [RelayURL]?, handler: @escaping (RelayURL, NostrConnectionEvent) -> ()) { + func subscribe_to(sub_id: String, filters: [NostrFilter], to: [RelayURL]?, handler: AsyncStream<(RelayURL, NostrConnectionEvent)>.Continuation) { Task { await register_handler(sub_id: sub_id, filters: filters, to: to, handler: handler) - send(.subscribe(.init(filters: filters, sub_id: sub_id)), to: to) + await send(.subscribe(.init(filters: filters, sub_id: sub_id)), to: to) } } @@ -341,7 +384,6 @@ class RelayPool { return c } - @MainActor func queue_req(r: NostrRequestType, relay: RelayURL, skip_ephemeral: Bool) { let count = count_queued(relay: relay) guard count <= 10 else { @@ -365,8 +407,8 @@ class RelayPool { } } - func send_raw(_ req: NostrRequestType, to: [RelayURL]? = nil, skip_ephemeral: Bool = true) { - let relays = to.map{ get_relays($0) } ?? self.relays + func send_raw(_ req: NostrRequestType, to: [RelayURL]? = nil, skip_ephemeral: Bool = true) async { + let relays = await getRelays(targetRelays: to) self.send_raw_to_local_ndb(req) // Always send Nostr events and data to NostrDB for a local copy @@ -394,15 +436,17 @@ class RelayPool { } } - func send(_ req: NostrRequest, to: [RelayURL]? = nil, skip_ephemeral: Bool = true) { - send_raw(.typical(req), to: to, skip_ephemeral: skip_ephemeral) + func send(_ req: NostrRequest, to: [RelayURL]? = nil, skip_ephemeral: Bool = true) async { + await send_raw(.typical(req), to: to, skip_ephemeral: skip_ephemeral) } + @MainActor func get_relays(_ ids: [RelayURL]) -> [Relay] { // don't include ephemeral relays in the default list to query relays.filter { ids.contains($0.id) } } + @MainActor func get_relay(_ id: RelayURL) -> Relay? 
{ relays.first(where: { $0.id == id }) } @@ -415,7 +459,7 @@ class RelayPool { } print("running queueing request: \(req.req) for \(relay_id)") - self.send_raw(req.req, to: [relay_id], skip_ephemeral: false) + Task { await self.send_raw(req.req, to: [relay_id], skip_ephemeral: false) } } } @@ -432,7 +476,7 @@ class RelayPool { } } - func resubscribeAll(relayId: RelayURL) { + func resubscribeAll(relayId: RelayURL) async { for handler in self.handlers { guard let filters = handler.filters else { continue } // When the caller specifies no relays, it is implied that the user wants to use the ones in the user relay list. Skip ephemeral relays in that case. @@ -446,11 +490,11 @@ class RelayPool { } Log.debug("%s: Sending resubscribe request to %s", for: .networking, handler.sub_id, relayId.absoluteString) - send(.subscribe(.init(filters: filters, sub_id: handler.sub_id)), to: [relayId], skip_ephemeral: shouldSkipEphemeralRelays) + await send(.subscribe(.init(filters: filters, sub_id: handler.sub_id)), to: [relayId], skip_ephemeral: shouldSkipEphemeralRelays) } } - func handle_event(relay_id: RelayURL, event: NostrConnectionEvent) { + func handle_event(relay_id: RelayURL, event: NostrConnectionEvent) async { record_seen(relay_id: relay_id, event: event) // When we reconnect, do two things @@ -459,20 +503,20 @@ class RelayPool { if case .ws_connection_event(let ws) = event { if case .connected = ws { run_queue(relay_id) - self.resubscribeAll(relayId: relay_id) + await self.resubscribeAll(relayId: relay_id) } } // Handle auth if case let .nostr_event(nostrResponse) = event, case let .auth(challenge_string) = nostrResponse { - if let relay = get_relay(relay_id) { + if let relay = await get_relay(relay_id) { print("received auth request from \(relay.descriptor.url.id)") relay.authentication_state = .pending if let keypair { if let fullKeypair = keypair.to_full() { if let authRequest = make_auth_request(keypair: fullKeypair, challenge_string: challenge_string, relay: relay) { - send(.auth(authRequest), to: [relay_id], skip_ephemeral: false) + await send(.auth(authRequest), to: [relay_id], skip_ephemeral: false) relay.authentication_state = .verified } else { print("failed to make auth request") @@ -491,13 +535,13 @@ class RelayPool { } for handler in handlers { - handler.callback(relay_id, event) + handler.handler.yield((relay_id, event)) } } } -func add_rw_relay(_ pool: RelayPool, _ url: RelayURL) { - try? pool.add_relay(RelayPool.RelayDescriptor(url: url, info: .readWrite)) +func add_rw_relay(_ pool: RelayPool, _ url: RelayURL) async { + try? await pool.add_relay(RelayPool.RelayDescriptor(url: url, info: .readWrite)) } diff --git a/damus/Features/Actions/ActionBar/Models/ActionBarModel.swift b/damus/Features/Actions/ActionBar/Models/ActionBarModel.swift index 7c9b06d9a..a6c4cfc7e 100644 --- a/damus/Features/Actions/ActionBar/Models/ActionBarModel.swift +++ b/damus/Features/Actions/ActionBar/Models/ActionBarModel.swift @@ -46,7 +46,8 @@ class ActionBarModel: ObservableObject { self.relays = relays } - func update(damus: DamusState, evid: NoteId) { + @MainActor + func update(damus: DamusState, evid: NoteId) async { self.likes = damus.likes.counts[evid] ?? 0 self.boosts = damus.boosts.counts[evid] ?? 0 self.zaps = damus.zaps.event_counts[evid] ?? 
0 @@ -58,7 +59,7 @@ class ActionBarModel: ObservableObject { self.our_zap = damus.zaps.our_zaps[evid]?.first self.our_reply = damus.replies.our_reply(evid) self.our_quote_repost = damus.quote_reposts.our_events[evid] - self.relays = (damus.nostrNetwork.relayURLsThatSawNote(id: evid) ?? []).count + self.relays = (await damus.nostrNetwork.relayURLsThatSawNote(id: evid) ?? []).count self.objectWillChange.send() } diff --git a/damus/Features/Actions/ActionBar/Views/EventActionBar.swift b/damus/Features/Actions/ActionBar/Views/EventActionBar.swift index 7973b67d9..7bbe2a6a2 100644 --- a/damus/Features/Actions/ActionBar/Views/EventActionBar.swift +++ b/damus/Features/Actions/ActionBar/Views/EventActionBar.swift @@ -89,8 +89,10 @@ struct EventActionBar: View { var like_swipe_button: some View { SwipeAction(image: "shaka", backgroundColor: DamusColors.adaptableGrey) { - send_like(emoji: damus_state.settings.default_emoji_reaction) - self.swipe_context?.state.wrappedValue = .closed + Task { + await send_like(emoji: damus_state.settings.default_emoji_reaction) + self.swipe_context?.state.wrappedValue = .closed + } } .swipeButtonStyle() .accessibilityLabel(NSLocalizedString("React with default reaction emoji", comment: "Accessibility label for react button")) @@ -138,7 +140,7 @@ struct EventActionBar: View { if bar.liked { //notify(.delete, bar.our_like) } else { - send_like(emoji: emoji) + Task { await send_like(emoji: emoji) } } } @@ -225,8 +227,15 @@ struct EventActionBar: View { } } - var event_relay_url_strings: [RelayURL] { - let relays = damus_state.nostrNetwork.relaysForEvent(event: event) + @State var event_relay_url_strings: [RelayURL] = [] + + func updateEventRelayURLStrings() async { + let newValue = await fetchEventRelayURLStrings() + self.event_relay_url_strings = newValue + } + + func fetchEventRelayURLStrings() async -> [RelayURL] { + let relays = await damus_state.nostrNetwork.relaysForEvent(event: event) if !relays.isEmpty { return relays.prefix(Constants.MAX_SHARE_RELAYS).map { $0 } } @@ -237,9 +246,10 @@ struct EventActionBar: View { var body: some View { self.content .onAppear { - self.bar.update(damus: damus_state, evid: self.event.id) Task.detached(priority: .background, operation: { + await self.bar.update(damus: damus_state, evid: self.event.id) self.fetchLNURL() + await self.updateEventRelayURLStrings() }) } .sheet(isPresented: $show_share_action, onDismiss: { self.show_share_action = false }) { @@ -268,7 +278,10 @@ struct EventActionBar: View { } .onReceive(handle_notify(.update_stats)) { target in guard target == self.event.id else { return } - self.bar.update(damus: self.damus_state, evid: target) + Task { + await self.bar.update(damus: self.damus_state, evid: target) + await self.updateEventRelayURLStrings() + } } .onReceive(handle_notify(.liked)) { liked in if liked.id != event.id { @@ -281,9 +294,9 @@ struct EventActionBar: View { } } - func send_like(emoji: String) { + func send_like(emoji: String) async { guard let keypair = damus_state.keypair.to_full(), - let like_ev = make_like_event(keypair: keypair, liked: event, content: emoji, relayURL: damus_state.nostrNetwork.relaysForEvent(event: event).first) else { + let like_ev = await make_like_event(keypair: keypair, liked: event, content: emoji, relayURL: damus_state.nostrNetwork.relaysForEvent(event: event).first) else { return } @@ -291,7 +304,7 @@ struct EventActionBar: View { generator.impactOccurred() - damus_state.nostrNetwork.postbox.send(like_ev) + await damus_state.nostrNetwork.postbox.send(like_ev) } // 
MARK: Helper structures diff --git a/damus/Features/Actions/ActionBar/Views/EventDetailBar.swift b/damus/Features/Actions/ActionBar/Views/EventDetailBar.swift index 604c4e306..f3f5a3afd 100644 --- a/damus/Features/Actions/ActionBar/Views/EventDetailBar.swift +++ b/damus/Features/Actions/ActionBar/Views/EventDetailBar.swift @@ -13,6 +13,7 @@ struct EventDetailBar: View { let target_pk: Pubkey @ObservedObject var bar: ActionBarModel + @State var relays: [RelayURL] = [] init(state: DamusState, target: NoteId, target_pk: Pubkey) { self.state = state @@ -61,7 +62,6 @@ struct EventDetailBar: View { } if bar.relays > 0 { - let relays = Array(state.nostrNetwork.relayURLsThatSawNote(id: target) ?? []) NavigationLink(value: Route.UserRelays(relays: relays)) { let nounString = pluralizedString(key: "relays_count", count: bar.relays) let noun = Text(nounString).foregroundColor(.gray) @@ -70,6 +70,18 @@ struct EventDetailBar: View { .buttonStyle(PlainButtonStyle()) } } + .onAppear { + Task { await self.updateSeenRelays() } + } + .onReceive(handle_notify(.update_stats)) { noteId in + guard noteId == target else { return } + Task { await self.updateSeenRelays() } + } + } + + func updateSeenRelays() async { + let relays = await Array(state.nostrNetwork.relayURLsThatSawNote(id: target) ?? []) + self.relays = relays } } diff --git a/damus/Features/Actions/ActionBar/Views/ShareAction.swift b/damus/Features/Actions/ActionBar/Views/ShareAction.swift index c3a1309ab..2e040469c 100644 --- a/damus/Features/Actions/ActionBar/Views/ShareAction.swift +++ b/damus/Features/Actions/ActionBar/Views/ShareAction.swift @@ -27,8 +27,15 @@ struct ShareAction: View { self._show_share = show_share } - var event_relay_url_strings: [RelayURL] { - let relays = userProfile.damus.nostrNetwork.relaysForEvent(event: event) + @State var event_relay_url_strings: [RelayURL] = [] + + func updateEventRelayURLStrings() async { + let newValue = await fetchEventRelayURLStrings() + self.event_relay_url_strings = newValue + } + + func fetchEventRelayURLStrings() async -> [RelayURL] { + let relays = await userProfile.damus.nostrNetwork.relaysForEvent(event: event) if !relays.isEmpty { return relays.prefix(Constants.MAX_SHARE_RELAYS).map { $0 } } @@ -80,8 +87,13 @@ struct ShareAction: View { } } } + .onReceive(handle_notify(.update_stats), perform: { noteId in + guard noteId == event.id else { return } + Task { await self.updateEventRelayURLStrings() } + }) .onAppear() { userProfile.subscribeToFindRelays() + Task { await self.updateEventRelayURLStrings() } } .onDisappear() { userProfile.unsubscribeFindRelays() diff --git a/damus/Features/Actions/Reports/Views/ReportView.swift b/damus/Features/Actions/Reports/Views/ReportView.swift index 6d5fcc789..33c622ec2 100644 --- a/damus/Features/Actions/Reports/Views/ReportView.swift +++ b/damus/Features/Actions/Reports/Views/ReportView.swift @@ -57,13 +57,13 @@ struct ReportView: View { .padding() } - func do_send_report() { + func do_send_report() async { guard let selected_report_type, let ev = NostrEvent(content: report_message, keypair: keypair.to_keypair(), kind: 1984, tags: target.reportTags(type: selected_report_type)) else { return } - postbox.send(ev) + await postbox.send(ev) report_sent = true report_id = bech32_note_id(ev.id) @@ -116,7 +116,7 @@ struct ReportView: View { Section(content: { Button(send_report_button_text) { - do_send_report() + Task { await do_send_report() } } .disabled(selected_report_type == nil) }, footer: { diff --git 
a/damus/Features/Actions/Reposts/Views/RepostAction.swift b/damus/Features/Actions/Reposts/Views/RepostAction.swift index 4f7ac4dcf..8669ce2eb 100644 --- a/damus/Features/Actions/Reposts/Views/RepostAction.swift +++ b/damus/Features/Actions/Reposts/Views/RepostAction.swift @@ -19,13 +19,15 @@ struct RepostAction: View { Button { dismiss() - - guard let keypair = self.damus_state.keypair.to_full(), - let boost = make_boost_event(keypair: keypair, boosted: self.event, relayURL: damus_state.nostrNetwork.relaysForEvent(event: self.event).first) else { - return + + Task { + guard let keypair = self.damus_state.keypair.to_full(), + let boost = await make_boost_event(keypair: keypair, boosted: self.event, relayURL: damus_state.nostrNetwork.relaysForEvent(event: self.event).first) else { + return + } + + await damus_state.nostrNetwork.postbox.send(boost) } - - damus_state.nostrNetwork.postbox.send(boost) } label: { Label(NSLocalizedString("Repost", comment: "Button to repost a note"), image: "repost") .frame(maxWidth: .infinity, minHeight: 50, maxHeight: 50, alignment: .leading) diff --git a/damus/Features/Chat/ChatEventView.swift b/damus/Features/Chat/ChatEventView.swift index 530e1b6cd..c7df2fbfd 100644 --- a/damus/Features/Chat/ChatEventView.swift +++ b/damus/Features/Chat/ChatEventView.swift @@ -197,8 +197,10 @@ struct ChatEventView: View { } .onChange(of: selected_emoji) { newSelectedEmoji in if let newSelectedEmoji { - send_like(emoji: newSelectedEmoji.value) - popover_state = .closed + Task { + await send_like(emoji: newSelectedEmoji.value) + popover_state = .closed + } } } } @@ -233,9 +235,9 @@ struct ChatEventView: View { ) } - func send_like(emoji: String) { + func send_like(emoji: String) async { guard let keypair = damus_state.keypair.to_full(), - let like_ev = make_like_event(keypair: keypair, liked: event, content: emoji, relayURL: damus_state.nostrNetwork.relaysForEvent(event: event).first) else { + let like_ev = make_like_event(keypair: keypair, liked: event, content: emoji, relayURL: await damus_state.nostrNetwork.relaysForEvent(event: event).first) else { return } @@ -244,7 +246,7 @@ struct ChatEventView: View { let generator = UIImpactFeedbackGenerator(style: .medium) generator.impactOccurred() - damus_state.nostrNetwork.postbox.send(like_ev) + await damus_state.nostrNetwork.postbox.send(like_ev) } var action_bar: some View { diff --git a/damus/Features/DMs/Views/DMChatView.swift b/damus/Features/DMs/Views/DMChatView.swift index 39a749368..50d13ccce 100644 --- a/damus/Features/DMs/Views/DMChatView.swift +++ b/damus/Features/DMs/Views/DMChatView.swift @@ -108,7 +108,7 @@ struct DMChatView: View, KeyboardReadable { Button( role: .none, action: { - send_message() + Task { await send_message() } } ) { Label("", image: "send") @@ -124,7 +124,7 @@ struct DMChatView: View, KeyboardReadable { */ } - func send_message() { + func send_message() async { let tags = [["p", pubkey.hex()]] guard let post_blocks = parse_post_blocks(content: dms.draft)?.blocks else { return @@ -138,7 +138,7 @@ struct DMChatView: View, KeyboardReadable { dms.draft = "" - damus_state.nostrNetwork.postbox.send(dm) + await damus_state.nostrNetwork.postbox.send(dm) handle_incoming_dm(ev: dm, our_pubkey: damus_state.pubkey, dms: damus_state.dms, prev_events: NewEventsBits()) diff --git a/damus/Features/Events/EventMenu.swift b/damus/Features/Events/EventMenu.swift index 28691fdd2..2e2bd6b82 100644 --- a/damus/Features/Events/EventMenu.swift +++ b/damus/Features/Events/EventMenu.swift @@ -64,8 +64,8 @@ struct 
MenuItems: View { self.profileModel = profileModel } - var event_relay_url_strings: [RelayURL] { - let relays = damus_state.nostrNetwork.relaysForEvent(event: event) + func event_relay_url_strings() async -> [RelayURL] { + let relays = await damus_state.nostrNetwork.relaysForEvent(event: event) if !relays.isEmpty { return relays.prefix(Constants.MAX_SHARE_RELAYS).map { $0 } } @@ -88,7 +88,7 @@ struct MenuItems: View { } Button { - UIPasteboard.general.string = Bech32Object.encode(.nevent(NEvent(event: event, relays: event_relay_url_strings))) + Task { UIPasteboard.general.string = Bech32Object.encode(.nevent(NEvent(event: event, relays: await event_relay_url_strings()))) } } label: { Label(NSLocalizedString("Copy note ID", comment: "Context menu option for copying the ID of the note."), image: "note-book") } @@ -122,7 +122,7 @@ struct MenuItems: View { if let full_keypair = self.damus_state.keypair.to_full(), let new_mutelist_ev = toggle_from_mutelist(keypair: full_keypair, prev: damus_state.mutelist_manager.event, to_toggle: .thread(event.thread_id(), duration?.date_from_now)) { damus_state.mutelist_manager.set_mutelist(new_mutelist_ev) - damus_state.nostrNetwork.postbox.send(new_mutelist_ev) + Task { await damus_state.nostrNetwork.postbox.send(new_mutelist_ev) } } let muted = damus_state.mutelist_manager.is_event_muted(event) isMutedThread = muted diff --git a/damus/Features/Events/EventView.swift b/damus/Features/Events/EventView.swift index cb4d87277..9885fe600 100644 --- a/damus/Features/Events/EventView.swift +++ b/damus/Features/Events/EventView.swift @@ -106,7 +106,7 @@ func format_date(date: Date, time_style: DateFormatter.Style = .short) -> String func make_actionbar_model(ev: NoteId, damus: DamusState) -> ActionBarModel { let model = ActionBarModel.empty() - model.update(damus: damus, evid: ev) + Task { await model.update(damus: damus, evid: ev) } return model } diff --git a/damus/Features/Events/SelectedEventView.swift b/damus/Features/Events/SelectedEventView.swift index b107f1034..5cff475d3 100644 --- a/damus/Features/Events/SelectedEventView.swift +++ b/damus/Features/Events/SelectedEventView.swift @@ -74,7 +74,7 @@ struct SelectedEventView: View { } .onReceive(handle_notify(.update_stats)) { target in guard target == self.event.id else { return } - self.bar.update(damus: self.damus, evid: target) + Task { await self.bar.update(damus: self.damus, evid: target) } } .compositingGroup() } diff --git a/damus/Features/FollowPack/Models/FollowPackModel.swift b/damus/Features/FollowPack/Models/FollowPackModel.swift index a66e669b3..87c6bf901 100644 --- a/damus/Features/FollowPack/Models/FollowPackModel.swift +++ b/damus/Features/FollowPack/Models/FollowPackModel.swift @@ -37,7 +37,7 @@ class FollowPackModel: ObservableObject { } func listenForUpdates(follow_pack_users: [Pubkey]) async { - let to_relays = damus_state.nostrNetwork.determineToRelays(filters: damus_state.relay_filters) + let to_relays = await damus_state.nostrNetwork.determineToRelays(filters: damus_state.relay_filters) var filter = NostrFilter(kinds: [.text, .chat]) filter.until = UInt32(Date.now.timeIntervalSince1970) filter.authors = follow_pack_users diff --git a/damus/Features/Follows/Models/Contacts+.swift b/damus/Features/Follows/Models/Contacts+.swift index 81dd98036..e52e24c75 100644 --- a/damus/Features/Follows/Models/Contacts+.swift +++ b/damus/Features/Follows/Models/Contacts+.swift @@ -9,17 +9,17 @@ import Foundation -func follow_reference(box: PostBox, our_contacts: NostrEvent?, keypair: FullKeypair, 
follow: FollowRef) -> NostrEvent? { +func follow_reference(box: PostBox, our_contacts: NostrEvent?, keypair: FullKeypair, follow: FollowRef) async -> NostrEvent? { guard let ev = follow_user_event(our_contacts: our_contacts, keypair: keypair, follow: follow) else { return nil } - box.send(ev) + await box.send(ev) return ev } -func unfollow_reference(postbox: PostBox, our_contacts: NostrEvent?, keypair: FullKeypair, unfollow: FollowRef) -> NostrEvent? { +func unfollow_reference(postbox: PostBox, our_contacts: NostrEvent?, keypair: FullKeypair, unfollow: FollowRef) async -> NostrEvent? { guard let cs = our_contacts else { return nil } @@ -28,7 +28,7 @@ func unfollow_reference(postbox: PostBox, our_contacts: NostrEvent?, keypair: Fu return nil } - postbox.send(ev) + await postbox.send(ev) return ev } diff --git a/damus/Features/Muting/Models/MutedThreadsManager.swift b/damus/Features/Muting/Models/MutedThreadsManager.swift index 7b463971a..7f766a22e 100644 --- a/damus/Features/Muting/Models/MutedThreadsManager.swift +++ b/damus/Features/Muting/Models/MutedThreadsManager.swift @@ -34,7 +34,7 @@ func migrate_old_muted_threads_to_new_mutelist(keypair: Keypair, damus_state: Da let previous_mute_list_event = damus_state.mutelist_manager.event guard let new_mutelist_event = create_or_update_mutelist(keypair: fullKeypair, mprev: previous_mute_list_event, to_add: Set(mutedThreads.map { MuteItem.thread($0, nil) })) else { return } damus_state.mutelist_manager.set_mutelist(new_mutelist_event) - damus_state.nostrNetwork.postbox.send(new_mutelist_event) + Task { await damus_state.nostrNetwork.postbox.send(new_mutelist_event) } // Set existing muted threads to an empty array UserDefaults.standard.set([], forKey: getMutedThreadsKey(pubkey: keypair.pubkey)) } diff --git a/damus/Features/Muting/Views/AddMuteItemView.swift b/damus/Features/Muting/Views/AddMuteItemView.swift index 4a4643953..bc5fb1500 100644 --- a/damus/Features/Muting/Views/AddMuteItemView.swift +++ b/damus/Features/Muting/Views/AddMuteItemView.swift @@ -87,7 +87,7 @@ struct AddMuteItemView: View { } state.mutelist_manager.set_mutelist(mutelist) - state.nostrNetwork.postbox.send(mutelist) + Task { await state.nostrNetwork.postbox.send(mutelist) } } new_text = "" diff --git a/damus/Features/Muting/Views/MutelistView.swift b/damus/Features/Muting/Views/MutelistView.swift index dcc25500e..d152dd0c7 100644 --- a/damus/Features/Muting/Views/MutelistView.swift +++ b/damus/Features/Muting/Views/MutelistView.swift @@ -30,8 +30,10 @@ struct MutelistView: View { } damus_state.mutelist_manager.set_mutelist(new_ev) - damus_state.nostrNetwork.postbox.send(new_ev) - updateMuteItems() + Task { + await damus_state.nostrNetwork.postbox.send(new_ev) + updateMuteItems() + } } label: { Label(NSLocalizedString("Delete", comment: "Button to remove a user from their mutelist."), image: "delete") } diff --git a/damus/Features/Onboarding/Views/OnboardingSuggestionsView.swift b/damus/Features/Onboarding/Views/OnboardingSuggestionsView.swift index 264ecff4b..dbfdcbba4 100644 --- a/damus/Features/Onboarding/Views/OnboardingSuggestionsView.swift +++ b/damus/Features/Onboarding/Views/OnboardingSuggestionsView.swift @@ -56,7 +56,7 @@ struct OnboardingSuggestionsView: View { // - We don't have other mechanisms to allow the user to edit this yet // // Therefore, it is better to just save it locally, and retrieve this once we build out https://github.com/damus-io/damus/issues/3042 - model.damus_state.nostrNetwork.sendToNostrDB(event: event) + Task { await 
model.damus_state.nostrNetwork.sendToNostrDB(event: event) } } var body: some View { diff --git a/damus/Features/Onboarding/Views/SaveKeysView.swift b/damus/Features/Onboarding/Views/SaveKeysView.swift index 9939d778e..6ec5dc3d7 100644 --- a/damus/Features/Onboarding/Views/SaveKeysView.swift +++ b/damus/Features/Onboarding/Views/SaveKeysView.swift @@ -75,7 +75,7 @@ struct SaveKeysView: View { .foregroundColor(.red) Button(action: { - complete_account_creation(account) + Task { await complete_account_creation(account) } }) { HStack { Text("Retry", comment: "Button to retry completing account creation after an error occurred.") @@ -89,7 +89,7 @@ struct SaveKeysView: View { Button(action: { save_key(account) - complete_account_creation(account) + Task { await complete_account_creation(account) } }) { HStack { Text("Save", comment: "Button to save key, complete account creation, and start using the app.") @@ -101,7 +101,7 @@ struct SaveKeysView: View { .padding(.top, 20) Button(action: { - complete_account_creation(account) + Task { await complete_account_creation(account) } }) { HStack { Text("Not now", comment: "Button to not save key, complete account creation, and start using the app.") @@ -125,7 +125,7 @@ struct SaveKeysView: View { credential_handler.save_credential(pubkey: account.pubkey, privkey: account.privkey) } - func complete_account_creation(_ account: CreateAccountModel) { + func complete_account_creation(_ account: CreateAccountModel) async { guard let first_contact_event else { error = NSLocalizedString("Could not create your initial contact list event. This is a software bug, please contact Damus support via support@damus.io or through our Nostr account for help.", comment: "Error message to the user indicating that the initial contact list failed to be created.") return @@ -139,14 +139,21 @@ struct SaveKeysView: View { let bootstrap_relays = load_bootstrap_relays(pubkey: account.pubkey) for relay in bootstrap_relays { - add_rw_relay(self.pool, relay) + await add_rw_relay(self.pool, relay) + } + + Task { + let stream = AsyncStream<(RelayURL, NostrConnectionEvent)> { streamContinuation in + Task { await self.pool.register_handler(sub_id: "signup", filters: nil, handler: streamContinuation) } + } + for await (relayUrl, connectionEvent) in stream { + await handle_event(relay: relayUrl, ev: connectionEvent) + } } - - Task { await self.pool.register_handler(sub_id: "signup", filters: nil, handler: handle_event) } self.loading = true - self.pool.connect() + await self.pool.connect() } func save_to_storage(first_contact_event: NdbNote, first_relay_list_event: NdbNote, for account: CreateAccountModel) { @@ -160,7 +167,7 @@ struct SaveKeysView: View { settings.latestRelayListEventIdHex = first_relay_list_event.id.hex() } - func handle_event(relay: RelayURL, ev: NostrConnectionEvent) { + func handle_event(relay: RelayURL, ev: NostrConnectionEvent) async { switch ev { case .ws_connection_event(let wsev): switch wsev { @@ -169,15 +176,15 @@ struct SaveKeysView: View { if let keypair = account.keypair.to_full(), let metadata_ev = make_metadata_event(keypair: keypair, metadata: metadata) { - self.pool.send(.event(metadata_ev)) + await self.pool.send(.event(metadata_ev)) } if let first_contact_event { - self.pool.send(.event(first_contact_event)) + await self.pool.send(.event(first_contact_event)) } if let first_relay_list_event { - self.pool.send(.event(first_relay_list_event)) + await self.pool.send(.event(first_relay_list_event)) } do { diff --git 
a/damus/Features/Posting/Models/DraftsModel.swift b/damus/Features/Posting/Models/DraftsModel.swift index 882e7b5e6..de00ac1b8 100644 --- a/damus/Features/Posting/Models/DraftsModel.swift +++ b/damus/Features/Posting/Models/DraftsModel.swift @@ -64,9 +64,9 @@ class DraftArtifacts: Equatable { /// - damus_state: The damus state, needed for encrypting, fetching Nostr data depedencies, and forming the NIP-37 draft /// - references: references in the post? /// - Returns: The NIP-37 draft packaged in a way that can be easily wrapped/unwrapped. - func to_nip37_draft(action: PostAction, damus_state: DamusState) throws -> NIP37Draft? { + func to_nip37_draft(action: PostAction, damus_state: DamusState) async throws -> NIP37Draft? { guard let keypair = damus_state.keypair.to_full() else { return nil } - let post = build_post(state: damus_state, action: action, draft: self) + let post = await build_post(state: damus_state, action: action, draft: self) guard let note = post.to_event(keypair: keypair) else { return nil } return try NIP37Draft(unwrapped_note: note, draft_id: self.id, keypair: keypair) } @@ -227,24 +227,24 @@ class Drafts: ObservableObject { func save(damus_state: DamusState) async { var draft_events: [NdbNote] = [] post_artifact_block: if let post_artifacts = self.post { - let nip37_draft = try? post_artifacts.to_nip37_draft(action: .posting(.user(damus_state.pubkey)), damus_state: damus_state) + let nip37_draft = try? await post_artifacts.to_nip37_draft(action: .posting(.user(damus_state.pubkey)), damus_state: damus_state) guard let wrapped_note = nip37_draft?.wrapped_note else { break post_artifact_block } draft_events.append(wrapped_note) } for (replied_to_note_id, reply_artifacts) in self.replies { guard let replied_to_note = damus_state.ndb.lookup_note(replied_to_note_id)?.unsafeUnownedValue?.to_owned() else { continue } - let nip37_draft = try? reply_artifacts.to_nip37_draft(action: .replying_to(replied_to_note), damus_state: damus_state) + let nip37_draft = try? await reply_artifacts.to_nip37_draft(action: .replying_to(replied_to_note), damus_state: damus_state) guard let wrapped_note = nip37_draft?.wrapped_note else { continue } draft_events.append(wrapped_note) } for (quoted_note_id, quote_note_artifacts) in self.quotes { guard let quoted_note = damus_state.ndb.lookup_note(quoted_note_id)?.unsafeUnownedValue?.to_owned() else { continue } - let nip37_draft = try? quote_note_artifacts.to_nip37_draft(action: .quoting(quoted_note), damus_state: damus_state) + let nip37_draft = try? await quote_note_artifacts.to_nip37_draft(action: .quoting(quoted_note), damus_state: damus_state) guard let wrapped_note = nip37_draft?.wrapped_note else { continue } draft_events.append(wrapped_note) } for (highlight, highlight_note_artifacts) in self.highlights { - let nip37_draft = try? highlight_note_artifacts.to_nip37_draft(action: .highlighting(highlight), damus_state: damus_state) + let nip37_draft = try? await highlight_note_artifacts.to_nip37_draft(action: .highlighting(highlight), damus_state: damus_state) guard let wrapped_note = nip37_draft?.wrapped_note else { continue } draft_events.append(wrapped_note) } @@ -254,7 +254,7 @@ class Drafts: ObservableObject { // TODO: Once it is time to implement draft syncing with relays, please consider the following: // - Privacy: Sending drafts to the network leaks metadata about app activity, and may break user expectations // - Down-sync conflict resolution: Consider how to solve conflicts for different draft versions holding the same ID (e.g. 
edited in Damus, then another client, then Damus again) - damus_state.nostrNetwork.sendToNostrDB(event: draft_event) + await damus_state.nostrNetwork.sendToNostrDB(event: draft_event) } DispatchQueue.main.async { diff --git a/damus/Features/Posting/Models/PostBox.swift b/damus/Features/Posting/Models/PostBox.swift index db5bb3b87..b34e7b60b 100644 --- a/damus/Features/Posting/Models/PostBox.swift +++ b/damus/Features/Posting/Models/PostBox.swift @@ -60,7 +60,14 @@ class PostBox { init(pool: RelayPool) { self.pool = pool self.events = [:] - Task { await pool.register_handler(sub_id: "postbox", filters: nil, to: nil, handler: handle_event) } + Task { + let stream = AsyncStream<(RelayURL, NostrConnectionEvent)> { streamContinuation in + Task { await self.pool.register_handler(sub_id: "postbox", filters: nil, to: nil, handler: streamContinuation) } + } + for await (relayUrl, connectionEvent) in stream { + handle_event(relay_id: relayUrl, connectionEvent) + } + } } // only works reliably on delay-sent events @@ -81,7 +88,7 @@ class PostBox { return nil } - func try_flushing_events() { + func try_flushing_events() async { let now = Int64(Date().timeIntervalSince1970) for kv in events { let event = kv.value @@ -95,7 +102,7 @@ class PostBox { if relayer.last_attempt == nil || (now >= (relayer.last_attempt! + Int64(relayer.retry_after))) { print("attempt #\(relayer.attempts) to flush event '\(event.event.content)' to \(relayer.relay) after \(relayer.retry_after) seconds") - flush_event(event, to_relay: relayer) + await flush_event(event, to_relay: relayer) } } } @@ -140,7 +147,7 @@ class PostBox { return prev_count != after_count } - private func flush_event(_ event: PostedEvent, to_relay: Relayer? = nil) { + private func flush_event(_ event: PostedEvent, to_relay: Relayer? = nil) async { var relayers = event.remaining if let to_relay { relayers = [to_relay] @@ -150,29 +157,35 @@ class PostBox { relayer.attempts += 1 relayer.last_attempt = Int64(Date().timeIntervalSince1970) relayer.retry_after *= 1.5 - if pool.get_relay(relayer.relay) != nil { + if await pool.get_relay(relayer.relay) != nil { print("flushing event \(event.event.id) to \(relayer.relay)") } else { print("could not find relay when flushing: \(relayer.relay)") } - pool.send(.event(event.event), to: [relayer.relay], skip_ephemeral: event.skip_ephemeral) + await pool.send(.event(event.event), to: [relayer.relay], skip_ephemeral: event.skip_ephemeral) } } - func send(_ event: NostrEvent, to: [RelayURL]? = nil, skip_ephemeral: Bool = true, delay: TimeInterval? = nil, on_flush: OnFlush? = nil) { + func send(_ event: NostrEvent, to: [RelayURL]? = nil, skip_ephemeral: Bool = true, delay: TimeInterval? = nil, on_flush: OnFlush? = nil) async { // Don't add event if we already have it if events[event.id] != nil { return } - let remaining = to ?? 
pool.our_descriptors.map { $0.url } + let remaining: [RelayURL] + if let to { + remaining = to + } + else { + remaining = await pool.our_descriptors.map { $0.url } + } let after = delay.map { d in Date.now.addingTimeInterval(d) } let posted_ev = PostedEvent(event: event, remaining: remaining, skip_ephemeral: skip_ephemeral, flush_after: after, on_flush: on_flush) events[event.id] = posted_ev if after == nil { - flush_event(posted_ev) + await flush_event(posted_ev) } } } diff --git a/damus/Features/Posting/Views/PostView.swift b/damus/Features/Posting/Views/PostView.swift index 1f9672a49..c9c501bc9 100644 --- a/damus/Features/Posting/Views/PostView.swift +++ b/damus/Features/Posting/Views/PostView.swift @@ -121,8 +121,8 @@ struct PostView: View { uploadTasks.removeAll() } - func send_post() { - let new_post = build_post(state: self.damus_state, post: self.post, action: action, uploadedMedias: uploadedMedias, references: self.references, filtered_pubkeys: filtered_pubkeys) + func send_post() async { + let new_post = await build_post(state: self.damus_state, post: self.post, action: action, uploadedMedias: uploadedMedias, references: self.references, filtered_pubkeys: filtered_pubkeys) notify(.post(.post(new_post))) @@ -190,7 +190,7 @@ struct PostView: View { var PostButton: some View { Button(NSLocalizedString("Post", comment: "Button to post a note.")) { - self.send_post() + Task { await self.send_post() } } .disabled(posting_disabled) .opacity(posting_disabled ? 0.5 : 1.0) @@ -829,8 +829,8 @@ func nip10_reply_tags(replying_to: NostrEvent, keypair: Keypair, relayURL: Relay return tags } -func build_post(state: DamusState, action: PostAction, draft: DraftArtifacts) -> NostrPost { - return build_post( +func build_post(state: DamusState, action: PostAction, draft: DraftArtifacts) async -> NostrPost { + return await build_post( state: state, post: draft.content, action: action, @@ -840,7 +840,7 @@ func build_post(state: DamusState, action: PostAction, draft: DraftArtifacts) -> ) } -func build_post(state: DamusState, post: NSAttributedString, action: PostAction, uploadedMedias: [UploadedMedia], references: [RefId], filtered_pubkeys: Set) -> NostrPost { +func build_post(state: DamusState, post: NSAttributedString, action: PostAction, uploadedMedias: [UploadedMedia], references: [RefId], filtered_pubkeys: Set) async -> NostrPost { // don't add duplicate pubkeys but retain order var pkset = Set() @@ -858,7 +858,7 @@ func build_post(state: DamusState, post: NSAttributedString, action: PostAction, acc.append(pk) } - return build_post(state: state, post: post, action: action, uploadedMedias: uploadedMedias, pubkeys: pks) + return await build_post(state: state, post: post, action: action, uploadedMedias: uploadedMedias, pubkeys: pks) } /// This builds a Nostr post from draft data from `PostView` or other draft-related classes @@ -874,7 +874,7 @@ func build_post(state: DamusState, post: NSAttributedString, action: PostAction, /// - uploadedMedias: The medias attached to this post /// - pubkeys: The referenced pubkeys /// - Returns: A NostrPost, which can then be signed into an event. 
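A minimal sketch (not part of the patch; the view and its stored properties are hypothetical, while `build_post(state:action:draft:)`, `PostAction.posting`, and `notify(.post(...))` come from the hunks in this file) of the call-site pattern adopted once `build_post` becomes async below, mirroring the `Task { await self.send_post() }` change earlier in PostView:

import SwiftUI

// Sketch only: ComposerSketchView is illustrative and relies on the project's
// DamusState / DraftArtifacts types; it is not introduced by this patch.
struct ComposerSketchView: View {
    let damus_state: DamusState
    let draft: DraftArtifacts

    var body: some View {
        Button("Post") {
            // Button actions stay synchronous; the now-async post construction
            // hops into a Task, as the PostView hunks above do.
            Task {
                let new_post = await build_post(state: damus_state,
                                                action: .posting(.user(damus_state.pubkey)),
                                                draft: draft)
                notify(.post(.post(new_post)))
            }
        }
    }
}
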
-func build_post(state: DamusState, post: NSAttributedString, action: PostAction, uploadedMedias: [UploadedMedia], pubkeys: [Pubkey]) -> NostrPost { +func build_post(state: DamusState, post: NSAttributedString, action: PostAction, uploadedMedias: [UploadedMedia], pubkeys: [Pubkey]) async -> NostrPost { let post = NSMutableAttributedString(attributedString: post) post.enumerateAttributes(in: NSRange(location: 0, length: post.length), options: []) { attributes, range, stop in let linkValue = attributes[.link] @@ -916,10 +916,10 @@ func build_post(state: DamusState, post: NSAttributedString, action: PostAction, switch action { case .replying_to(let replying_to): // start off with the reply tags - tags = nip10_reply_tags(replying_to: replying_to, keypair: state.keypair, relayURL: state.nostrNetwork.relaysForEvent(event: replying_to).first) + tags = nip10_reply_tags(replying_to: replying_to, keypair: state.keypair, relayURL: await state.nostrNetwork.relaysForEvent(event: replying_to).first) case .quoting(let ev): - let relay_urls = state.nostrNetwork.relaysForEvent(event: ev) + let relay_urls = await state.nostrNetwork.relaysForEvent(event: ev) let nevent = Bech32Object.encode(.nevent(NEvent(event: ev, relays: relay_urls.prefix(4).map { $0 }))) content.append("\n\nnostr:\(nevent)") diff --git a/damus/Features/Profile/Views/EditMetadataView.swift b/damus/Features/Profile/Views/EditMetadataView.swift index 99c4214de..6dc65b5bf 100644 --- a/damus/Features/Profile/Views/EditMetadataView.swift +++ b/damus/Features/Profile/Views/EditMetadataView.swift @@ -58,7 +58,7 @@ struct EditMetadataView: View { return profile } - func save() { + func save() async { let profile = to_profile() guard let keypair = damus_state.keypair.to_full(), let metadata_ev = make_metadata_event(keypair: keypair, metadata: profile) @@ -66,7 +66,7 @@ struct EditMetadataView: View { return } - damus_state.nostrNetwork.postbox.send(metadata_ev) + await damus_state.nostrNetwork.postbox.send(metadata_ev) } func is_ln_valid(ln: String) -> Bool { @@ -211,8 +211,10 @@ struct EditMetadataView: View { if !ln.isEmpty && !is_ln_valid(ln: ln) { confirm_ln_address = true } else { - save() - dismiss() + Task { + await save() + dismiss() + } } }, label: { Text(NSLocalizedString("Save", comment: "Button for saving profile.")) diff --git a/damus/Features/Profile/Views/ProfileView.swift b/damus/Features/Profile/Views/ProfileView.swift index 5e1d247b3..b9350ab6b 100644 --- a/damus/Features/Profile/Views/ProfileView.swift +++ b/damus/Features/Profile/Views/ProfileView.swift @@ -219,7 +219,7 @@ struct ProfileView: View { } damus_state.mutelist_manager.set_mutelist(new_ev) - damus_state.nostrNetwork.postbox.send(new_ev) + Task { await damus_state.nostrNetwork.postbox.send(new_ev) } } } else { Button(NSLocalizedString("Mute", comment: "Button to mute a profile"), role: .destructive) { diff --git a/damus/Features/Relays/Views/AddRelayView.swift b/damus/Features/Relays/Views/AddRelayView.swift index f4e4b7f65..dc11d8d0c 100644 --- a/damus/Features/Relays/Views/AddRelayView.swift +++ b/damus/Features/Relays/Views/AddRelayView.swift @@ -80,30 +80,32 @@ struct AddRelayView: View { } Button(action: { - if new_relay.starts(with: "wss://") == false && new_relay.starts(with: "ws://") == false { - new_relay = "wss://" + new_relay - } - - guard let url = RelayURL(new_relay) else { - relayAddErrorTitle = NSLocalizedString("Invalid relay address", comment: "Heading for an error when adding a relay") - relayAddErrorMessage = NSLocalizedString("Please check the 
address and try again", comment: "Tip for an error where the relay address being added is invalid") - return - } - - do { - try state.nostrNetwork.userRelayList.insert(relay: NIP65.RelayList.RelayItem(url: url, rwConfiguration: .readWrite)) - relayAddErrorTitle = nil // Clear error title - relayAddErrorMessage = nil // Clear error message - } - catch { - present_sheet(.error(self.humanReadableError(for: error))) + Task { + if new_relay.starts(with: "wss://") == false && new_relay.starts(with: "ws://") == false { + new_relay = "wss://" + new_relay + } + + guard let url = RelayURL(new_relay) else { + relayAddErrorTitle = NSLocalizedString("Invalid relay address", comment: "Heading for an error when adding a relay") + relayAddErrorMessage = NSLocalizedString("Please check the address and try again", comment: "Tip for an error where the relay address being added is invalid") + return + } + + do { + try await state.nostrNetwork.userRelayList.insert(relay: NIP65.RelayList.RelayItem(url: url, rwConfiguration: .readWrite)) + relayAddErrorTitle = nil // Clear error title + relayAddErrorMessage = nil // Clear error message + } + catch { + present_sheet(.error(self.humanReadableError(for: error))) + } + + new_relay = "" + + this_app.sendAction(#selector(UIResponder.resignFirstResponder), to: nil, from: nil, for: nil) + + dismiss() } - - new_relay = "" - - this_app.sendAction(#selector(UIResponder.resignFirstResponder), to: nil, from: nil, for: nil) - - dismiss() }) { HStack { Text("Add relay", comment: "Button to add a relay.") diff --git a/damus/Features/Search/Models/SearchHomeModel.swift b/damus/Features/Search/Models/SearchHomeModel.swift index 4c99fbca9..4d34eba3f 100644 --- a/damus/Features/Search/Models/SearchHomeModel.swift +++ b/damus/Features/Search/Models/SearchHomeModel.swift @@ -55,7 +55,7 @@ class SearchHomeModel: ObservableObject { DispatchQueue.main.async { self.loading = true } - let to_relays = damus_state.nostrNetwork.ourRelayDescriptors + let to_relays = await damus_state.nostrNetwork.ourRelayDescriptors .map { $0.url } .filter { !damus_state.relay_filters.is_filtered(timeline: .search, relay_id: $0) } diff --git a/damus/Features/Search/Views/SearchHeaderView.swift b/damus/Features/Search/Views/SearchHeaderView.swift index 7f9449dc9..7b19a62f9 100644 --- a/damus/Features/Search/Views/SearchHeaderView.swift +++ b/damus/Features/Search/Views/SearchHeaderView.swift @@ -125,7 +125,7 @@ struct HashtagUnfollowButton: View { func unfollow(_ hashtag: String) { is_following = false - handle_unfollow(state: damus_state, unfollow: FollowRef.hashtag(hashtag)) + Task { await handle_unfollow(state: damus_state, unfollow: FollowRef.hashtag(hashtag)) } } } @@ -144,7 +144,7 @@ struct HashtagFollowButton: View { func follow(_ hashtag: String) { is_following = true - handle_follow(state: damus_state, follow: .hashtag(hashtag)) + Task { await handle_follow(state: damus_state, follow: .hashtag(hashtag)) } } } diff --git a/damus/Features/Search/Views/SearchView.swift b/damus/Features/Search/Views/SearchView.swift index dd0e233ab..b43b51491 100644 --- a/damus/Features/Search/Views/SearchView.swift +++ b/damus/Features/Search/Views/SearchView.swift @@ -69,7 +69,7 @@ struct SearchView: View { } appstate.mutelist_manager.set_mutelist(mutelist) - appstate.nostrNetwork.postbox.send(mutelist) + Task { await appstate.nostrNetwork.postbox.send(mutelist) } } label: { Text("Unmute Hashtag", comment: "Label represnting a button that the user can tap to unmute a given hashtag so they start seeing it in their feed 
again.") } @@ -104,7 +104,7 @@ struct SearchView: View { } appstate.mutelist_manager.set_mutelist(mutelist) - appstate.nostrNetwork.postbox.send(mutelist) + Task { await appstate.nostrNetwork.postbox.send(mutelist) } } var described_search: DescribedSearch { diff --git a/damus/Features/Settings/Views/ConfigView.swift b/damus/Features/Settings/Views/ConfigView.swift index b16ef68df..d3f5b5e8e 100644 --- a/damus/Features/Settings/Views/ConfigView.swift +++ b/damus/Features/Settings/Views/ConfigView.swift @@ -182,8 +182,10 @@ struct ConfigView: View { let ev = created_deleted_account_profile(keypair: keypair) else { return } - state.nostrNetwork.postbox.send(ev) - logout(state) + Task { + await state.nostrNetwork.postbox.send(ev) + logout(state) + } } } .alert(NSLocalizedString("Logout", comment: "Alert for logging out the user."), isPresented: $confirm_logout) { diff --git a/damus/Features/Settings/Views/FirstAidSettingsView.swift b/damus/Features/Settings/Views/FirstAidSettingsView.swift index 84fbe48e1..354b94e6c 100644 --- a/damus/Features/Settings/Views/FirstAidSettingsView.swift +++ b/damus/Features/Settings/Views/FirstAidSettingsView.swift @@ -68,13 +68,13 @@ struct FirstAidSettingsView: View { guard let new_contact_list_event = make_first_contact_event(keypair: damus_state.keypair) else { throw FirstAidError.cannotMakeFirstContactEvent } - damus_state.nostrNetwork.send(event: new_contact_list_event) + await damus_state.nostrNetwork.send(event: new_contact_list_event) damus_state.settings.latest_contact_event_id_hex = new_contact_list_event.id.hex() } func resetRelayList() async throws { let bestEffortRelayList = damus_state.nostrNetwork.userRelayList.getBestEffortRelayList() - try damus_state.nostrNetwork.userRelayList.set(userRelayList: bestEffortRelayList) + try await damus_state.nostrNetwork.userRelayList.set(userRelayList: bestEffortRelayList) } enum FirstAidError: Error { diff --git a/damus/Features/Status/Views/UserStatusSheet.swift b/damus/Features/Status/Views/UserStatusSheet.swift index dbff37bf9..0ce18d413 100644 --- a/damus/Features/Status/Views/UserStatusSheet.swift +++ b/damus/Features/Status/Views/UserStatusSheet.swift @@ -109,16 +109,18 @@ struct UserStatusSheet: View { Spacer() Button(action: { - guard let status = self.status.general, - let kp = keypair.to_full(), - let ev = make_user_status_note(status: status, keypair: kp, expiry: duration.expiration) - else { - return + Task { + guard let status = self.status.general, + let kp = keypair.to_full(), + let ev = make_user_status_note(status: status, keypair: kp, expiry: duration.expiration) + else { + return + } + + await postbox.send(ev) + + dismiss() } - - postbox.send(ev) - - dismiss() }, label: { Text("Share", comment: "Save button text for saving profile status settings.") }) diff --git a/damus/Features/Timeline/Models/HomeModel.swift b/damus/Features/Timeline/Models/HomeModel.swift index 913cd8a61..0280ab571 100644 --- a/damus/Features/Timeline/Models/HomeModel.swift +++ b/damus/Features/Timeline/Models/HomeModel.swift @@ -812,13 +812,15 @@ class HomeModel: ContactsDelegate, ObservableObject { } -func update_signal_from_pool(signal: SignalModel, pool: RelayPool) { - if signal.max_signal != pool.relays.count { - signal.max_signal = pool.relays.count +func update_signal_from_pool(signal: SignalModel, pool: RelayPool) async { + let relayCount = await pool.relays.count + if signal.max_signal != relayCount { + signal.max_signal = relayCount } - if signal.signal != pool.num_connected { - signal.signal = 
pool.num_connected + let numberOfConnectedRelays = await pool.num_connected + if signal.signal != numberOfConnectedRelays { + signal.signal = numberOfConnectedRelays } } diff --git a/damus/Features/Wallet/Models/WalletConnect/WalletConnect+.swift b/damus/Features/Wallet/Models/WalletConnect/WalletConnect+.swift index b7f3a2c1e..530395e43 100644 --- a/damus/Features/Wallet/Models/WalletConnect/WalletConnect+.swift +++ b/damus/Features/Wallet/Models/WalletConnect/WalletConnect+.swift @@ -17,14 +17,14 @@ extension WalletConnect { /// - Parameters: /// - url: The Nostr Wallet Connect URL containing connection info to the NWC wallet /// - pool: The RelayPool to send the subscription request through - static func subscribe(url: WalletConnectURL, pool: RelayPool) { + static func subscribe(url: WalletConnectURL, pool: RelayPool) async { var filter = NostrFilter(kinds: [.nwc_response]) filter.authors = [url.pubkey] filter.pubkeys = [url.keypair.pubkey] filter.limit = 0 let sub = NostrSubscribe(filters: [filter], sub_id: "nwc") - pool.send(.subscribe(sub), to: [url.relay], skip_ephemeral: false) + await pool.send(.subscribe(sub), to: [url.relay], skip_ephemeral: false) } /// Sends out a request to pay an invoice to the NWC relay, and ensures that: @@ -41,16 +41,16 @@ extension WalletConnect { /// - on_flush: A callback to call after the event has been flushed to the network /// - Returns: The Nostr Event that was sent to the network, representing the request that was made @discardableResult - static func pay(url: WalletConnectURL, pool: RelayPool, post: PostBox, invoice: String, zap_request: NostrEvent?, delay: TimeInterval? = 5.0, on_flush: OnFlush? = nil) -> NostrEvent? { + static func pay(url: WalletConnectURL, pool: RelayPool, post: PostBox, invoice: String, zap_request: NostrEvent?, delay: TimeInterval? = 5.0, on_flush: OnFlush? = nil) async -> NostrEvent? { let req = WalletConnect.Request.payZapRequest(invoice: invoice, zapRequest: zap_request) guard let ev = req.to_nostr_event(to_pk: url.pubkey, keypair: url.keypair) else { return nil } - try? pool.add_relay(.nwc(url: url.relay)) // Ensure the NWC relay is connected - WalletConnect.subscribe(url: url, pool: pool) // Ensure we are listening to NWC updates from the relay - post.send(ev, to: [url.relay], skip_ephemeral: false, delay: delay, on_flush: on_flush) + try? await pool.add_relay(.nwc(url: url.relay)) // Ensure the NWC relay is connected + await WalletConnect.subscribe(url: url, pool: pool) // Ensure we are listening to NWC updates from the relay + await post.send(ev, to: [url.relay], skip_ephemeral: false, delay: delay, on_flush: on_flush) return ev } diff --git a/damus/Features/Wallet/Models/WalletModel.swift b/damus/Features/Wallet/Models/WalletModel.swift index ecde51215..240b8bcc4 100644 --- a/damus/Features/Wallet/Models/WalletModel.swift +++ b/damus/Features/Wallet/Models/WalletModel.swift @@ -181,7 +181,7 @@ class WalletModel: ObservableObject { ) ] - nostrNetwork.send(event: requestEvent, to: [currentNwcUrl.relay], skipEphemeralRelays: false) + await nostrNetwork.send(event: requestEvent, to: [currentNwcUrl.relay], skipEphemeralRelays: false) for await event in nostrNetwork.reader.timedStream(filters: responseFilters, to: [currentNwcUrl.relay], timeout: timeout) { guard let responseEvent = try? 
event.getCopy() else { throw .internalError } diff --git a/damus/Features/Wallet/Views/NWCSettings.swift b/damus/Features/Wallet/Views/NWCSettings.swift index c0240ae59..c72d87628 100644 --- a/damus/Features/Wallet/Views/NWCSettings.swift +++ b/damus/Features/Wallet/Views/NWCSettings.swift @@ -268,7 +268,7 @@ struct NWCSettings: View { guard let meta = make_metadata_event(keypair: keypair, metadata: prof) else { return } - damus_state.nostrNetwork.postbox.send(meta) + Task { await damus_state.nostrNetwork.postbox.send(meta) } } } diff --git a/damus/Features/Wallet/Views/SendPaymentView.swift b/damus/Features/Wallet/Views/SendPaymentView.swift index 19f3d93ae..9d1804852 100644 --- a/damus/Features/Wallet/Views/SendPaymentView.swift +++ b/damus/Features/Wallet/Views/SendPaymentView.swift @@ -182,18 +182,18 @@ struct SendPaymentView: View { .buttonStyle(NeutralButtonStyle()) Button(action: { - sendState = .processing - - // Process payment - guard let payRequestEv = damus_state.nostrNetwork.nwcPay(url: nwc, post: damus_state.nostrNetwork.postbox, invoice: invoice.string, zap_request: nil) else { - sendState = .failed(error: .init( - user_visible_description: NSLocalizedString("The payment request could not be made to your wallet provider.", comment: "A human-readable error message"), - tip: NSLocalizedString("Check if your wallet looks configured correctly and try again. If the error persists, please contact support.", comment: "A human-readable tip for an error when a payment request cannot be made to a wallet."), - technical_info: "Cannot form Nostr Event to send to the NWC provider when calling `pay` from the \"send payment\" feature. Wallet provider relay: \"\(nwc.relay)\"" - )) - return - } Task { + sendState = .processing + + // Process payment + guard let payRequestEv = await damus_state.nostrNetwork.nwcPay(url: nwc, post: damus_state.nostrNetwork.postbox, invoice: invoice.string, zap_request: nil) else { + sendState = .failed(error: .init( + user_visible_description: NSLocalizedString("The payment request could not be made to your wallet provider.", comment: "A human-readable error message"), + tip: NSLocalizedString("Check if your wallet looks configured correctly and try again. If the error persists, please contact support.", comment: "A human-readable tip for an error when a payment request cannot be made to a wallet."), + technical_info: "Cannot form Nostr Event to send to the NWC provider when calling `pay` from the \"send payment\" feature. Wallet provider relay: \"\(nwc.relay)\"" + )) + return + } do { let result = try await model.waitForResponse(for: payRequestEv.id, timeout: SEND_PAYMENT_TIMEOUT) guard case .pay_invoice(_) = result else { diff --git a/damus/Features/Zaps/Models/Zaps.swift b/damus/Features/Zaps/Models/Zaps.swift index c16e0d0ec..3c370e41c 100644 --- a/damus/Features/Zaps/Models/Zaps.swift +++ b/damus/Features/Zaps/Models/Zaps.swift @@ -95,7 +95,7 @@ class Zaps { event_counts[note_id] = event_counts[note_id]! + 1 event_totals[note_id] = event_totals[note_id]! 
+ zap.amount - notify(.update_stats(note_id: note_id)) + Task { await notify(.update_stats(note_id: note_id)) } } } } diff --git a/damus/Features/Zaps/Views/NoteZapButton.swift b/damus/Features/Zaps/Views/NoteZapButton.swift index d7a1c9efe..fe15f3962 100644 --- a/damus/Features/Zaps/Views/NoteZapButton.swift +++ b/damus/Features/Zaps/Views/NoteZapButton.swift @@ -179,7 +179,7 @@ func send_zap(damus_state: DamusState, target: ZapTarget, lnurl: String, is_cust } // Only take the first 10 because reasons - let relays = Array(damus_state.nostrNetwork.ourRelayDescriptors.prefix(10)) + let relays = Array(await damus_state.nostrNetwork.ourRelayDescriptors.prefix(10)) let content = comment ?? "" guard let mzapreq = make_zap_request_event(keypair: keypair, content: content, relays: relays, target: target, zap_type: zap_type) else { @@ -240,7 +240,7 @@ func send_zap(damus_state: DamusState, target: ZapTarget, lnurl: String, is_cust // we don't have a delay on one-tap nozaps (since this will be from customize zap view) let delay = damus_state.settings.nozaps ? nil : 5.0 - let nwc_req = damus_state.nostrNetwork.nwcPay(url: nwc_state.url, post: damus_state.nostrNetwork.postbox, invoice: inv, delay: delay, on_flush: flusher) + let nwc_req = await damus_state.nostrNetwork.nwcPay(url: nwc_state.url, post: damus_state.nostrNetwork.postbox, invoice: inv, delay: delay, on_flush: flusher) guard let nwc_req, case .nwc(let pzap_state) = pending_zap_state else { print("nwc: failed to send nwc request for zapreq \(reqid.reqid)") diff --git a/damus/Notify/Notify.swift b/damus/Notify/Notify.swift index 0557c2d80..bcc01d960 100644 --- a/damus/Notify/Notify.swift +++ b/damus/Notify/Notify.swift @@ -33,7 +33,9 @@ struct NotifyHandler { } func notify(_ notify: Notifications) { let notify = notify.notify - NotificationCenter.default.post(name: T.name, object: notify.payload) + DispatchQueue.main.async { + NotificationCenter.default.post(name: T.name, object: notify.payload) + } } func handle_notify(_ handler: NotifyHandler) -> AnyPublisher { diff --git a/damus/Notify/PresentFullScreenItemNotify.swift b/damus/Notify/PresentFullScreenItemNotify.swift index afc88b97a..0cd5d990f 100644 --- a/damus/Notify/PresentFullScreenItemNotify.swift +++ b/damus/Notify/PresentFullScreenItemNotify.swift @@ -37,6 +37,6 @@ extension Notifications { /// The requests from this function will be received and handled at the top level app view (`ContentView`), which contains a `.damus_full_screen_cover`. 
/// func present(full_screen_item: FullScreenItem) { - notify(.present_full_screen_item(full_screen_item)) + Task { await notify(.present_full_screen_item(full_screen_item)) } } diff --git a/highlighter action extension/ActionViewController.swift b/highlighter action extension/ActionViewController.swift index 0ee8bbc32..d886cbf47 100644 --- a/highlighter action extension/ActionViewController.swift +++ b/highlighter action extension/ActionViewController.swift @@ -135,7 +135,7 @@ struct ShareExtensionView: View { return } self.state = DamusState(keypair: keypair) - self.state?.nostrNetwork.connect() + Task { await self.state?.nostrNetwork.connect() } }) .onChange(of: self.highlighter_state) { if case .cancelled = highlighter_state { @@ -144,10 +144,10 @@ struct ShareExtensionView: View { } .onReceive(handle_notify(.post)) { post_notification in switch post_notification { - case .post(let post): - self.post(post) - case .cancel: - self.highlighter_state = .cancelled + case .post(let post): + Task { await self.post(post) } + case .cancel: + self.highlighter_state = .cancelled } } .onChange(of: scenePhase) { (phase: ScenePhase) in @@ -164,7 +164,7 @@ struct ShareExtensionView: View { break case .active: print("txn: 📙 HIGHLIGHTER ACTIVE") - state.nostrNetwork.ping() + Task { await state.nostrNetwork.ping() } @unknown default: break } @@ -225,7 +225,7 @@ struct ShareExtensionView: View { } } - func post(_ post: NostrPost) { + func post(_ post: NostrPost) async { self.highlighter_state = .posting guard let state else { self.highlighter_state = .failed(error: "Damus state not initialized") @@ -239,7 +239,7 @@ struct ShareExtensionView: View { self.highlighter_state = .failed(error: "Cannot convert post data into a nostr event") return } - state.nostrNetwork.postbox.send(posted_event, on_flush: .once({ flushed_event in + await state.nostrNetwork.postbox.send(posted_event, on_flush: .once({ flushed_event in if flushed_event.event.id == posted_event.id { DispatchQueue.main.asyncAfter(deadline: .now() + 1, execute: { // Offset labor perception bias self.highlighter_state = .posted(event: flushed_event.event) diff --git a/nostrdb/UnownedNdbNote.swift b/nostrdb/UnownedNdbNote.swift index 79ef237b0..2c9716598 100644 --- a/nostrdb/UnownedNdbNote.swift +++ b/nostrdb/UnownedNdbNote.swift @@ -64,7 +64,19 @@ enum NdbNoteLender: Sendable { case .owned(let note): return try lendingFunction(UnownedNdbNote(note)) } - + } + + /// Borrows the note temporarily (asynchronously) + func borrow(_ lendingFunction: (_: borrowing UnownedNdbNote) async throws -> T) async throws -> T { + switch self { + case .ndbNoteKey(let ndb, let noteKey): + guard !ndb.is_closed else { throw LendingError.ndbClosed } + guard let ndbNoteTxn = ndb.lookup_note_by_key(noteKey) else { throw LendingError.errorLoadingNote } + guard let unownedNote = UnownedNdbNote(ndbNoteTxn) else { throw LendingError.errorLoadingNote } + return try await lendingFunction(unownedNote) + case .owned(let note): + return try await lendingFunction(UnownedNdbNote(note)) + } } /// Gets an owned copy of the note diff --git a/nostrscript/NostrScript.swift b/nostrscript/NostrScript.swift index 34f0e1028..917ccb783 100644 --- a/nostrscript/NostrScript.swift +++ b/nostrscript/NostrScript.swift @@ -310,7 +310,10 @@ public func nscript_nostr_cmd(interp: UnsafeMutablePointer?, cmd: I func nscript_add_relay(script: NostrScript, relay: String) -> Bool { guard let url = RelayURL(relay) else { return false } let desc = RelayPool.RelayDescriptor(url: url, info: .readWrite, variant: 
.ephemeral) - return (try? script.pool.add_relay(desc)) != nil + // Interacting with RelayPool needs to be done asynchronously, thus we cannot return the answer synchronously + // return (try? await script.pool.add_relay(desc)) != nil + Task { try await script.pool.add_relay(desc) } + return true } @@ -344,9 +347,7 @@ public func nscript_pool_send_to(interp: UnsafeMutablePointer?, pre return 0 } - DispatchQueue.main.async { - script.pool.send_raw(.custom(req_str), to: [to_relay_url], skip_ephemeral: false) - } + Task { await script.pool.send_raw(.custom(req_str), to: [to_relay_url], skip_ephemeral: false) } return 1; } @@ -354,9 +355,7 @@ public func nscript_pool_send_to(interp: UnsafeMutablePointer?, pre func nscript_pool_send(script: NostrScript, req req_str: String) -> Int32 { //script.test("pool_send: '\(req_str)'") - DispatchQueue.main.sync { - script.pool.send_raw(.custom(req_str), skip_ephemeral: false) - } + Task { await script.pool.send_raw(.custom(req_str), skip_ephemeral: false) } return 1; } diff --git a/share extension/ShareViewController.swift b/share extension/ShareViewController.swift index 67c38f76a..6b17d894a 100644 --- a/share extension/ShareViewController.swift +++ b/share extension/ShareViewController.swift @@ -173,7 +173,7 @@ struct ShareExtensionView: View { .onReceive(handle_notify(.post)) { post_notification in switch post_notification { case .post(let post): - self.post(post) + Task { await self.post(post) } case .cancel: self.share_state = .cancelled dismissParent?() @@ -193,7 +193,7 @@ struct ShareExtensionView: View { break case .active: print("txn: 📙 SHARE ACTIVE") - state.nostrNetwork.ping() + Task { await state.nostrNetwork.ping() } @unknown default: break } @@ -216,7 +216,7 @@ struct ShareExtensionView: View { } } - func post(_ post: NostrPost) { + func post(_ post: NostrPost) async { self.share_state = .posting guard let state else { self.share_state = .failed(error: "Damus state not initialized") @@ -230,7 +230,7 @@ struct ShareExtensionView: View { self.share_state = .failed(error: "Cannot convert post data into a nostr event") return } - state.nostrNetwork.postbox.send(posted_event, on_flush: .once({ flushed_event in + await state.nostrNetwork.postbox.send(posted_event, on_flush: .once({ flushed_event in if flushed_event.event.id == posted_event.id { DispatchQueue.main.asyncAfter(deadline: .now() + 1, execute: { // Offset labor perception bias self.share_state = .posted(event: flushed_event.event) @@ -250,7 +250,7 @@ struct ShareExtensionView: View { return false } state = DamusState(keypair: keypair) - state?.nostrNetwork.connect() + Task { await state?.nostrNetwork.connect() } return true } From ab2c16288b83309063ffd1d772d0405a9e3665a8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Mon, 13 Oct 2025 15:16:54 -0700 Subject: [PATCH 90/91] Fix test compilation issues MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Daniel D’Aquino --- damusTests/AuthIntegrationTests.swift | 38 ++++++++++++------- damusTests/MutingTests.swift | 2 +- damusTests/NIP10Tests.swift | 4 +- .../NostrNetworkManagerTests.swift | 10 +++-- .../ThreadModelTests.swift | 9 +++-- damusTests/PostViewTests.swift | 12 +++--- damusTests/ReplyTests.swift | 4 +- damusTests/WalletConnectTests.swift | 5 ++- 8 files changed, 50 insertions(+), 34 deletions(-) diff --git a/damusTests/AuthIntegrationTests.swift b/damusTests/AuthIntegrationTests.swift index cc28b810e..dc2508023 100644 --- 
a/damusTests/AuthIntegrationTests.swift +++ b/damusTests/AuthIntegrationTests.swift @@ -70,14 +70,15 @@ final class AuthIntegrationTests: XCTestCase { } */ - func testAuthIntegrationRelayDamusIo() { + @MainActor + func testAuthIntegrationRelayDamusIo() async { // Create relay pool and connect to `wss://relay.damus.io` let relay_url = RelayURL("wss://relay.damus.io")! var received_messages: [String] = [] var sent_messages: [String] = [] let keypair: Keypair = generate_new_keypair().to_keypair() let pool = RelayPool(ndb: Ndb.test, keypair: keypair) - pool.message_received_function = { obj in + await pool.set_message_received_function({ obj in let str = obj.0 let descriptor = obj.1 @@ -86,8 +87,8 @@ final class AuthIntegrationTests: XCTestCase { } received_messages.append(str) - } - pool.message_sent_function = { obj in + }) + await pool.set_message_sent_function({ obj in let str = obj.0 let relay = obj.1 @@ -96,10 +97,10 @@ final class AuthIntegrationTests: XCTestCase { } sent_messages.append(str) - } + }) XCTAssertEqual(pool.relays.count, 0) let relay_descriptor = RelayPool.RelayDescriptor.init(url: relay_url, info: .readWrite) - try! pool.add_relay(relay_descriptor) + try! await pool.add_relay(relay_descriptor) XCTAssertEqual(pool.relays.count, 1) let connection_expectation = XCTestExpectation(description: "Waiting for connection") Timer.scheduledTimer(withTimeInterval: 1.0, repeats: true) { timer in @@ -114,14 +115,15 @@ final class AuthIntegrationTests: XCTestCase { XCTAssertEqual(received_messages.count, 0) } - func testAuthIntegrationNostrWine() { + @MainActor + func testAuthIntegrationNostrWine() async { // Create relay pool and connect to `wss://nostr.wine` let relay_url = RelayURL("wss://nostr.wine")! var received_messages: [String] = [] var sent_messages: [String] = [] let keypair: Keypair = generate_new_keypair().to_keypair() let pool = RelayPool(ndb: Ndb.test, keypair: keypair) - pool.message_received_function = { obj in + await pool.set_message_received_function({ obj in let str = obj.0 let descriptor = obj.1 @@ -130,8 +132,8 @@ final class AuthIntegrationTests: XCTestCase { } received_messages.append(str) - } - pool.message_sent_function = { obj in + }) + await pool.set_message_sent_function({ obj in let str = obj.0 let relay = obj.1 @@ -140,10 +142,10 @@ final class AuthIntegrationTests: XCTestCase { } sent_messages.append(str) - } + }) XCTAssertEqual(pool.relays.count, 0) let relay_descriptor = RelayPool.RelayDescriptor.init(url: relay_url, info: .readWrite) - try! pool.add_relay(relay_descriptor) + try! await pool.add_relay(relay_descriptor) XCTAssertEqual(pool.relays.count, 1) let connection_expectation = XCTestExpectation(description: "Waiting for connection") Timer.scheduledTimer(withTimeInterval: 1.0, repeats: true) { timer in @@ -162,7 +164,7 @@ final class AuthIntegrationTests: XCTestCase { let subscribe = NostrSubscribe(filters: [ NostrFilter(kinds: [.dm]) ], sub_id: uuid) - pool.send(NostrRequest.subscribe(subscribe)) + await pool.send(NostrRequest.subscribe(subscribe)) // Wait for AUTH message to have been received & sent let msg_expectation = XCTestExpectation(description: "Waiting for messages") Timer.scheduledTimer(withTimeInterval: 1.0, repeats: true) { timer in @@ -186,3 +188,13 @@ final class AuthIntegrationTests: XCTestCase { } } + +extension RelayPool { + func set_message_received_function(_ newFunction: (((String, RelayDescriptor)) -> Void)?) 
{ + self.message_received_function = newFunction + } + + func set_message_sent_function(_ newFunction: (((String, Relay)) -> Void)? = nil) { + self.message_sent_function = newFunction + } +} diff --git a/damusTests/MutingTests.swift b/damusTests/MutingTests.swift index 530c0d489..26c9bb1b2 100644 --- a/damusTests/MutingTests.swift +++ b/damusTests/MutingTests.swift @@ -35,7 +35,7 @@ final class MutingTests: XCTestCase { } await test_damus_state.mutelist_manager.set_mutelist(mutelist) - test_damus_state.nostrNetwork.postbox.send(mutelist) + await test_damus_state.nostrNetwork.postbox.send(mutelist) let spammy_note_muted = await test_damus_state.mutelist_manager.is_event_muted(spammy_test_note) XCTAssert(spammy_note_muted) diff --git a/damusTests/NIP10Tests.swift b/damusTests/NIP10Tests.swift index e99caabab..cef674910 100644 --- a/damusTests/NIP10Tests.swift +++ b/damusTests/NIP10Tests.swift @@ -147,7 +147,7 @@ final class NIP10Tests: XCTestCase { XCTAssertEqual(tr.is_reply_to_root, true) } - func test_marker_reply() { + func test_marker_reply() async { let note_json = """ { "pubkey": "5b0183ab6c3e322bf4d41c6b3aef98562a144847b7499543727c5539a114563e", @@ -181,7 +181,7 @@ final class NIP10Tests: XCTestCase { let pk = Pubkey(hex: "5b0183ab6c3e322bf4d41c6b3aef98562a144847b7499543727c5539a114563e")! //let last_reply_hex = "1bb940ce0ba0d4a3b2a589355d908498dcd7452f941cf520072218f7e6ede75e" let note = decode_nostr_event_json(json: note_json)! - let reply = build_post(state: test_damus_state, post: .init(string: "hello"), action: .replying_to(note), uploadedMedias: [], pubkeys: [pk] + note.referenced_pubkeys.map({pk in pk})) + let reply = await build_post(state: test_damus_state, post: .init(string: "hello"), action: .replying_to(note), uploadedMedias: [], pubkeys: [pk] + note.referenced_pubkeys.map({pk in pk})) let root_hex = "00152d2945459fb394fed2ea95af879c903c4ec42d96327a739fa27c023f20e0" XCTAssertEqual(reply.tags, diff --git a/damusTests/NostrNetworkManagerTests/NostrNetworkManagerTests.swift b/damusTests/NostrNetworkManagerTests/NostrNetworkManagerTests.swift index 7ea5dc193..01ce5e56f 100644 --- a/damusTests/NostrNetworkManagerTests/NostrNetworkManagerTests.swift +++ b/damusTests/NostrNetworkManagerTests/NostrNetworkManagerTests.swift @@ -15,8 +15,6 @@ class NostrNetworkManagerTests: XCTestCase { override func setUpWithError() throws { // Put setup code here. This method is called before the invocation of each test method in the class. damusState = generate_test_damus_state(mock_profile_info: nil) - try! damusState?.nostrNetwork.userRelayList.set(userRelayList: NIP65.RelayList()) - damusState?.nostrNetwork.connect() let notesJSONL = getTestNotesJSONL() @@ -59,9 +57,10 @@ class NostrNetworkManagerTests: XCTestCase { gotAtLeastExpectedCount.fulfill() } case .eose: + continue + case .ndbEose: // End of stream, break out of the loop endOfStream.fulfill() - case .ndbEose: continue case .networkEose: continue @@ -83,7 +82,10 @@ class NostrNetworkManagerTests: XCTestCase { /// ``` /// nak req --kind 1 ws://localhost:10547 | wc -l /// ``` - func testNdbSubscription() { + func testNdbSubscription() async { + try! 
await damusState?.nostrNetwork.userRelayList.set(userRelayList: NIP65.RelayList()) + await damusState?.nostrNetwork.connect() + ensureSubscribeGetsAllExpectedNotes(filter: NostrFilter(kinds: [.text]), expectedCount: 57) ensureSubscribeGetsAllExpectedNotes(filter: NostrFilter(authors: [Pubkey(hex: "32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245")!]), expectedCount: 22) ensureSubscribeGetsAllExpectedNotes(filter: NostrFilter(kinds: [.boost], referenced_ids: [NoteId(hex: "64b26d0a587f5f894470e1e4783756b4d8ba971226de975ee30ac1b69970d5a1")!]), expectedCount: 5) diff --git a/damusTests/NostrNetworkManagerTests/ThreadModelTests.swift b/damusTests/NostrNetworkManagerTests/ThreadModelTests.swift index d0824b14c..fa72d03e8 100644 --- a/damusTests/NostrNetworkManagerTests/ThreadModelTests.swift +++ b/damusTests/NostrNetworkManagerTests/ThreadModelTests.swift @@ -15,8 +15,6 @@ final class ThreadModelTests: XCTestCase { override func setUpWithError() throws { // Put setup code here. This method is called before the invocation of each test method in the class. damusState = generate_test_damus_state(mock_profile_info: nil) - try! damusState?.nostrNetwork.userRelayList.set(userRelayList: NIP65.RelayList()) - damusState?.nostrNetwork.connect() let notesJSONL = getTestNotesJSONL() @@ -40,7 +38,10 @@ final class ThreadModelTests: XCTestCase { } /// Tests loading up a thread and checking if the repost count loads as expected. - func testActionBarModel() throws { + func testActionBarModel() async throws { + try! await damusState?.nostrNetwork.userRelayList.set(userRelayList: NIP65.RelayList()) + await damusState?.nostrNetwork.connect() + let testNoteJson = """ {"content":"https://smartflowsocial.s3.us-east-1.amazonaws.com/clients/cm7kdrwdk0000qyu6fwtd96ui/0cab65a9-0142-48e3-abd7-94d20e30d3b2.jpg\n\n","pubkey":"71ecabd8b6b33548e075ff01b31568ffda19d0ac2788067d99328c6de4885975","tags":[["t","meme"],["t","memes"],["t","memestr"],["t","plebchain"]],"created_at":1755694800,"id":"64b26d0a587f5f894470e1e4783756b4d8ba971226de975ee30ac1b69970d5a1","kind":1,"sig":"c000794da8c4f7549b546630b16ed17f6edc0af0269b8c46ce14f5b1937431e7575b78351bc152007ebab5720028e5fe4b738f99e8887f273d35dd2217d1cc3d"} """ @@ -52,7 +53,7 @@ final class ThreadModelTests: XCTestCase { let actionBarModel = make_actionbar_model(ev: note.id, damus: damusState!) 
while true { try await Task.sleep(nanoseconds: 500_000_000) - actionBarModel.update(damus: damusState!, evid: note.id) + await actionBarModel.update(damus: damusState!, evid: note.id) if actionBarModel.boosts >= 5 { break } diff --git a/damusTests/PostViewTests.swift b/damusTests/PostViewTests.swift index 028102861..8b7f4e5ac 100644 --- a/damusTests/PostViewTests.swift +++ b/damusTests/PostViewTests.swift @@ -171,13 +171,13 @@ final class PostViewTests: XCTestCase { nonAlphaNumerics.forEach { testAddingStringAfterLink(str: $0)} } - func testQuoteRepost() { - let post = build_post(state: test_damus_state, post: .init(), action: .quoting(test_note), uploadedMedias: [], pubkeys: []) + func testQuoteRepost() async { + let post = await build_post(state: test_damus_state, post: .init(), action: .quoting(test_note), uploadedMedias: [], pubkeys: []) XCTAssertEqual(post.tags, [["q", test_note.id.hex(), "", jack_keypair.pubkey.hex()], ["p", jack_keypair.pubkey.hex()]]) } - func testBuildPostRecognizesStringsAsNpubs() throws { + func testBuildPostRecognizesStringsAsNpubs() async throws { // given let expectedLink = "nostr:\(test_pubkey.npub)" let content = NSMutableAttributedString(string: "@test", attributes: [ @@ -185,7 +185,7 @@ final class PostViewTests: XCTestCase { ]) // when - let post = build_post( + let post = await build_post( state: test_damus_state, post: content, action: .posting(.user(test_pubkey)), @@ -197,7 +197,7 @@ final class PostViewTests: XCTestCase { XCTAssertEqual(post.content, expectedLink) } - func testBuildPostRecognizesUrlsAsNpubs() throws { + func testBuildPostRecognizesUrlsAsNpubs() async throws { // given guard let npubUrl = URL(string: "damus:nostr:\(test_pubkey.npub)") else { return XCTFail("Could not create URL") @@ -207,7 +207,7 @@ final class PostViewTests: XCTestCase { ]) // when - let post = build_post( + let post = await build_post( state: test_damus_state, post: content, action: .posting(.user(test_pubkey)), diff --git a/damusTests/ReplyTests.swift b/damusTests/ReplyTests.swift index 5a9a16d4b..f500ff396 100644 --- a/damusTests/ReplyTests.swift +++ b/damusTests/ReplyTests.swift @@ -86,7 +86,7 @@ class ReplyTests: XCTestCase { // XCTAssertEqual(post_blocks.count, 1) // } - func testNewlineMentions() throws { + func testNewlineMentions() async throws { let bech32_pk = "npub1xtscya34g58tk0z605fvr788k263gsu6cy9x0mhnm87echrgufzsevkk5s" let pk = bech32_pubkey_decode(bech32_pk)! 
@@ -96,7 +96,7 @@ class ReplyTests: XCTestCase { post.append(user_tag_attr_string(profile: profile, pubkey: pk)) post.append(.init(string: "\n")) - let post_note = build_post(state: test_damus_state, post: post, action: .posting(.none), uploadedMedias: [], pubkeys: [pk]) + let post_note = await build_post(state: test_damus_state, post: post, action: .posting(.none), uploadedMedias: [], pubkeys: [pk]) let expected_render = "nostr:\(pk.npub)\nnostr:\(pk.npub)" XCTAssertEqual(post_note.content, expected_render) diff --git a/damusTests/WalletConnectTests.swift b/damusTests/WalletConnectTests.swift index ef0a8010c..4f6f96d54 100644 --- a/damusTests/WalletConnectTests.swift +++ b/damusTests/WalletConnectTests.swift @@ -79,7 +79,8 @@ final class WalletConnectTests: XCTestCase { XCTAssertEqual(url_2.relay.url.absoluteString, relay_2) } - func testNWCEphemeralRelay() { + @MainActor + func testNWCEphemeralRelay() async { let sec = "8ba3a6b3b57d0f4211bb1ea4d8d1e351a367e9b4ea694746e0a4a452b2bc4d37" let pk = "89446b900c70d62438dcf66756405eea6225ad94dc61f3856f62f9699111a9a6" let nwc = WalletConnectURL(str: "nostrwalletconnect://\(pk)?relay=ws://127.0.0.1&secret=\(sec)&lud16=jb55@jb55.com")! @@ -87,7 +88,7 @@ final class WalletConnectTests: XCTestCase { let pool = RelayPool(ndb: .empty) let box = PostBox(pool: pool) - WalletConnect.pay(url: nwc, pool: pool, post: box, invoice: "invoice", zap_request: nil) + await WalletConnect.pay(url: nwc, pool: pool, post: box, invoice: "invoice", zap_request: nil) XCTAssertEqual(pool.our_descriptors.count, 0) XCTAssertEqual(pool.all_descriptors.count, 1) From 690f8b891e2484287267c9af00738f4980f0acb5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Daniel=20D=E2=80=99Aquino?= Date: Mon, 13 Oct 2025 16:56:37 -0700 Subject: [PATCH 91/91] Implement timestamp-based network subscription optimization MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Changelog-Changed: Optimized network bandwidth usage and improved timeline performance Signed-off-by: Daniel D’Aquino --- damus.xcodeproj/project.pbxproj | 8 + .../NostrNetworkManager/ProfilesManager.swift | 2 +- .../SubscriptionManager.swift | 86 +++- damus/Core/Nostr/RelayConnection.swift | 9 + damus/Core/Nostr/RelayPool.swift | 2 + .../Search/Models/SearchHomeModel.swift | 2 +- .../Features/Timeline/Models/HomeModel.swift | 4 +- .../Utilities/StreamPipelineDiagnostics.swift | 27 + devtools/visualize_stream_pipeline.py | 475 ++++++++++++++++++ shell.nix | 2 +- 10 files changed, 591 insertions(+), 26 deletions(-) create mode 100644 damus/Shared/Utilities/StreamPipelineDiagnostics.swift create mode 100644 devtools/visualize_stream_pipeline.py diff --git a/damus.xcodeproj/project.pbxproj b/damus.xcodeproj/project.pbxproj index d1792c398..d9a461364 100644 --- a/damus.xcodeproj/project.pbxproj +++ b/damus.xcodeproj/project.pbxproj @@ -1765,6 +1765,9 @@ D7DF58322DFCF18D00E9AD28 /* SendPaymentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = D7DF58312DFCF18800E9AD28 /* SendPaymentView.swift */; }; D7DF58332DFCF18D00E9AD28 /* SendPaymentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = D7DF58312DFCF18800E9AD28 /* SendPaymentView.swift */; }; D7DF58342DFCF18D00E9AD28 /* SendPaymentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = D7DF58312DFCF18800E9AD28 /* SendPaymentView.swift */; }; + D7E5B2D32EA0188200CF47AC /* StreamPipelineDiagnostics.swift in Sources */ = {isa = PBXBuildFile; fileRef = D7E5B2D22EA0187B00CF47AC /* StreamPipelineDiagnostics.swift */; }; + 
D7E5B2D42EA0188200CF47AC /* StreamPipelineDiagnostics.swift in Sources */ = {isa = PBXBuildFile; fileRef = D7E5B2D22EA0187B00CF47AC /* StreamPipelineDiagnostics.swift */; }; + D7E5B2D52EA0188200CF47AC /* StreamPipelineDiagnostics.swift in Sources */ = {isa = PBXBuildFile; fileRef = D7E5B2D22EA0187B00CF47AC /* StreamPipelineDiagnostics.swift */; }; D7EB00B02CD59C8D00660C07 /* PresentFullScreenItemNotify.swift in Sources */ = {isa = PBXBuildFile; fileRef = D7EB00AF2CD59C8300660C07 /* PresentFullScreenItemNotify.swift */; }; D7EB00B12CD59C8D00660C07 /* PresentFullScreenItemNotify.swift in Sources */ = {isa = PBXBuildFile; fileRef = D7EB00AF2CD59C8300660C07 /* PresentFullScreenItemNotify.swift */; }; D7EBF8BB2E59022A004EAE29 /* NostrNetworkManagerTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = D7EBF8BA2E5901F7004EAE29 /* NostrNetworkManagerTests.swift */; }; @@ -2707,6 +2710,7 @@ D7DB93092D69485A00DA1EE5 /* NIP65.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NIP65.swift; sourceTree = ""; }; D7DEEF2E2A8C021E00E0C99F /* NostrEventTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NostrEventTests.swift; sourceTree = ""; }; D7DF58312DFCF18800E9AD28 /* SendPaymentView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SendPaymentView.swift; sourceTree = ""; }; + D7E5B2D22EA0187B00CF47AC /* StreamPipelineDiagnostics.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StreamPipelineDiagnostics.swift; sourceTree = ""; }; D7EB00AF2CD59C8300660C07 /* PresentFullScreenItemNotify.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PresentFullScreenItemNotify.swift; sourceTree = ""; }; D7EBF8BA2E5901F7004EAE29 /* NostrNetworkManagerTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NostrNetworkManagerTests.swift; sourceTree = ""; }; D7EBF8BD2E594708004EAE29 /* test_notes.jsonl */ = {isa = PBXFileReference; lastKnownFileType = text; path = test_notes.jsonl; sourceTree = ""; }; @@ -4687,6 +4691,7 @@ 5C78A7B82E3047DE00CF177D /* Utilities */ = { isa = PBXGroup; children = ( + D7E5B2D22EA0187B00CF47AC /* StreamPipelineDiagnostics.swift */, D77135D22E7B766300E7639F /* DataExtensions.swift */, 4CF0ABEA29844B2F00D66079 /* AnyCodable */, D73B74E02D8365B40067BDBC /* ExtraFonts.swift */, @@ -5811,6 +5816,7 @@ 4CDA128C29EB19C40006FA5A /* LocalNotification.swift in Sources */, 4C3BEFD6281D995700B3DE84 /* ActionBarModel.swift in Sources */, 4C7D09762A0AF19E00943473 /* FillAndStroke.swift in Sources */, + D7E5B2D42EA0188200CF47AC /* StreamPipelineDiagnostics.swift in Sources */, 4CA927612A290E340098A105 /* EventShell.swift in Sources */, D74EC8502E1856B70091DC51 /* NonCopyableLinkedList.swift in Sources */, 4C363AA428296DEE006E126D /* SearchModel.swift in Sources */, @@ -6421,6 +6427,7 @@ 82D6FC0E2CD99F7900C925F4 /* ProfilePicView.swift in Sources */, 82D6FC0F2CD99F7900C925F4 /* ProfileView.swift in Sources */, 82D6FC102CD99F7900C925F4 /* ProfileNameView.swift in Sources */, + D7E5B2D52EA0188200CF47AC /* StreamPipelineDiagnostics.swift in Sources */, 5CB017212D2D985E00A9ED05 /* CoinosButton.swift in Sources */, 82D6FC112CD99F7900C925F4 /* MaybeAnonPfpView.swift in Sources */, 82D6FC122CD99F7900C925F4 /* EventProfileName.swift in Sources */, @@ -7026,6 +7033,7 @@ D703D7752C670BBF00A400EA /* Constants.swift in Sources */, D73E5E172C6A962A007EB227 /* ImageUploadModel.swift in Sources */, D703D76A2C670B2C00A400EA /* 
Bech32Object.swift in Sources */, + D7E5B2D32EA0188200CF47AC /* StreamPipelineDiagnostics.swift in Sources */, D73E5E162C6A9619007EB227 /* PostView.swift in Sources */, D703D7872C670C7E00A400EA /* DamusPurpleEnvironment.swift in Sources */, D703D7892C670C8600A400EA /* DeepLPlan.swift in Sources */, diff --git a/damus/Core/Networking/NostrNetworkManager/ProfilesManager.swift b/damus/Core/Networking/NostrNetworkManager/ProfilesManager.swift index 43535efba..573c0245e 100644 --- a/damus/Core/Networking/NostrNetworkManager/ProfilesManager.swift +++ b/damus/Core/Networking/NostrNetworkManager/ProfilesManager.swift @@ -81,7 +81,7 @@ extension NostrNetworkManager { guard pubkeys.count > 0 else { return } let profileFilter = NostrFilter(kinds: [.metadata], authors: pubkeys) try Task.checkCancellation() - for await ndbLender in self.subscriptionManager.streamIndefinitely(filters: [profileFilter], streamMode: .ndbFirst) { + for await ndbLender in self.subscriptionManager.streamIndefinitely(filters: [profileFilter], streamMode: .ndbFirst(optimizeNetworkFilter: true)) { try Task.checkCancellation() try? ndbLender.borrow { ev in publishProfileUpdates(metadataEvent: ev) diff --git a/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift b/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift index d08de04fb..70c190a37 100644 --- a/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift +++ b/damus/Core/Networking/NostrNetworkManager/SubscriptionManager.swift @@ -133,42 +133,75 @@ extension NostrNetworkManager { if canIssueEOSE { Self.logger.debug("Session subscription \(id.uuidString, privacy: .public): Issued EOSE for session. Elapsed: \(CFAbsoluteTimeGetCurrent() - startTime, format: .fixed(precision: 2), privacy: .public) seconds") + logStreamPipelineStats("SubscriptionManager_Advanced_Stream_\(id)", "Consumer_\(id)") continuation.yield(.eose) } } - let streamTask = Task { - while !Task.isCancelled { - for await item in self.multiSessionNetworkStream(filters: filters, to: desiredRelays, streamMode: streamMode, id: id) { - try Task.checkCancellation() - switch item { - case .event(let lender): - continuation.yield(item) - case .eose: - break // Should not happen - case .ndbEose: - break // Should not happen - case .networkEose: - continuation.yield(item) - networkEOSEIssued = true - yieldEOSEIfReady() + var networkStreamTask: Task? = nil + var latestNoteTimestampSeen: UInt32? 
= nil + + let startNetworkStreamTask = { + networkStreamTask = Task { + while !Task.isCancelled { + let optimizedFilters = filters.map { + var optimizedFilter = $0 + optimizedFilter.since = latestNoteTimestampSeen + return optimizedFilter + } + for await item in self.multiSessionNetworkStream(filters: optimizedFilters, to: desiredRelays, streamMode: streamMode, id: id) { + try Task.checkCancellation() + logStreamPipelineStats("SubscriptionManager_Network_Stream_\(id)", "SubscriptionManager_Advanced_Stream_\(id)") + switch item { + case .event(let lender): + logStreamPipelineStats("SubscriptionManager_Advanced_Stream_\(id)", "Consumer_\(id)") + continuation.yield(item) + case .eose: + break // Should not happen + case .ndbEose: + break // Should not happen + case .networkEose: + logStreamPipelineStats("SubscriptionManager_Advanced_Stream_\(id)", "Consumer_\(id)") + continuation.yield(item) + networkEOSEIssued = true + yieldEOSEIfReady() + } } } } } + if streamMode.optimizeNetworkFilter == false { + // Start streaming from the network straight away + startNetworkStreamTask() + } + let ndbStreamTask = Task { while !Task.isCancelled { for await item in self.multiSessionNdbStream(filters: filters, to: desiredRelays, streamMode: streamMode, id: id) { try Task.checkCancellation() + logStreamPipelineStats("SubscriptionManager_Ndb_MultiSession_Stream_\(id)", "SubscriptionManager_Advanced_Stream_\(id)") switch item { case .event(let lender): + logStreamPipelineStats("SubscriptionManager_Advanced_Stream_\(id)", "Consumer_\(id)") + try? lender.borrow({ event in + if let latestTimestamp = latestNoteTimestampSeen { + latestNoteTimestampSeen = max(latestTimestamp, event.createdAt) + } + else { + latestNoteTimestampSeen = event.createdAt + } + }) continuation.yield(item) case .eose: break // Should not happen case .ndbEose: + logStreamPipelineStats("SubscriptionManager_Advanced_Stream_\(id)", "Consumer_\(id)") continuation.yield(item) ndbEOSEIssued = true + if streamMode.optimizeNetworkFilter { + startNetworkStreamTask() + } yieldEOSEIfReady() case .networkEose: break // Should not happen @@ -178,7 +211,7 @@ extension NostrNetworkManager { } continuation.onTermination = { @Sendable _ in - streamTask.cancel() + networkStreamTask?.cancel() ndbStreamTask.cancel() } } @@ -200,9 +233,8 @@ extension NostrNetworkManager { do { for await item in await self.pool.subscribe(filters: filters, to: desiredRelays, id: id) { - // NO-OP. Notes will be automatically ingested by NostrDB - // TODO: Improve efficiency of subscriptions? try Task.checkCancellation() + logStreamPipelineStats("RelayPool_Handler_\(id)", "SubscriptionManager_Network_Stream_\(id)") switch item { case .event(let event): if EXTRA_VERBOSE_LOGGING { @@ -249,6 +281,7 @@ extension NostrNetworkManager { Self.logger.info("\(subscriptionId.uuidString, privacy: .public): Streaming from NDB.") for await item in self.sessionNdbStream(filters: filters, to: desiredRelays, streamMode: streamMode, id: id) { try Task.checkCancellation() + logStreamPipelineStats("SubscriptionManager_Ndb_Session_Stream_\(id?.uuidString ?? "NoID")", "SubscriptionManager_Ndb_MultiSession_Stream_\(id?.uuidString ?? "NoID")") continuation.yield(item) } Self.logger.info("\(subscriptionId.uuidString, privacy: .public): Session subscription ended. Sleeping for 1 second before resuming.") @@ -318,7 +351,7 @@ extension NostrNetworkManager { // MARK: - Utility functions private func defaultStreamMode() -> StreamMode { - self.experimentalLocalRelayModelSupport ? 
.ndbFirst : .ndbAndNetworkParallel + self.experimentalLocalRelayModelSupport ? .ndbFirst(optimizeNetworkFilter: false) : .ndbAndNetworkParallel(optimizeNetworkFilter: false) } // MARK: - Finding specific data from Nostr @@ -496,8 +529,19 @@ extension NostrNetworkManager { /// The mode of streaming enum StreamMode { /// Returns notes exclusively through NostrDB, treating it as the only channel for information in the pipeline. Generic EOSE is fired when EOSE is received from NostrDB - case ndbFirst + /// `optimizeNetworkFilter`: Returns notes from ndb, then streams from the network with an added "since" filter set to the latest note stored on ndb. + case ndbFirst(optimizeNetworkFilter: Bool) /// Returns notes from both NostrDB and the network, in parallel, treating it with similar importance against the network relays. Generic EOSE is fired when EOSE is received from both the network and NostrDB - case ndbAndNetworkParallel + /// `optimizeNetworkFilter`: Returns notes from ndb, then streams from the network with an added "since" filter set to the latest note stored on ndb. + case ndbAndNetworkParallel(optimizeNetworkFilter: Bool) + + var optimizeNetworkFilter: Bool { + switch self { + case .ndbFirst(optimizeNetworkFilter: let optimizeNetworkFilter): + return optimizeNetworkFilter + case .ndbAndNetworkParallel(optimizeNetworkFilter: let optimizeNetworkFilter): + return optimizeNetworkFilter + } + } } } diff --git a/damus/Core/Nostr/RelayConnection.swift b/damus/Core/Nostr/RelayConnection.swift index 608c9ea72..1581b018a 100644 --- a/damus/Core/Nostr/RelayConnection.swift +++ b/damus/Core/Nostr/RelayConnection.swift @@ -35,6 +35,15 @@ enum NostrConnectionEvent { } } } + + var subId: String? { + switch self { + case .ws_connection_event(_): + return nil + case .nostr_event(let event): + return event.subid + } + } } final class RelayConnection: ObservableObject { diff --git a/damus/Core/Nostr/RelayPool.swift b/damus/Core/Nostr/RelayPool.swift index 8cf8e4343..be40b3ede 100644 --- a/damus/Core/Nostr/RelayPool.swift +++ b/damus/Core/Nostr/RelayPool.swift @@ -535,6 +535,8 @@ actor RelayPool { } for handler in handlers { + guard handler.sub_id == event.subId else { continue } + logStreamPipelineStats("RelayPool_\(relay_id.absoluteString)", "RelayPool_Handler_\(handler.sub_id)") handler.handler.yield((relay_id, event)) } } diff --git a/damus/Features/Search/Models/SearchHomeModel.swift b/damus/Features/Search/Models/SearchHomeModel.swift index 4d34eba3f..a94c42e4e 100644 --- a/damus/Features/Search/Models/SearchHomeModel.swift +++ b/damus/Features/Search/Models/SearchHomeModel.swift @@ -19,7 +19,7 @@ class SearchHomeModel: ObservableObject { let base_subid = UUID().description let follow_pack_subid = UUID().description let profiles_subid = UUID().description - let limit: UInt32 = 500 + let limit: UInt32 = 200 //let multiple_events_per_pubkey: Bool = false init(damus_state: DamusState) { diff --git a/damus/Features/Timeline/Models/HomeModel.swift b/damus/Features/Timeline/Models/HomeModel.swift index 0280ab571..65caa0037 100644 --- a/damus/Features/Timeline/Models/HomeModel.swift +++ b/damus/Features/Timeline/Models/HomeModel.swift @@ -524,7 +524,7 @@ class HomeModel: ContactsDelegate, ObservableObject { } self.generalHandlerTask?.cancel() self.generalHandlerTask = Task { - for await item in damus_state.nostrNetwork.reader.advancedStream(filters: dms_filters + contacts_filters) { + for await item in damus_state.nostrNetwork.reader.advancedStream(filters: dms_filters + contacts_filters, streamMode: 
.ndbAndNetworkParallel(optimizeNetworkFilter: true)) { switch item { case .event(let lender): await lender.justUseACopy({ await process_event(ev: $0, context: .other) }) @@ -602,7 +602,7 @@ class HomeModel: ContactsDelegate, ObservableObject { DispatchQueue.main.async { self.loading = true } - for await item in damus_state.nostrNetwork.reader.advancedStream(filters: home_filters, id: id) { + for await item in damus_state.nostrNetwork.reader.advancedStream(filters: home_filters, streamMode: .ndbAndNetworkParallel(optimizeNetworkFilter: true), id: id) { switch item { case .event(let lender): let currentTime = CFAbsoluteTimeGetCurrent() diff --git a/damus/Shared/Utilities/StreamPipelineDiagnostics.swift b/damus/Shared/Utilities/StreamPipelineDiagnostics.swift new file mode 100644 index 000000000..7e64651f3 --- /dev/null +++ b/damus/Shared/Utilities/StreamPipelineDiagnostics.swift @@ -0,0 +1,27 @@ +// +// StreamPipelineDiagnostics.swift +// damus +// +// Created by Daniel D’Aquino on 2025-10-15. +// +import Foundation + +let ENABLE_PIPELINE_DIAGNOSTICS = false + +fileprivate func getTimestamp() -> String { + let d = Date() + let df = DateFormatter() + df.dateFormat = "y-MM-dd H:mm:ss.SSSS" + + return df.string(from: d) +} + +/// Logs stream pipeline data in CSV format that can later be used for plotting and analysis +/// See `devtools/visualize_stream_pipeline.py` +/// +/// Implementation note: This function is inlined for performance purposes. +@inline(__always) func logStreamPipelineStats(_ sourceNode: String, _ destinationNode: String) { + if ENABLE_PIPELINE_DIAGNOSTICS { + print("STREAM_PIPELINE: \(getTimestamp()),\(sourceNode),\(destinationNode)") + } +} diff --git a/devtools/visualize_stream_pipeline.py b/devtools/visualize_stream_pipeline.py new file mode 100644 index 000000000..d260d752e --- /dev/null +++ b/devtools/visualize_stream_pipeline.py @@ -0,0 +1,475 @@ +#!/usr/bin/env python3 +"""Generate interactive Sankey diagram from network CSV data using Plotly.""" + +from __future__ import annotations + +import argparse +import csv +from datetime import datetime +from pathlib import Path +from collections import defaultdict +from typing import Dict, List, Tuple, Optional + +import plotly.graph_objects as go +import plotly.express as px + + +def parse_timestamp(timestamp_str: str) -> float: + """Parse timestamp string and return as milliseconds since epoch.""" + # Strip whitespace + timestamp_str = timestamp_str.strip() + + # Remove any prefix (e.g., "STREAM_PIPELINE: ") + if ": " in timestamp_str: + timestamp_str = timestamp_str.split(": ", 1)[1] + + try: + # Try parsing as ISO format with milliseconds + dt = datetime.fromisoformat(timestamp_str) + return dt.timestamp() * 1000 + except ValueError: + try: + # Try replacing space with 'T' for ISO format (e.g., "2025-10-13 15:36:46.3650") + if " " in timestamp_str and "-" in timestamp_str: + timestamp_str = timestamp_str.replace(" ", "T") + dt = datetime.fromisoformat(timestamp_str) + return dt.timestamp() * 1000 + raise ValueError() + except ValueError: + try: + # Try parsing as float (milliseconds) + return float(timestamp_str) + except ValueError: + raise ValueError(f"Could not parse timestamp: {timestamp_str}") + + +def load_network_data(csv_file: str, start_time: Optional[str] = None, + end_time: Optional[str] = None) -> Dict[Tuple[str, str], int]: + """ + Load network data from CSV and aggregate edge counts. 
+ + Args: + csv_file: Path to CSV file + start_time: Optional start time filter (ISO format) + end_time: Optional end time filter (ISO format) + + Returns: + Dictionary mapping (source, destination) tuples to counts + """ + edge_counts = defaultdict(int) + timestamps = [] + + # Parse time filters if provided + start_ts = parse_timestamp(start_time) if start_time else None + end_ts = parse_timestamp(end_time) if end_time else None + + with open(csv_file, 'r') as f: + reader = csv.reader(f) + + # Skip header if present + first_row = next(reader, None) + if first_row is None: + print("Empty CSV file") + return edge_counts + + # Check if first row is a header + try: + parse_timestamp(first_row[0]) + rows = [first_row] # First row is data + except (ValueError, IndexError): + rows = [] # First row is header, skip it + + # Add remaining rows + rows.extend(reader) + + for row_idx, row in enumerate(rows): + if len(row) < 3: + print(f"Skipping invalid row {row_idx + 1}: {row}") + continue + + try: + timestamp_str = row[0] + source = row[1].strip() + destination = row[2].strip() + + # Parse timestamp + timestamp_ms = parse_timestamp(timestamp_str) + + # Apply time filters + if start_ts and timestamp_ms < start_ts: + continue + if end_ts and timestamp_ms > end_ts: + continue + + timestamps.append(timestamp_ms) + edge_counts[(source, destination)] += 1 + + except (ValueError, IndexError) as e: + print(f"Error processing row {row_idx + 1}: {e}") + continue + + if timestamps: + start_dt = datetime.fromtimestamp(min(timestamps) / 1000.0) + end_dt = datetime.fromtimestamp(max(timestamps) / 1000.0) + print(f"\nLoaded {sum(edge_counts.values())} events") + print(f"Time range: {start_dt} to {end_dt}") + print(f"Unique edges: {len(edge_counts)}") + + return edge_counts + + +def filter_top_edges(edge_counts: Dict[Tuple[str, str], int], + top_n: Optional[int] = None) -> Dict[Tuple[str, str], int]: + """Filter to keep only top N most active edges.""" + if top_n is None or top_n <= 0: + return edge_counts + + # Sort by count and take top N + sorted_edges = sorted(edge_counts.items(), key=lambda x: x[1], reverse=True) + return dict(sorted_edges[:top_n]) + + +def filter_top_nodes(edge_counts: Dict[Tuple[str, str], int], + top_n: Optional[int] = None) -> Dict[Tuple[str, str], int]: + """Filter to keep only edges involving top N most active nodes.""" + if top_n is None or top_n <= 0: + return edge_counts + + # Calculate node activity (both as source and destination) + node_activity = defaultdict(int) + for (source, dest), count in edge_counts.items(): + node_activity[source] += count + node_activity[dest] += count + + # Get top N nodes + top_nodes = set(sorted(node_activity.items(), key=lambda x: x[1], reverse=True)[:top_n]) + top_nodes = {node for node, _ in top_nodes} + + # Filter edges to only include top nodes + filtered = {} + for (source, dest), count in edge_counts.items(): + if source in top_nodes and dest in top_nodes: + filtered[(source, dest)] = count + + return filtered + + +def create_sankey_diagram(edge_counts: Dict[Tuple[str, str], int], + title: str = "Network Flow Sankey Diagram", + color_scheme: str = "Viridis", + show_values: bool = True) -> go.Figure: + """ + Create an interactive Sankey diagram from edge counts. 
+ + Args: + edge_counts: Dictionary mapping (source, destination) to flow count + title: Title for the diagram + color_scheme: Plotly color scheme name + show_values: Whether to show flow values on hover + + Returns: + Plotly Figure object + """ + if not edge_counts: + print("No data to visualize") + return go.Figure() + + # Create node list (unique sources and destinations) + all_nodes = set() + for source, dest in edge_counts.keys(): + all_nodes.add(source) + all_nodes.add(dest) + + # Create node index mapping + node_list = sorted(all_nodes) + node_to_idx = {node: idx for idx, node in enumerate(node_list)} + + # Prepare Sankey data + sources = [] + targets = [] + values = [] + link_colors = [] + + for (source, dest), count in edge_counts.items(): + sources.append(node_to_idx[source]) + targets.append(node_to_idx[dest]) + values.append(count) + + # Calculate node colors based on total flow + node_flow = defaultdict(int) + for (source, dest), count in edge_counts.items(): + node_flow[source] += count + node_flow[dest] += count + + # Get color scale + max_flow = max(node_flow.values()) if node_flow else 1 + colors = px.colors.sample_colorscale( + color_scheme, + [node_flow.get(node, 0) / max_flow for node in node_list] + ) + + # Create link colors (semi-transparent version of source node color) + for source_idx in sources: + color = colors[source_idx] + # Convert to rgba with transparency + if color.startswith('rgb'): + link_colors.append(color.replace('rgb', 'rgba').replace(')', ', 0.4)')) + else: + link_colors.append(color) + + # Create hover text for nodes + node_hover = [] + for node in node_list: + total_flow = node_flow.get(node, 0) + # Calculate in/out flows + inflow = sum(count for (s, d), count in edge_counts.items() if d == node) + outflow = sum(count for (s, d), count in edge_counts.items() if s == node) + hover_text = f"{node}
" + hover_text += f"Total Flow: {total_flow}
" + hover_text += f"Inflow: {inflow}
" + hover_text += f"Outflow: {outflow}" + node_hover.append(hover_text) + + # Create hover text for links + link_hover = [] + for i, ((source, dest), count) in enumerate(edge_counts.items()): + hover_text = f"{source} → {dest}
" + hover_text += f"Flow: {count} events
" + if sum(values) > 0: + percentage = (count / sum(values)) * 100 + hover_text += f"Percentage: {percentage:.1f}%" + link_hover.append(hover_text) + + # Create the Sankey diagram + fig = go.Figure(data=[go.Sankey( + node=dict( + pad=15, + thickness=20, + line=dict(color="black", width=0.5), + label=node_list, + color=colors, + customdata=node_hover, + hovertemplate='%{customdata}' + ), + link=dict( + source=sources, + target=targets, + value=values, + color=link_colors, + customdata=link_hover, + hovertemplate='%{customdata}' + ) + )]) + + # Update layout + fig.update_layout( + title=dict( + text=title, + font=dict(size=20, color='#333') + ), + font=dict(size=12), + plot_bgcolor='white', + paper_bgcolor='white', + height=800, + margin=dict(l=20, r=20, t=80, b=20) + ) + + return fig + + +def print_summary_statistics(edge_counts: Dict[Tuple[str, str], int]) -> None: + """Print summary statistics about the network flows.""" + if not edge_counts: + print("No data to summarize") + return + + print("\n" + "="*70) + print("SANKEY DIAGRAM SUMMARY") + print("="*70) + + # Calculate statistics + total_events = sum(edge_counts.values()) + unique_edges = len(edge_counts) + + all_sources = {source for source, _ in edge_counts.keys()} + all_destinations = {dest for _, dest in edge_counts.keys()} + all_nodes = all_sources | all_destinations + + print(f"\nTotal Events: {total_events}") + print(f"Unique Edges: {unique_edges}") + print(f"Unique Nodes: {len(all_nodes)}") + print(f" - Source nodes: {len(all_sources)}") + print(f" - Destination nodes: {len(all_destinations)}") + + # Node activity + node_activity = defaultdict(lambda: {'in': 0, 'out': 0, 'total': 0}) + for (source, dest), count in edge_counts.items(): + node_activity[source]['out'] += count + node_activity[source]['total'] += count + node_activity[dest]['in'] += count + node_activity[dest]['total'] += count + + print(f"\nTop 10 Most Active Edges:") + sorted_edges = sorted(edge_counts.items(), key=lambda x: x[1], reverse=True) + for i, ((source, dest), count) in enumerate(sorted_edges[:10], 1): + pct = (count / total_events) * 100 + print(f" {i:2d}. {source:<25s} → {dest:<25s} {count:>6d} ({pct:>5.1f}%)") + + print(f"\nTop 10 Most Active Nodes (by total flow):") + sorted_nodes = sorted(node_activity.items(), key=lambda x: x[1]['total'], reverse=True) + for i, (node, flows) in enumerate(sorted_nodes[:10], 1): + print(f" {i:2d}. 
{node:<30s} Total: {flows['total']:>6d} " + f"(In: {flows['in']:>5d}, Out: {flows['out']:>5d})") + + print("\n" + "="*70 + "\n") + + +def main() -> None: + parser = argparse.ArgumentParser( + description="Generate interactive Sankey diagram from network CSV data.", + formatter_class=argparse.RawDescriptionHelpFormatter, + epilog=""" +Examples: + # Generate basic Sankey diagram + %(prog)s data.csv + + # Filter to top 20 edges and save to HTML + %(prog)s data.csv --top-edges 20 --output sankey.html + + # Filter to top 15 nodes with custom title + %(prog)s data.csv --top-nodes 15 --title "My Network Flows" + + # Filter by time range + %(prog)s data.csv --start-time "2025-01-13 10:00:00" --end-time "2025-01-13 12:00:00" + + # Combine filters + %(prog)s data.csv --top-nodes 10 --color-scheme Plasma --output flows.html + """ + ) + + parser.add_argument( + "csv_file", + type=str, + help="Path to CSV file with format: timestamp, source_node, destination_node" + ) + + parser.add_argument( + "--output", + type=str, + default=None, + help="Output HTML file path (if not specified, opens in browser)" + ) + + parser.add_argument( + "--top-edges", + type=int, + default=None, + help="Show only top N most active edges (default: all)" + ) + + parser.add_argument( + "--top-nodes", + type=int, + default=None, + help="Show only edges involving top N most active nodes (default: all)" + ) + + parser.add_argument( + "--start-time", + type=str, + default=None, + help="Start time filter (ISO format, e.g., '2025-01-13 10:00:00')" + ) + + parser.add_argument( + "--end-time", + type=str, + default=None, + help="End time filter (ISO format, e.g., '2025-01-13 12:00:00')" + ) + + parser.add_argument( + "--title", + type=str, + default="Network Flow Sankey Diagram", + help="Title for the diagram (default: 'Network Flow Sankey Diagram')" + ) + + parser.add_argument( + "--color-scheme", + type=str, + default="Viridis", + choices=["Viridis", "Plasma", "Inferno", "Magma", "Cividis", "Turbo", + "Blues", "Greens", "Reds", "Purples", "Rainbow"], + help="Color scheme for nodes (default: Viridis)" + ) + + parser.add_argument( + "--no-summary", + action="store_true", + help="Skip printing summary statistics" + ) + + parser.add_argument( + "--auto-open", + action="store_true", + help="Automatically open in browser (default: True if no output file specified)" + ) + + args = parser.parse_args() + + # Check if CSV file exists + csv_path = Path(args.csv_file) + if not csv_path.exists(): + print(f"Error: CSV file not found: {args.csv_file}") + return + + # Load data + print(f"Loading data from {args.csv_file}...") + edge_counts = load_network_data(args.csv_file, args.start_time, args.end_time) + + if not edge_counts: + print("No data to visualize!") + return + + # Apply filters + if args.top_edges: + print(f"Filtering to top {args.top_edges} edges...") + edge_counts = filter_top_edges(edge_counts, args.top_edges) + + if args.top_nodes: + print(f"Filtering to edges involving top {args.top_nodes} nodes...") + edge_counts = filter_top_nodes(edge_counts, args.top_nodes) + + # Print summary statistics + if not args.no_summary: + print_summary_statistics(edge_counts) + + # Create Sankey diagram + print("Generating Sankey diagram...") + fig = create_sankey_diagram( + edge_counts, + title=args.title, + color_scheme=args.color_scheme + ) + + # Save or show + if args.output: + output_path = Path(args.output) + output_path.parent.mkdir(parents=True, exist_ok=True) + fig.write_html(str(output_path)) + print(f"\nSaved Sankey diagram to: 
{output_path}")
+        print(f"Open the file in a web browser to view the interactive diagram.")
+
+        if args.auto_open:
+            import webbrowser
+            webbrowser.open(f"file://{output_path.absolute()}")
+    else:
+        print("\nOpening Sankey diagram in browser...")
+        fig.show()
+
+    print("\nDone!")
+
+
+if __name__ == "__main__":
+    main()
\ No newline at end of file
diff --git a/shell.nix b/shell.nix
index a53606424..ae2425cde 100644
--- a/shell.nix
+++ b/shell.nix
@@ -1,5 +1,5 @@
 { pkgs ? import <nixpkgs> {} }:
 with pkgs;
 mkShell {
-  buildInputs = with python3Packages; [ Mako requests wabt todo-txt-cli pyyaml ];
+  buildInputs = with python3Packages; [ Mako requests wabt todo-txt-cli pyyaml plotly numpy ];
 }
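
A note for reviewers of the last patch: the "since"-based optimization in SubscriptionManager.swift is easy to miss among the diagnostics plumbing. The sketch below is plain Swift with hypothetical Filter/StoredNote types (not the real damus models) and only illustrates the idea under those assumptions: remember the newest created_at seen while replaying local NostrDB notes, then apply it as a since bound so network relays only backfill what local storage is missing.

    // Hypothetical, simplified types for illustration only -- not the real damus APIs.
    struct Filter {
        var kinds: [Int]
        var since: UInt32?
    }

    struct StoredNote {
        let createdAt: UInt32
    }

    /// Sketch: after replaying local notes, narrow the network filters so relays
    /// only send notes newer than the latest one already stored locally.
    func optimizedNetworkFilters(from filters: [Filter], localNotes: [StoredNote]) -> [Filter] {
        // Analogous to `latestNoteTimestampSeen` in the patch.
        let latestLocalTimestamp = localNotes.map(\.createdAt).max()
        return filters.map { filter in
            var optimized = filter
            optimized.since = latestLocalTimestamp // nil when local storage is empty -> full backfill
            return optimized
        }
    }

    // Example: with a newest local note at t = 1_700_000_000, the network request
    // carries `since = 1_700_000_000` instead of being unbounded.
    let filters = optimizedNetworkFilters(
        from: [Filter(kinds: [1], since: nil)],
        localNotes: [StoredNote(createdAt: 1_700_000_000)]
    )
    print(filters.first?.since ?? 0)

In the actual patch this happens per subscription, and when optimizeNetworkFilter is enabled the network stream is only started after the NostrDB EOSE, so the since bound reflects everything the local store could replay.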