notedeck

One damus client to rule them all
git clone git://jb55.com/notedeck
Log | Files | Refs | README | LICENSE

commit cfa56ab056e1af20e6211dec880f64dc4d71b423
parent 06e71271861e37d2057e415134ce61baa8f44d63
Author: William Casarin <jb55@jb55.com>
Date:   Fri, 27 Feb 2026 08:24:26 -0800

fix: remove sync NDB queries that defeated async timeline loading

The outbox merge added setup_selected_account_timeline_subs() to the
Initializing path, which ran synchronous ndb.query() + insert_new()
on the UI thread for every timeline—exactly the blocking work the
async TimelineLoader was created to avoid.

Strip the queries from setup_initial_timeline, keeping only the local
NDB subscription setup (try_add_local) needed by poll_notes_into_view.
The heavy note-loading stays with the async loader.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>

Diffstat:
M crates/notedeck_columns/src/app.rs | 2 --
M crates/notedeck_columns/src/timeline/mod.rs | 77 +++++++----------------------------------------------------------------------
M crates/notedeck_columns/src/ui/add_column.rs | 4 ----
3 files changed, 7 insertions(+), 76 deletions(-)

diff --git a/crates/notedeck_columns/src/app.rs b/crates/notedeck_columns/src/app.rs @@ -373,9 +373,7 @@ pub(crate) fn setup_selected_account_timeline_subs( ) { if let Err(err) = timeline::setup_initial_nostrdb_subs( app_ctx.ndb, - app_ctx.note_cache, timeline_cache, - app_ctx.unknown_ids, *app_ctx.accounts.selected_account_pubkey(), ) { warn!("update_damus init: {err}"); diff --git a/crates/notedeck_columns/src/timeline/mod.rs b/crates/notedeck_columns/src/timeline/mod.rs @@ -673,24 +673,19 @@ pub fn merge_sorted_vecs<T: Ord + Copy>(vec1: &[T], vec2: &[T]) -> (Vec<T>, Merg /// /// We do this by maintaining this sub_id in the filter state, even when /// in the ready state. See: [`FilterReady`] -#[allow(clippy::too_many_arguments)] pub fn setup_new_timeline( timeline: &mut Timeline, ndb: &Ndb, txn: &Transaction, scoped_subs: &mut ScopedSubApi<'_, '_>, - note_cache: &mut NoteCache, since_optimize: bool, accounts: &Accounts, - unknown_ids: &mut UnknownIds, ) { let account_pk = *accounts.selected_account_pubkey(); // if we're ready, setup local subs if is_timeline_ready(ndb, scoped_subs, timeline, accounts) { - if let Err(err) = - setup_timeline_nostrdb_sub(ndb, txn, note_cache, timeline, unknown_ids, account_pk) - { + if let Err(err) = setup_initial_timeline(ndb, timeline, account_pk) { error!("setup_new_timeline: {err}"); } } @@ -812,71 +807,29 @@ pub fn fetch_people_list(ndb: &Ndb, txn: &Transaction, timeline: &mut Timeline) timeline.filter = FilterState::GotRemote; } +/// Set up the local NDB subscription for a timeline without running +/// blocking queries. The actual note loading is handled by the async +/// timeline loader. 
#[profiling::function] -fn setup_initial_timeline( - ndb: &Ndb, - txn: &Transaction, - timeline: &mut Timeline, - note_cache: &mut NoteCache, - unknown_ids: &mut UnknownIds, - account_pk: Pubkey, -) -> Result<()> { +fn setup_initial_timeline(ndb: &Ndb, timeline: &mut Timeline, account_pk: Pubkey) -> Result<()> { let FilterState::Ready(filters) = &timeline.filter else { return Err(Error::App(notedeck::Error::empty_contact_list())); }; - // some timelines are one-shot and a refreshed, like last_per_pubkey algo feed + // some timelines are one-shot and refreshed, like last_per_pubkey algo feed if timeline.kind.should_subscribe_locally() { timeline .subscription .try_add_local(account_pk, ndb, filters); } - debug!( - "querying nostrdb sub {:?} {:?}", - timeline.subscription, timeline.filter - ); - - let notes = { - let mut notes = Vec::new(); - - for package in filters.local().packages { - let mut lim = 0i32; - for filter in package.filters { - lim += filter.limit().unwrap_or(1) as i32; - } - - debug!("setup_initial_timeline: limit for local filter is {}", lim); - - let cur_notes: Vec<NoteRef> = ndb - .query(txn, package.filters, lim)? 
- .into_iter() - .map(NoteRef::from_query_result) - .collect(); - tracing::debug!( - "Found {} notes for kind: {:?}", - cur_notes.len(), - package.kind - ); - notes.extend(&cur_notes); - } - - notes - }; - - if let Some(pks) = timeline.insert_new(txn, ndb, note_cache, &notes) { - pks.process(ndb, txn, unknown_ids); - } - Ok(()) } #[profiling::function] pub fn setup_initial_nostrdb_subs( ndb: &Ndb, - note_cache: &mut NoteCache, timeline_cache: &mut TimelineCache, - unknown_ids: &mut UnknownIds, account_pk: Pubkey, ) -> Result<()> { for (_kind, timeline) in timeline_cache { @@ -884,10 +837,7 @@ pub fn setup_initial_nostrdb_subs( continue; } - let txn = Transaction::new(ndb).expect("txn"); - if let Err(err) = - setup_timeline_nostrdb_sub(ndb, &txn, note_cache, timeline, unknown_ids, account_pk) - { + if let Err(err) = setup_initial_timeline(ndb, timeline, account_pk) { error!("setup_initial_nostrdb_subs: {err}"); } } @@ -895,19 +845,6 @@ pub fn setup_initial_nostrdb_subs( Ok(()) } -fn setup_timeline_nostrdb_sub( - ndb: &Ndb, - txn: &Transaction, - note_cache: &mut NoteCache, - timeline: &mut Timeline, - unknown_ids: &mut UnknownIds, - account_pk: Pubkey, -) -> Result<()> { - setup_initial_timeline(ndb, txn, timeline, note_cache, unknown_ids, account_pk)?; - - Ok(()) -} - /// Check our timeline filter and see if we have any filter data ready. /// Our timelines may require additional data before it is functional. 
For /// example, when we have to fetch a contact list before we do the actual diff --git a/crates/notedeck_columns/src/ui/add_column.rs b/crates/notedeck_columns/src/ui/add_column.rs @@ -917,10 +917,8 @@ fn attach_timeline_column( ctx.ndb, &txn, &mut scoped_subs, - ctx.note_cache, app.options.contains(AppOptions::SinceOptimize), ctx.accounts, - ctx.unknown_ids, ); let route_kind = timeline.kind.clone(); @@ -1141,10 +1139,8 @@ fn handle_create_people_list(app: &mut Damus, ctx: &mut AppContext<'_>, col: usi ctx.ndb, &txn, &mut scoped_subs, - ctx.note_cache, app.options.contains(AppOptions::SinceOptimize), ctx.accounts, - ctx.unknown_ids, ); app.columns_mut(ctx.i18n, ctx.accounts)