commit fd030f5b5ce7d5c641b764741d22d3a89c16809d
parent ae85f2dd3415ed4427b9ac0fe396f6db72b1d431
Author: William Casarin <jb55@jb55.com>
Date: Wed, 5 Feb 2025 18:45:22 -0800
Merge rewrite deck serialization, timeline cache, add algo timelines #712
William Casarin (19):
algos: introduce last_n_per_pubkey_from_tags
wip algo timelines
Initial token parser combinator
token_parser: unify parsing and serialization
token_serializer: introduce TokenWriter
token_parser: simplify AddColumnRoute serialization
tokens: add a more advanced tokens parser
tokens: add AccountsRoute token serializer
tokens: add PubkeySource and ListKinds token serializer
tokens: add TimelineRoute token serializer
tokens: initial Route token serializer
add tokenator crate
note_id: add hex helpers for root notes
tokens: add token serialization for AlgoTimeline
tokens: add token serialization for TimelineKind
tokens: switch over to using token serialization
Switch to unified timeline cache via TimelineKinds
hashtags: click hashtags to open them
Diffstat:
46 files changed, 2249 insertions(+), 2302 deletions(-)
diff --git a/.gitignore b/.gitignore
@@ -1,6 +1,8 @@
.buildcmd
build.log
perf.data
+rusty-tags.vi
+notedeck-settings
perf.data.old
crates/notedeck_chrome/android/app/build
.privenv
diff --git a/Cargo.lock b/Cargo.lock
@@ -2828,6 +2828,7 @@ dependencies = [
"strum_macros",
"tempfile",
"thiserror 2.0.7",
+ "tokenator",
"tokio",
"tracing",
"tracing-appender",
@@ -4528,6 +4529,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
[[package]]
+name = "tokenator"
+version = "0.1.0"
+dependencies = [
+ "hex",
+]
+
+[[package]]
name = "tokio"
version = "1.42.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
diff --git a/Cargo.toml b/Cargo.toml
@@ -6,7 +6,7 @@ members = [
"crates/notedeck_chrome",
"crates/notedeck_columns",
- "crates/enostr",
+ "crates/enostr", "crates/tokenator",
]
[workspace.dependencies]
@@ -35,6 +35,7 @@ nostrdb = { git = "https://github.com/damus-io/nostrdb-rs", rev = "2111948b078b2
notedeck = { path = "crates/notedeck" }
notedeck_chrome = { path = "crates/notedeck_chrome" }
notedeck_columns = { path = "crates/notedeck_columns" }
+tokenator = { path = "crates/tokenator" }
open = "5.3.0"
poll-promise = { version = "0.3.0", features = ["tokio"] }
puffin = { git = "https://github.com/jb55/puffin", package = "puffin", rev = "70ff86d5503815219b01a009afd3669b7903a057" }
diff --git a/Makefile b/Makefile
@@ -7,7 +7,7 @@ check:
cargo check
tags: fake
- find . -type d -name target -prune -o -type f -name '*.rs' -print | xargs ctags
+ rusty-tags vi
jni: fake
cargo ndk --target arm64-v8a -o $(ANDROID_DIR)/app/src/main/jniLibs/ build --profile release
diff --git a/crates/enostr/src/note.rs b/crates/enostr/src/note.rs
@@ -9,7 +9,7 @@ pub struct NoteId([u8; 32]);
impl fmt::Debug for NoteId {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- write!(f, "{}", self.hex())
+ write!(f, "NoteId({})", self.hex())
}
}
diff --git a/crates/notedeck/src/accounts.rs b/crates/notedeck/src/accounts.rs
@@ -414,6 +414,12 @@ impl Accounts {
.or_else(|| self.accounts.iter().find_map(|a| a.to_full()))
}
+ /// Get the selected account's pubkey as bytes. Common operation so
+ /// we make it a helper here.
+ pub fn selected_account_pubkey_bytes(&self) -> Option<&[u8; 32]> {
+ self.get_selected_account().map(|kp| kp.pubkey.bytes())
+ }
+
pub fn get_selected_account(&self) -> Option<&UserAccount> {
if let Some(account_index) = self.currently_selected_account {
if let Some(account) = self.get_account(account_index) {
diff --git a/crates/notedeck/src/error.rs b/crates/notedeck/src/error.rs
@@ -35,6 +35,9 @@ impl From<String> for Error {
pub enum FilterError {
#[error("empty contact list")]
EmptyContactList,
+
+ #[error("filter not ready")]
+ FilterNotReady,
}
#[derive(Debug, Eq, PartialEq, Copy, Clone, thiserror::Error)]
diff --git a/crates/notedeck/src/filter.rs b/crates/notedeck/src/filter.rs
@@ -1,6 +1,5 @@
use crate::error::{Error, FilterError};
use crate::note::NoteRef;
-use crate::Result;
use nostrdb::{Filter, FilterBuilder, Note, Subscription};
use std::collections::HashMap;
use tracing::{debug, warn};
@@ -24,7 +23,7 @@ pub struct FilterStates {
}
impl FilterStates {
- pub fn get(&mut self, relay: &str) -> &FilterState {
+ pub fn get_mut(&mut self, relay: &str) -> &FilterState {
// if our initial state is ready, then just use that
if let FilterState::Ready(_) = self.initial_state {
&self.initial_state
@@ -190,13 +189,67 @@ impl FilteredTags {
}
}
+/// Create a "last N notes per pubkey" query.
+pub fn last_n_per_pubkey_from_tags(
+ note: &Note,
+ kind: u64,
+ notes_per_pubkey: u64,
+) -> Result<Vec<Filter>, Error> {
+ let mut filters: Vec<Filter> = vec![];
+
+ for tag in note.tags() {
+ // TODO: fix arbitrary MAX_FILTER limit in nostrdb
+ if filters.len() == 15 {
+ break;
+ }
+
+ if tag.count() < 2 {
+ continue;
+ }
+
+ let t = if let Some(t) = tag.get_unchecked(0).variant().str() {
+ t
+ } else {
+ continue;
+ };
+
+ if t == "p" {
+ let author = if let Some(author) = tag.get_unchecked(1).variant().id() {
+ author
+ } else {
+ continue;
+ };
+
+ let mut filter = Filter::new();
+ filter.start_authors_field()?;
+ filter.add_id_element(author)?;
+ filter.end_field();
+ filters.push(filter.kinds([kind]).limit(notes_per_pubkey).build());
+ } else if t == "t" {
+ let hashtag = if let Some(hashtag) = tag.get_unchecked(1).variant().str() {
+ hashtag
+ } else {
+ continue;
+ };
+
+ let mut filter = Filter::new();
+ filter.start_tags_field('t')?;
+ filter.add_str_element(hashtag)?;
+ filter.end_field();
+ filters.push(filter.kinds([kind]).limit(notes_per_pubkey).build());
+ }
+ }
+
+ Ok(filters)
+}
+
/// Create a filter from tags. This can be used to create a filter
/// from a contact list
pub fn filter_from_tags(
note: &Note,
add_pubkey: Option<&[u8; 32]>,
with_hashtags: bool,
-) -> Result<FilteredTags> {
+) -> Result<FilteredTags, Error> {
let mut author_filter = Filter::new();
let mut hashtag_filter = Filter::new();
let mut author_res: Option<FilterBuilder> = None;
@@ -284,3 +337,11 @@ pub fn filter_from_tags(
hashtags: hashtag_res,
})
}
+
+pub fn make_filters_since(raw: &[Filter], since: u64) -> Vec<Filter> {
+ let mut filters = Vec::with_capacity(raw.len());
+ for builder in raw {
+ filters.push(Filter::copy_from(builder).since(since).build());
+ }
+ filters
+}
diff --git a/crates/notedeck/src/note.rs b/crates/notedeck/src/note.rs
@@ -3,6 +3,7 @@ use enostr::NoteId;
use nostrdb::{Ndb, Note, NoteKey, QueryResult, Transaction};
use std::borrow::Borrow;
use std::cmp::Ordering;
+use std::fmt;
#[derive(Debug, Eq, PartialEq, Copy, Clone, Hash)]
pub struct NoteRef {
@@ -10,9 +11,15 @@ pub struct NoteRef {
pub created_at: u64,
}
-#[derive(Clone, Copy, Eq, PartialEq, Debug, Hash)]
+#[derive(Clone, Copy, Eq, PartialEq, Hash)]
pub struct RootNoteIdBuf([u8; 32]);
+impl fmt::Debug for RootNoteIdBuf {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "RootNoteIdBuf({})", self.hex())
+ }
+}
+
#[derive(Clone, Copy, Eq, PartialEq, Debug, Hash)]
pub struct RootNoteId<'a>(&'a [u8; 32]);
@@ -34,6 +41,10 @@ impl RootNoteIdBuf {
root_note_id_from_selected_id(ndb, note_cache, txn, id).map(|rnid| Self(*rnid.bytes()))
}
+ pub fn hex(&self) -> String {
+ hex::encode(self.bytes())
+ }
+
pub fn new_unsafe(id: [u8; 32]) -> Self {
Self(id)
}
@@ -52,6 +63,10 @@ impl<'a> RootNoteId<'a> {
self.0
}
+ pub fn hex(&self) -> String {
+ hex::encode(self.bytes())
+ }
+
pub fn to_owned(&self) -> RootNoteIdBuf {
RootNoteIdBuf::new_unsafe(*self.bytes())
}
diff --git a/crates/notedeck_chrome/src/notedeck.rs b/crates/notedeck_chrome/src/notedeck.rs
@@ -183,21 +183,20 @@ mod tests {
.column(0)
.router()
.top()
- .timeline_id();
+ .timeline_id()
+ .unwrap();
let tl2 = app
.columns(app_ctx.accounts)
.column(1)
.router()
.top()
- .timeline_id();
+ .timeline_id()
+ .unwrap();
- assert_eq!(tl1.is_some(), true);
- assert_eq!(tl2.is_some(), true);
-
- let timelines = app.columns(app_ctx.accounts).timelines();
- assert!(timelines[0].kind.is_notifications());
- assert!(timelines[1].kind.is_contacts());
+ assert_eq!(app.timeline_cache.timelines.len(), 2);
+ assert!(app.timeline_cache.timelines.get(&tl1).is_some());
+ assert!(app.timeline_cache.timelines.get(&tl2).is_some());
rmrf(tmpdir);
}
diff --git a/crates/notedeck_columns/Cargo.toml b/crates/notedeck_columns/Cargo.toml
@@ -12,6 +12,7 @@ crate-type = ["lib", "cdylib"]
[dependencies]
notedeck = { workspace = true }
+tokenator = { workspace = true }
bitflags = { workspace = true }
dirs = { workspace = true }
eframe = { workspace = true }
diff --git a/crates/notedeck_columns/src/accounts/route.rs b/crates/notedeck_columns/src/accounts/route.rs
@@ -1,5 +1,6 @@
use super::{AccountLoginResponse, AccountsViewResponse};
use serde::{Deserialize, Serialize};
+use tokenator::{ParseError, TokenParser, TokenSerializable, TokenWriter};
pub enum AccountsRouteResponse {
Accounts(AccountsViewResponse),
@@ -11,3 +12,77 @@ pub enum AccountsRoute {
Accounts,
AddAccount,
}
+
+impl AccountsRoute {
+ /// Route tokens used in both serialization and deserialization
+ fn tokens(&self) -> &'static [&'static str] {
+ match self {
+ Self::Accounts => &["accounts", "show"],
+ Self::AddAccount => &["accounts", "new"],
+ }
+ }
+}
+
+impl TokenSerializable for AccountsRoute {
+ fn serialize_tokens(&self, writer: &mut TokenWriter) {
+ for token in self.tokens() {
+ writer.write_token(token);
+ }
+ }
+
+ fn parse_from_tokens<'a>(parser: &mut TokenParser<'a>) -> Result<Self, ParseError<'a>> {
+ parser.peek_parse_token("accounts")?;
+
+ TokenParser::alt(
+ parser,
+ &[
+ |p| parse_accounts_route(p, AccountsRoute::Accounts),
+ |p| parse_accounts_route(p, AccountsRoute::AddAccount),
+ ],
+ )
+ }
+}
+
+fn parse_accounts_route<'a>(
+ parser: &mut TokenParser<'a>,
+ route: AccountsRoute,
+) -> Result<AccountsRoute, ParseError<'a>> {
+ parser.parse_all(|p| {
+ for token in route.tokens() {
+ p.parse_token(token)?;
+ }
+ Ok(route)
+ })
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use tokenator::{TokenParser, TokenSerializable, TokenWriter};
+
+ #[test]
+ fn test_accounts_route_serialize() {
+ let data_str = "accounts:show";
+ let data = &data_str.split(":").collect::<Vec<&str>>();
+ let mut token_writer = TokenWriter::default();
+ let mut parser = TokenParser::new(&data);
+ let parsed = AccountsRoute::parse_from_tokens(&mut parser).unwrap();
+ let expected = AccountsRoute::Accounts;
+ parsed.serialize_tokens(&mut token_writer);
+ assert_eq!(expected, parsed);
+ assert_eq!(token_writer.str(), data_str);
+ }
+
+ #[test]
+ fn test_new_accounts_route_serialize() {
+ let data_str = "accounts:new";
+ let data = &data_str.split(":").collect::<Vec<&str>>();
+ let mut token_writer = TokenWriter::default();
+ let mut parser = TokenParser::new(data);
+ let parsed = AccountsRoute::parse_from_tokens(&mut parser).unwrap();
+ let expected = AccountsRoute::AddAccount;
+ parsed.serialize_tokens(&mut token_writer);
+ assert_eq!(expected, parsed);
+ assert_eq!(token_writer.str(), data_str);
+ }
+}
diff --git a/crates/notedeck_columns/src/actionbar.rs b/crates/notedeck_columns/src/actionbar.rs
@@ -1,114 +1,50 @@
use crate::{
column::Columns,
route::{Route, Router},
- timeline::{TimelineCache, TimelineCacheKey},
+ timeline::{TimelineCache, TimelineKind},
};
-use enostr::{NoteId, Pubkey, RelayPool};
+use enostr::{NoteId, RelayPool};
use nostrdb::{Ndb, NoteKey, Transaction};
-use notedeck::{note::root_note_id_from_selected_id, NoteCache, RootIdError, UnknownIds};
+use notedeck::{NoteCache, UnknownIds};
use tracing::error;
-#[derive(Debug, Eq, PartialEq, Copy, Clone)]
+#[derive(Debug, Eq, PartialEq, Clone)]
pub enum NoteAction {
Reply(NoteId),
Quote(NoteId),
- OpenThread(NoteId),
- OpenProfile(Pubkey),
+ OpenTimeline(TimelineKind),
}
-pub struct NewNotes<'a> {
- pub id: TimelineCacheKey<'a>,
+pub struct NewNotes {
+ pub id: TimelineKind,
pub notes: Vec<NoteKey>,
}
-pub enum TimelineOpenResult<'a> {
- NewNotes(NewNotes<'a>),
-}
-
-/// open_thread is called when a note is selected and we need to navigate
-/// to a thread It is responsible for managing the subscription and
-/// making sure the thread is up to date. In a sense, it's a model for
-/// the thread view. We don't have a concept of model/view/controller etc
-/// in egui, but this is the closest thing to that.
-#[allow(clippy::too_many_arguments)]
-fn open_thread<'txn>(
- ndb: &Ndb,
- txn: &'txn Transaction,
- router: &mut Router<Route>,
- note_cache: &mut NoteCache,
- pool: &mut RelayPool,
- timeline_cache: &mut TimelineCache,
- selected_note: &'txn [u8; 32],
-) -> Option<TimelineOpenResult<'txn>> {
- router.route_to(Route::thread(NoteId::new(selected_note.to_owned())));
-
- match root_note_id_from_selected_id(ndb, note_cache, txn, selected_note) {
- Ok(root_id) => timeline_cache.open(
- ndb,
- note_cache,
- txn,
- pool,
- TimelineCacheKey::thread(root_id),
- ),
-
- Err(RootIdError::NoteNotFound) => {
- error!(
- "open_thread: note not found: {}",
- hex::encode(selected_note)
- );
- None
- }
-
- Err(RootIdError::NoRootId) => {
- error!(
- "open_thread: note has no root id: {}",
- hex::encode(selected_note)
- );
- None
- }
- }
+pub enum TimelineOpenResult {
+ NewNotes(NewNotes),
}
impl NoteAction {
#[allow(clippy::too_many_arguments)]
- pub fn execute<'txn, 'a>(
- &'a self,
+ pub fn execute(
+ &self,
ndb: &Ndb,
router: &mut Router<Route>,
timeline_cache: &mut TimelineCache,
note_cache: &mut NoteCache,
pool: &mut RelayPool,
- txn: &'txn Transaction,
- ) -> Option<TimelineOpenResult<'txn>>
- where
- 'a: 'txn,
- {
+ txn: &Transaction,
+ ) -> Option<TimelineOpenResult> {
match self {
NoteAction::Reply(note_id) => {
router.route_to(Route::reply(*note_id));
None
}
- NoteAction::OpenThread(note_id) => open_thread(
- ndb,
- txn,
- router,
- note_cache,
- pool,
- timeline_cache,
- note_id.bytes(),
- ),
-
- NoteAction::OpenProfile(pubkey) => {
- router.route_to(Route::profile(*pubkey));
- timeline_cache.open(
- ndb,
- note_cache,
- txn,
- pool,
- TimelineCacheKey::profile(pubkey.as_ref()),
- )
+ NoteAction::OpenTimeline(kind) => {
+ router.route_to(Route::Timeline(kind.to_owned()));
+ timeline_cache.open(ndb, note_cache, txn, pool, kind)
}
NoteAction::Quote(note_id) => {
@@ -121,7 +57,7 @@ impl NoteAction {
/// Execute the NoteAction and process the TimelineOpenResult
#[allow(clippy::too_many_arguments)]
pub fn execute_and_process_result(
- self,
+ &self,
ndb: &Ndb,
columns: &mut Columns,
col: usize,
@@ -138,8 +74,8 @@ impl NoteAction {
}
}
-impl<'a> TimelineOpenResult<'a> {
- pub fn new_notes(notes: Vec<NoteKey>, id: TimelineCacheKey<'a>) -> Self {
+impl TimelineOpenResult {
+ pub fn new_notes(notes: Vec<NoteKey>, id: TimelineKind) -> Self {
Self::NewNotes(NewNotes::new(notes, id))
}
@@ -160,8 +96,8 @@ impl<'a> TimelineOpenResult<'a> {
}
}
-impl<'a> NewNotes<'a> {
- pub fn new(notes: Vec<NoteKey>, id: TimelineCacheKey<'a>) -> Self {
+impl NewNotes {
+ pub fn new(notes: Vec<NoteKey>, id: TimelineKind) -> Self {
NewNotes { notes, id }
}
@@ -175,46 +111,18 @@ impl<'a> NewNotes<'a> {
unknown_ids: &mut UnknownIds,
note_cache: &mut NoteCache,
) {
- match self.id {
- TimelineCacheKey::Profile(pubkey) => {
- let profile = if let Some(profile) = timeline_cache.profiles.get_mut(pubkey.bytes())
- {
- profile
- } else {
- return;
- };
-
- let reversed = false;
-
- if let Err(err) = profile.timeline.insert(
- &self.notes,
- ndb,
- txn,
- unknown_ids,
- note_cache,
- reversed,
- ) {
- error!("error inserting notes into profile timeline: {err}")
- }
- }
-
- TimelineCacheKey::Thread(root_id) => {
- // threads are chronological, ie reversed from reverse-chronological, the default.
- let reversed = true;
- let thread = if let Some(thread) = timeline_cache.threads.get_mut(root_id.bytes()) {
- thread
- } else {
- return;
- };
-
- if let Err(err) =
- thread
- .timeline
- .insert(&self.notes, ndb, txn, unknown_ids, note_cache, reversed)
- {
- error!("error inserting notes into thread timeline: {err}")
- }
- }
+ let reversed = matches!(&self.id, TimelineKind::Thread(_));
+
+ let timeline = if let Some(profile) = timeline_cache.timelines.get_mut(&self.id) {
+ profile
+ } else {
+ error!("NewNotes: could not get timeline for key {}", self.id);
+ return;
+ };
+
+ if let Err(err) = timeline.insert(&self.notes, ndb, txn, unknown_ids, note_cache, reversed)
+ {
+ error!("error inserting notes into timeline: {err}")
}
}
}
diff --git a/crates/notedeck_columns/src/app.rs b/crates/notedeck_columns/src/app.rs
@@ -111,7 +111,7 @@ fn try_process_event(
timeline::send_initial_timeline_filters(
app_ctx.ndb,
damus.since_optimize,
- get_active_columns_mut(app_ctx.accounts, &mut damus.decks_cache),
+ &mut damus.timeline_cache,
&mut damus.subscriptions,
app_ctx.pool,
&ev.relay,
@@ -127,30 +127,16 @@ fn try_process_event(
}
}
- let current_columns = get_active_columns_mut(app_ctx.accounts, &mut damus.decks_cache);
- let n_timelines = current_columns.timelines().len();
- for timeline_ind in 0..n_timelines {
- let is_ready = {
- let timeline = &mut current_columns.timelines[timeline_ind];
- timeline::is_timeline_ready(
- app_ctx.ndb,
- app_ctx.pool,
- app_ctx.note_cache,
- timeline,
- app_ctx
- .accounts
- .get_selected_account()
- .as_ref()
- .map(|sa| &sa.pubkey),
- )
- };
+ for (_kind, timeline) in damus.timeline_cache.timelines.iter_mut() {
+ let is_ready =
+ timeline::is_timeline_ready(app_ctx.ndb, app_ctx.pool, app_ctx.note_cache, timeline);
if is_ready {
let txn = Transaction::new(app_ctx.ndb).expect("txn");
// only thread timelines are reversed
let reversed = false;
- if let Err(err) = current_columns.timelines_mut()[timeline_ind].poll_notes_into_view(
+ if let Err(err) = timeline.poll_notes_into_view(
app_ctx.ndb,
&txn,
app_ctx.unknown_ids,
@@ -193,7 +179,7 @@ fn update_damus(damus: &mut Damus, app_ctx: &mut AppContext<'_>, ctx: &egui::Con
if let Err(err) = timeline::setup_initial_nostrdb_subs(
app_ctx.ndb,
app_ctx.note_cache,
- &mut damus.decks_cache,
+ &mut damus.timeline_cache,
) {
warn!("update_damus init: {err}");
}
@@ -208,15 +194,16 @@ fn update_damus(damus: &mut Damus, app_ctx: &mut AppContext<'_>, ctx: &egui::Con
}
fn handle_eose(
- damus: &mut Damus,
+ subscriptions: &Subscriptions,
+ timeline_cache: &mut TimelineCache,
ctx: &mut AppContext<'_>,
subid: &str,
relay_url: &str,
) -> Result<()> {
- let sub_kind = if let Some(sub_kind) = damus.subscriptions().get(subid) {
+ let sub_kind = if let Some(sub_kind) = subscriptions.subs.get(subid) {
sub_kind
} else {
- let n_subids = damus.subscriptions().len();
+ let n_subids = subscriptions.subs.len();
warn!(
"got unknown eose subid {}, {} tracked subscriptions",
subid, n_subids
@@ -224,7 +211,7 @@ fn handle_eose(
return Ok(());
};
- match *sub_kind {
+ match sub_kind {
SubKind::Timeline(_) => {
// eose on timeline? whatevs
}
@@ -233,7 +220,7 @@ fn handle_eose(
unknowns::update_from_columns(
&txn,
ctx.unknown_ids,
- get_active_columns(ctx.accounts, &damus.decks_cache),
+ timeline_cache,
ctx.ndb,
ctx.note_cache,
);
@@ -250,10 +237,7 @@ fn handle_eose(
}
SubKind::FetchingContactList(timeline_uid) => {
- let timeline = if let Some(tl) =
- get_active_columns_mut(ctx.accounts, &mut damus.decks_cache)
- .find_timeline_mut(timeline_uid)
- {
+ let timeline = if let Some(tl) = timeline_cache.timelines.get_mut(timeline_uid) {
tl
} else {
error!(
@@ -263,7 +247,7 @@ fn handle_eose(
return Ok(());
};
- let filter_state = timeline.filter.get(relay_url);
+ let filter_state = timeline.filter.get_mut(relay_url);
// If this request was fetching a contact list, our filter
// state should be "FetchingRemote". We look at the local
@@ -325,7 +309,13 @@ fn process_message(damus: &mut Damus, ctx: &mut AppContext<'_>, relay: &str, msg
RelayMessage::Notice(msg) => warn!("Notice from {}: {}", relay, msg),
RelayMessage::OK(cr) => info!("OK {:?}", cr),
RelayMessage::Eose(sid) => {
- if let Err(err) = handle_eose(damus, ctx, sid, relay) {
+ if let Err(err) = handle_eose(
+ &damus.subscriptions,
+ &mut damus.timeline_cache,
+ ctx,
+ sid,
+ relay,
+ ) {
error!("error handling eose: {}", err);
}
}
@@ -367,39 +357,58 @@ impl Damus {
pub fn new(ctx: &mut AppContext<'_>, args: &[String]) -> Self {
// arg parsing
- let parsed_args = ColumnsArgs::parse(args);
+ let parsed_args = ColumnsArgs::parse(
+ args,
+ ctx.accounts
+ .get_selected_account()
+ .as_ref()
+ .map(|kp| &kp.pubkey),
+ );
+
let account = ctx
.accounts
.get_selected_account()
.as_ref()
.map(|a| a.pubkey.bytes());
+ let mut timeline_cache = TimelineCache::default();
let tmp_columns = !parsed_args.columns.is_empty();
let decks_cache = if tmp_columns {
info!("DecksCache: loading from command line arguments");
let mut columns: Columns = Columns::new();
+ let txn = Transaction::new(ctx.ndb).unwrap();
for col in parsed_args.columns {
- if let Some(timeline) = col.into_timeline(ctx.ndb, account) {
- columns.add_new_timeline_column(timeline);
+ let timeline_kind = col.into_timeline_kind();
+ if let Some(add_result) = columns.add_new_timeline_column(
+ &mut timeline_cache,
+ &txn,
+ ctx.ndb,
+ ctx.note_cache,
+ ctx.pool,
+ &timeline_kind,
+ ) {
+ add_result.process(
+ ctx.ndb,
+ ctx.note_cache,
+ &txn,
+ &mut timeline_cache,
+ ctx.unknown_ids,
+ );
}
}
columns_to_decks_cache(columns, account)
- } else if let Some(decks_cache) = crate::storage::load_decks_cache(ctx.path, ctx.ndb) {
+ } else if let Some(decks_cache) =
+ crate::storage::load_decks_cache(ctx.path, ctx.ndb, &mut timeline_cache)
+ {
info!(
"DecksCache: loading from disk {}",
crate::storage::DECKS_CACHE_FILE
);
decks_cache
- } else if let Some(cols) = storage::deserialize_columns(ctx.path, ctx.ndb, account) {
- info!(
- "DecksCache: loading from disk at depreciated location {}",
- crate::storage::COLUMNS_FILE
- );
- columns_to_decks_cache(cols, account)
} else {
info!("DecksCache: creating new with demo configuration");
- let mut cache = DecksCache::new_with_demo_config(ctx.ndb);
+ let mut cache = DecksCache::new_with_demo_config(&mut timeline_cache, ctx);
for account in ctx.accounts.get_accounts() {
cache.add_deck_default(account.pubkey);
}
@@ -414,7 +423,7 @@ impl Damus {
Self {
subscriptions: Subscriptions::default(),
since_optimize: parsed_args.since_optimize,
- timeline_cache: TimelineCache::default(),
+ timeline_cache,
drafts: Drafts::default(),
state: DamusState::Initializing,
textmode: parsed_args.textmode,
@@ -565,7 +574,8 @@ fn timelines_view(ui: &mut egui::Ui, sizes: Size, app: &mut Damus, ctx: &mut App
let mut save_cols = false;
if let Some(action) = side_panel_action {
- save_cols = save_cols || action.process(&mut app.decks_cache, ctx);
+ save_cols =
+ save_cols || action.process(&mut app.timeline_cache, &mut app.decks_cache, ctx);
}
let num_cols = app.columns(ctx.accounts).num_columns();
diff --git a/crates/notedeck_columns/src/args.rs b/crates/notedeck_columns/src/args.rs
@@ -1,8 +1,5 @@
-use notedeck::FilterState;
-
-use crate::timeline::{PubkeySource, Timeline, TimelineKind, TimelineTab};
+use crate::timeline::TimelineKind;
use enostr::{Filter, Pubkey};
-use nostrdb::Ndb;
use tracing::{debug, error, info};
pub struct ColumnsArgs {
@@ -12,7 +9,7 @@ pub struct ColumnsArgs {
}
impl ColumnsArgs {
- pub fn parse(args: &[String]) -> Self {
+ pub fn parse(args: &[String], deck_author: Option<&Pubkey>) -> Self {
let mut res = Self {
columns: vec![],
since_optimize: true,
@@ -55,40 +52,48 @@ impl ColumnsArgs {
if let Ok(pubkey) = Pubkey::parse(rest) {
info!("contact column for user {}", pubkey.hex());
res.columns
- .push(ArgColumn::Timeline(TimelineKind::contact_list(
- PubkeySource::Explicit(pubkey),
- )))
+ .push(ArgColumn::Timeline(TimelineKind::contact_list(pubkey)))
} else {
error!("error parsing contacts pubkey {}", rest);
continue;
}
} else if column_name == "contacts" {
- res.columns
- .push(ArgColumn::Timeline(TimelineKind::contact_list(
- PubkeySource::DeckAuthor,
- )))
+ if let Some(deck_author) = deck_author {
+ res.columns
+ .push(ArgColumn::Timeline(TimelineKind::contact_list(
+ deck_author.to_owned(),
+ )))
+ } else {
+ panic!("No accounts available, could not handle implicit pubkey contacts column")
+ }
} else if let Some(notif_pk_str) = column_name.strip_prefix("notifications:") {
if let Ok(pubkey) = Pubkey::parse(notif_pk_str) {
info!("got notifications column for user {}", pubkey.hex());
res.columns
- .push(ArgColumn::Timeline(TimelineKind::notifications(
- PubkeySource::Explicit(pubkey),
- )))
+ .push(ArgColumn::Timeline(TimelineKind::notifications(pubkey)))
} else {
error!("error parsing notifications pubkey {}", notif_pk_str);
continue;
}
} else if column_name == "notifications" {
debug!("got notification column for default user");
- res.columns
- .push(ArgColumn::Timeline(TimelineKind::notifications(
- PubkeySource::DeckAuthor,
- )))
+ if let Some(deck_author) = deck_author {
+ res.columns
+ .push(ArgColumn::Timeline(TimelineKind::notifications(
+ deck_author.to_owned(),
+ )));
+ } else {
+ panic!("Tried to push notifications timeline with no available users");
+ }
} else if column_name == "profile" {
debug!("got profile column for default user");
- res.columns.push(ArgColumn::Timeline(TimelineKind::profile(
- PubkeySource::DeckAuthor,
- )))
+ if let Some(deck_author) = deck_author {
+ res.columns.push(ArgColumn::Timeline(TimelineKind::profile(
+ deck_author.to_owned(),
+ )));
+ } else {
+ panic!("Tried to push profile timeline with no available users");
+ }
} else if column_name == "universe" {
debug!("got universe column");
res.columns
@@ -96,9 +101,8 @@ impl ColumnsArgs {
} else if let Some(profile_pk_str) = column_name.strip_prefix("profile:") {
if let Ok(pubkey) = Pubkey::parse(profile_pk_str) {
info!("got profile column for user {}", pubkey.hex());
- res.columns.push(ArgColumn::Timeline(TimelineKind::profile(
- PubkeySource::Explicit(pubkey),
- )))
+ res.columns
+ .push(ArgColumn::Timeline(TimelineKind::profile(pubkey)))
} else {
error!("error parsing profile pubkey {}", profile_pk_str);
continue;
@@ -146,14 +150,13 @@ pub enum ArgColumn {
}
impl ArgColumn {
- pub fn into_timeline(self, ndb: &Ndb, user: Option<&[u8; 32]>) -> Option<Timeline> {
+ pub fn into_timeline_kind(self) -> TimelineKind {
match self {
- ArgColumn::Generic(filters) => Some(Timeline::new(
- TimelineKind::Generic,
- FilterState::ready(filters),
- TimelineTab::full_tabs(),
- )),
- ArgColumn::Timeline(tk) => tk.into_timeline(ndb, user),
+ ArgColumn::Generic(_filters) => {
+ // TODO: fix generic filters by referencing some filter map
+ TimelineKind::Generic(0)
+ }
+ ArgColumn::Timeline(tk) => tk,
}
}
}
diff --git a/crates/notedeck_columns/src/column.rs b/crates/notedeck_columns/src/column.rs
@@ -1,8 +1,12 @@
-use crate::route::{Route, Router};
-use crate::timeline::{Timeline, TimelineId};
-use indexmap::IndexMap;
+use crate::{
+ actionbar::TimelineOpenResult,
+ route::{Route, Router},
+ timeline::{Timeline, TimelineCache, TimelineKind},
+};
+use enostr::RelayPool;
+use nostrdb::{Ndb, Transaction};
+use notedeck::NoteCache;
use std::iter::Iterator;
-use std::sync::atomic::{AtomicU32, Ordering};
use tracing::warn;
#[derive(Clone)]
@@ -28,36 +32,29 @@ impl Column {
#[derive(Default)]
pub struct Columns {
/// Columns are simply routers into settings, timelines, etc
- columns: IndexMap<u32, Column>,
-
- /// Timeline state is not tied to routing logic separately, so that
- /// different columns can navigate to and from settings to timelines,
- /// etc.
- pub timelines: IndexMap<u32, Timeline>,
+ columns: Vec<Column>,
/// The selected column for key navigation
selected: i32,
}
-static UIDS: AtomicU32 = AtomicU32::new(0);
impl Columns {
pub fn new() -> Self {
Columns::default()
}
- pub fn add_new_timeline_column(&mut self, timeline: Timeline) {
- let id = Self::get_new_id();
- let routes = vec![Route::timeline(timeline.id)];
- self.timelines.insert(id, timeline);
- self.columns.insert(id, Column::new(routes));
- }
-
- pub fn add_timeline_to_column(&mut self, col: usize, timeline: Timeline) {
- let col_id = self.get_column_id_at_index(col);
- self.column_mut(col)
- .router_mut()
- .route_to_replaced(Route::timeline(timeline.id));
- self.timelines.insert(col_id, timeline);
+ pub fn add_new_timeline_column(
+ &mut self,
+ timeline_cache: &mut TimelineCache,
+ txn: &Transaction,
+ ndb: &Ndb,
+ note_cache: &mut NoteCache,
+ pool: &mut RelayPool,
+ kind: &TimelineKind,
+ ) -> Option<TimelineOpenResult> {
+ self.columns
+ .push(Column::new(vec![Route::timeline(kind.to_owned())]));
+ timeline_cache.open(ndb, note_cache, txn, pool, kind)
}
pub fn new_column_picker(&mut self) {
@@ -66,38 +63,38 @@ impl Columns {
)]));
}
- pub fn insert_intermediary_routes(&mut self, intermediary_routes: Vec<IntermediaryRoute>) {
- let id = Self::get_new_id();
-
+ pub fn insert_intermediary_routes(
+ &mut self,
+ timeline_cache: &mut TimelineCache,
+ intermediary_routes: Vec<IntermediaryRoute>,
+ ) {
let routes = intermediary_routes
.into_iter()
.map(|r| match r {
IntermediaryRoute::Timeline(timeline) => {
- let route = Route::timeline(timeline.id);
- self.timelines.insert(id, timeline);
+ let route = Route::timeline(timeline.kind.clone());
+ timeline_cache
+ .timelines
+ .insert(timeline.kind.clone(), timeline);
route
}
IntermediaryRoute::Route(route) => route,
})
.collect();
- self.columns.insert(id, Column::new(routes));
- }
-
- fn get_new_id() -> u32 {
- UIDS.fetch_add(1, Ordering::Relaxed)
+ self.columns.push(Column::new(routes));
}
pub fn add_column_at(&mut self, column: Column, index: u32) {
- self.columns.insert(index, column);
+ self.columns.insert(index as usize, column);
}
pub fn add_column(&mut self, column: Column) {
- self.columns.insert(Self::get_new_id(), column);
+ self.columns.push(column);
}
- pub fn columns_mut(&mut self) -> Vec<&mut Column> {
- self.columns.values_mut().collect()
+ pub fn columns_mut(&mut self) -> &mut Vec<Column> {
+ &mut self.columns
}
pub fn num_columns(&self) -> usize {
@@ -110,72 +107,23 @@ impl Columns {
if self.columns.is_empty() {
self.new_column_picker();
}
- self.columns
- .get_index_mut(0)
- .expect("There should be at least one column")
- .1
- .router_mut()
- }
-
- pub fn timeline_mut(&mut self, timeline_ind: usize) -> &mut Timeline {
- self.timelines
- .get_index_mut(timeline_ind)
- .expect("expected index to be in bounds")
- .1
+ self.columns[0].router_mut()
}
pub fn column(&self, ind: usize) -> &Column {
- self.columns
- .get_index(ind)
- .expect("Expected index to be in bounds")
- .1
+ &self.columns[ind]
}
- pub fn columns(&self) -> Vec<&Column> {
- self.columns.values().collect()
- }
-
- pub fn get_column_id_at_index(&self, ind: usize) -> u32 {
- *self
- .columns
- .get_index(ind)
- .expect("expected index to be within bounds")
- .0
+ pub fn columns(&self) -> &[Column] {
+ &self.columns
}
pub fn selected(&mut self) -> &mut Column {
- self.columns
- .get_index_mut(self.selected as usize)
- .expect("Expected selected index to be in bounds")
- .1
- }
-
- pub fn timelines_mut(&mut self) -> Vec<&mut Timeline> {
- self.timelines.values_mut().collect()
- }
-
- pub fn timelines(&self) -> Vec<&Timeline> {
- self.timelines.values().collect()
- }
-
- pub fn find_timeline_mut(&mut self, id: TimelineId) -> Option<&mut Timeline> {
- self.timelines_mut().into_iter().find(|tl| tl.id == id)
- }
-
- pub fn find_timeline(&self, id: TimelineId) -> Option<&Timeline> {
- self.timelines().into_iter().find(|tl| tl.id == id)
+ &mut self.columns[self.selected as usize]
}
pub fn column_mut(&mut self, ind: usize) -> &mut Column {
- self.columns
- .get_index_mut(ind)
- .expect("Expected index to be in bounds")
- .1
- }
-
- pub fn find_timeline_for_column_index(&self, ind: usize) -> Option<&Timeline> {
- let col_id = self.get_column_id_at_index(ind);
- self.timelines.get(&col_id)
+ &mut self.columns[ind]
}
pub fn select_down(&mut self) {
@@ -200,16 +148,22 @@ impl Columns {
self.selected += 1;
}
- pub fn delete_column(&mut self, index: usize) {
- if let Some((key, _)) = self.columns.get_index_mut(index) {
- self.timelines.shift_remove(key);
+ #[must_use = "you must call timeline_cache.pop() for each returned value"]
+ pub fn delete_column(&mut self, index: usize) -> Vec<TimelineKind> {
+ let mut kinds_to_pop: Vec<TimelineKind> = vec![];
+ for route in self.columns[index].router().routes() {
+ if let Route::Timeline(kind) = route {
+ kinds_to_pop.push(kind.clone());
+ }
}
- self.columns.shift_remove_index(index);
+ self.columns.remove(index);
if self.columns.is_empty() {
self.new_column_picker();
}
+
+ kinds_to_pop
}
pub fn move_col(&mut self, from_index: usize, to_index: usize) {
@@ -220,15 +174,7 @@ impl Columns {
return;
}
- if from_index < to_index {
- for i in from_index..to_index {
- self.columns.swap_indices(i, i + 1);
- }
- } else {
- for i in (to_index..from_index).rev() {
- self.columns.swap_indices(i, i + 1);
- }
- }
+ self.columns.swap(from_index, to_index);
}
}
diff --git a/crates/notedeck_columns/src/decks.rs b/crates/notedeck_columns/src/decks.rs
@@ -1,14 +1,15 @@
use std::collections::{hash_map::ValuesMut, HashMap};
use enostr::Pubkey;
-use nostrdb::Ndb;
+use nostrdb::Transaction;
+use notedeck::AppContext;
use tracing::{error, info};
use crate::{
accounts::AccountsRoute,
column::{Column, Columns},
route::Route,
- timeline::{self, Timeline, TimelineKind},
+ timeline::{TimelineCache, TimelineKind},
ui::{add_column::AddColumnRoute, configure_deck::ConfigureDeckResponse},
};
@@ -44,10 +45,13 @@ impl DecksCache {
}
}
- pub fn new_with_demo_config(ndb: &Ndb) -> Self {
+ pub fn new_with_demo_config(timeline_cache: &mut TimelineCache, ctx: &mut AppContext) -> Self {
let mut account_to_decks: HashMap<Pubkey, Decks> = Default::default();
let fallback_pubkey = FALLBACK_PUBKEY();
- account_to_decks.insert(fallback_pubkey, demo_decks(fallback_pubkey, ndb));
+ account_to_decks.insert(
+ fallback_pubkey,
+ demo_decks(fallback_pubkey, timeline_cache, ctx),
+ );
DecksCache::new(account_to_decks)
}
@@ -298,7 +302,11 @@ impl Deck {
}
}
-pub fn demo_decks(demo_pubkey: Pubkey, ndb: &Ndb) -> Decks {
+pub fn demo_decks(
+ demo_pubkey: Pubkey,
+ timeline_cache: &mut TimelineCache,
+ ctx: &mut AppContext,
+) -> Decks {
let deck = {
let mut columns = Columns::default();
columns.add_column(Column::new(vec![
@@ -306,14 +314,27 @@ pub fn demo_decks(demo_pubkey: Pubkey, ndb: &Ndb) -> Decks {
Route::Accounts(AccountsRoute::Accounts),
]));
- if let Some(timeline) =
- TimelineKind::contact_list(timeline::PubkeySource::Explicit(demo_pubkey))
- .into_timeline(ndb, Some(demo_pubkey.bytes()))
- {
- columns.add_new_timeline_column(timeline);
+ let kind = TimelineKind::contact_list(demo_pubkey);
+ let txn = Transaction::new(ctx.ndb).unwrap();
+
+ if let Some(results) = columns.add_new_timeline_column(
+ timeline_cache,
+ &txn,
+ ctx.ndb,
+ ctx.note_cache,
+ ctx.pool,
+ &kind,
+ ) {
+ results.process(
+ ctx.ndb,
+ ctx.note_cache,
+ &txn,
+ timeline_cache,
+ ctx.unknown_ids,
+ );
}
- columns.add_new_timeline_column(Timeline::hashtag("introductions".to_string()));
+ //columns.add_new_timeline_column(Timeline::hashtag("introductions".to_string()));
Deck {
icon: '🇩',
diff --git a/crates/notedeck_columns/src/error.rs b/crates/notedeck_columns/src/error.rs
@@ -5,6 +5,9 @@ pub enum Error {
#[error("timeline not found")]
TimelineNotFound,
+ #[error("timeline is missing a subscription")]
+ MissingSubscription,
+
#[error("load failed")]
LoadFailed,
diff --git a/crates/notedeck_columns/src/lib.rs b/crates/notedeck_columns/src/lib.rs
@@ -29,7 +29,6 @@ mod route;
mod subscriptions;
mod support;
mod test_data;
-mod thread;
mod timeline;
pub mod ui;
mod unknowns;
diff --git a/crates/notedeck_columns/src/multi_subscriber.rs b/crates/notedeck_columns/src/multi_subscriber.rs
@@ -1,107 +1,145 @@
use enostr::{Filter, RelayPool};
-use nostrdb::Ndb;
+use nostrdb::{Ndb, Subscription};
use tracing::{error, info};
use uuid::Uuid;
-use notedeck::UnifiedSubscription;
-
+#[derive(Debug)]
pub struct MultiSubscriber {
- filters: Vec<Filter>,
- pub sub: Option<UnifiedSubscription>,
- subscribers: u32,
+ pub filters: Vec<Filter>,
+ pub local_subid: Option<Subscription>,
+ pub remote_subid: Option<String>,
+ local_subscribers: u32,
+ remote_subscribers: u32,
}
impl MultiSubscriber {
+ /// Create a MultiSubscriber with an initial local subscription.
+ pub fn with_initial_local_sub(sub: Subscription, filters: Vec<Filter>) -> Self {
+ let mut msub = MultiSubscriber::new(filters);
+ msub.local_subid = Some(sub);
+ msub.local_subscribers = 1;
+ msub
+ }
+
pub fn new(filters: Vec<Filter>) -> Self {
Self {
filters,
- sub: None,
- subscribers: 0,
+ local_subid: None,
+ remote_subid: None,
+ local_subscribers: 0,
+ remote_subscribers: 0,
}
}
- fn real_subscribe(
- ndb: &Ndb,
- pool: &mut RelayPool,
- filters: Vec<Filter>,
- ) -> Option<UnifiedSubscription> {
- let subid = Uuid::new_v4().to_string();
- let sub = ndb.subscribe(&filters).ok()?;
+ fn unsubscribe_remote(&mut self, ndb: &Ndb, pool: &mut RelayPool) {
+ let remote_subid = if let Some(remote_subid) = &self.remote_subid {
+ remote_subid
+ } else {
+ self.err_log(ndb, "unsubscribe_remote: nothing to unsubscribe from?");
+ return;
+ };
- pool.subscribe(subid.clone(), filters);
+ pool.unsubscribe(remote_subid.clone());
- Some(UnifiedSubscription {
- local: sub,
- remote: subid,
- })
+ self.remote_subid = None;
}
- pub fn unsubscribe(&mut self, ndb: &mut Ndb, pool: &mut RelayPool) {
- if self.subscribers == 0 {
- error!("No subscribers to unsubscribe from");
+ /// Locally unsubscribe if we have one
+ fn unsubscribe_local(&mut self, ndb: &mut Ndb) {
+ let local_sub = if let Some(local_sub) = self.local_subid {
+ local_sub
+ } else {
+ self.err_log(ndb, "unsubscribe_local: nothing to unsubscribe from?");
return;
- }
+ };
- self.subscribers -= 1;
- if self.subscribers == 0 {
- let sub = match self.sub {
- Some(ref sub) => sub,
- None => {
- error!("No remote subscription to unsubscribe from");
- return;
- }
- };
- let local_sub = &sub.local;
- if let Err(e) = ndb.unsubscribe(*local_sub) {
- error!(
- "failed to unsubscribe from object: {e}, subid:{}, {} active subscriptions",
- local_sub.id(),
- ndb.subscription_count()
- );
- } else {
- info!(
- "Unsubscribed from object subid:{}. {} active subscriptions",
- local_sub.id(),
- ndb.subscription_count()
- );
+ match ndb.unsubscribe(local_sub) {
+ Err(e) => {
+ self.err_log(ndb, &format!("Failed to unsubscribe: {e}"));
}
+ Ok(_) => {
+ self.local_subid = None;
+ }
+ }
+ }
- // unsub from remote
- pool.unsubscribe(sub.remote.clone());
- self.sub = None;
- } else {
- info!(
- "Locally unsubscribing. {} active ndb subscriptions. {} active subscriptions for this object",
- ndb.subscription_count(),
- self.subscribers,
+ pub fn unsubscribe(&mut self, ndb: &mut Ndb, pool: &mut RelayPool) -> bool {
+ if self.local_subscribers == 0 && self.remote_subscribers == 0 {
+ self.err_log(
+ ndb,
+ "Called multi_subscriber unsubscribe when both sub counts are 0",
);
+ return false;
+ }
+
+ self.local_subscribers = self.local_subscribers.saturating_sub(1);
+ self.remote_subscribers = self.remote_subscribers.saturating_sub(1);
+
+ if self.local_subscribers == 0 && self.remote_subscribers == 0 {
+ self.info_log(ndb, "Locally unsubscribing");
+ self.unsubscribe_local(ndb);
+ self.unsubscribe_remote(ndb, pool);
+ self.local_subscribers = 0;
+ self.remote_subscribers = 0;
+ true
+ } else {
+ false
}
}
+ fn info_log(&self, ndb: &Ndb, msg: &str) {
+ info!(
+ "{msg}. {}/{}/{} active ndb/local/remote subscriptions.",
+ ndb.subscription_count(),
+ self.local_subscribers,
+ self.remote_subscribers,
+ );
+ }
+
+ fn err_log(&self, ndb: &Ndb, msg: &str) {
+ error!(
+ "{msg}. {}/{}/{} active ndb/local/remote subscriptions.",
+ ndb.subscription_count(),
+ self.local_subscribers,
+ self.remote_subscribers,
+ );
+ }
+
pub fn subscribe(&mut self, ndb: &Ndb, pool: &mut RelayPool) {
- self.subscribers += 1;
- if self.subscribers == 1 {
- if self.sub.is_some() {
- error!("Object is first subscriber, but it already had remote subscription");
+ self.local_subscribers += 1;
+ self.remote_subscribers += 1;
+
+ if self.remote_subscribers == 1 {
+ if self.remote_subid.is_some() {
+ self.err_log(
+ ndb,
+ "Object is first subscriber, but it already had a subscription",
+ );
return;
+ } else {
+ let subid = Uuid::new_v4().to_string();
+ pool.subscribe(subid.clone(), self.filters.clone());
+ self.info_log(ndb, "First remote subscription");
+ self.remote_subid = Some(subid);
}
+ }
- self.sub = Self::real_subscribe(ndb, pool, self.filters.clone());
- info!(
- "Remotely subscribing to object. {} total active subscriptions, {} on this object",
- ndb.subscription_count(),
- self.subscribers,
- );
+ if self.local_subscribers == 1 {
+ if self.local_subid.is_some() {
+ self.err_log(ndb, "Should not have a local subscription already");
+ return;
+ }
- if self.sub.is_none() {
- error!("Error subscribing remotely to object");
+ match ndb.subscribe(&self.filters) {
+ Ok(sub) => {
+ self.info_log(ndb, "First local subscription");
+ self.local_subid = Some(sub);
+ }
+
+ Err(err) => {
+ error!("multi_subscriber: error subscribing locally: '{err}'")
+ }
}
- } else {
- info!(
- "Locally subscribing. {} total active subscriptions, {} for this object",
- ndb.subscription_count(),
- self.subscribers,
- )
}
}
}
diff --git a/crates/notedeck_columns/src/nav.rs b/crates/notedeck_columns/src/nav.rs
@@ -1,7 +1,7 @@
use crate::{
accounts::render_accounts_route,
actionbar::NoteAction,
- app::{get_active_columns, get_active_columns_mut, get_decks_mut},
+ app::{get_active_columns_mut, get_decks_mut},
column::ColumnsAction,
deck_state::DeckState,
decks::{Deck, DecksAction, DecksCache},
@@ -9,10 +9,7 @@ use crate::{
profile_state::ProfileState,
relay_pool_manager::RelayPoolManager,
route::Route,
- timeline::{
- route::{render_timeline_route, TimelineRoute},
- Timeline,
- },
+ timeline::{route::render_timeline_route, TimelineCache},
ui::{
self,
add_column::render_add_column_routes,
@@ -27,11 +24,10 @@ use crate::{
Damus,
};
-use notedeck::{AccountsAction, AppContext, RootIdError};
-
use egui_nav::{Nav, NavAction, NavResponse, NavUiType};
-use nostrdb::{Ndb, Transaction};
-use tracing::{error, info};
+use nostrdb::Transaction;
+use notedeck::{AccountsAction, AppContext};
+use tracing::error;
#[allow(clippy::enum_variant_names)]
pub enum RenderNavAction {
@@ -51,7 +47,12 @@ pub enum SwitchingAction {
impl SwitchingAction {
/// process the action, and return whether switching occured
- pub fn process(&self, decks_cache: &mut DecksCache, ctx: &mut AppContext<'_>) -> bool {
+ pub fn process(
+ &self,
+ timeline_cache: &mut TimelineCache,
+ decks_cache: &mut DecksCache,
+ ctx: &mut AppContext<'_>,
+ ) -> bool {
match &self {
SwitchingAction::Accounts(account_action) => match account_action {
AccountsAction::Switch(switch_action) => {
@@ -68,8 +69,15 @@ impl SwitchingAction {
},
SwitchingAction::Columns(columns_action) => match *columns_action {
ColumnsAction::Remove(index) => {
- get_active_columns_mut(ctx.accounts, decks_cache).delete_column(index)
+ let kinds_to_pop =
+ get_active_columns_mut(ctx.accounts, decks_cache).delete_column(index);
+ for kind in &kinds_to_pop {
+ if let Err(err) = timeline_cache.pop(kind, ctx.ndb, ctx.pool) {
+ error!("error popping timeline: {err}");
+ }
+ }
}
+
ColumnsAction::Switch(from, to) => {
get_active_columns_mut(ctx.accounts, decks_cache).move_col(from, to);
}
@@ -133,14 +141,14 @@ impl RenderNavResponse {
}
RenderNavAction::RemoveColumn => {
- let tl = app
- .columns(ctx.accounts)
- .find_timeline_for_column_index(col);
- if let Some(timeline) = tl {
- unsubscribe_timeline(ctx.ndb, timeline);
+ let kinds_to_pop = app.columns_mut(ctx.accounts).delete_column(col);
+
+ for kind in &kinds_to_pop {
+ if let Err(err) = app.timeline_cache.pop(kind, ctx.ndb, ctx.pool) {
+ error!("error popping timeline: {err}");
+ }
}
- app.columns_mut(ctx.accounts).delete_column(col);
switching_occured = true;
}
@@ -169,7 +177,11 @@ impl RenderNavResponse {
}
RenderNavAction::SwitchingAction(switching_action) => {
- switching_occured = switching_action.process(&mut app.decks_cache, ctx);
+ switching_occured = switching_action.process(
+ &mut app.timeline_cache,
+ &mut app.decks_cache,
+ ctx,
+ );
}
RenderNavAction::ProfileAction(profile_action) => {
profile_action.process(
@@ -192,40 +204,12 @@ impl RenderNavResponse {
.column_mut(col)
.router_mut()
.pop();
- let txn = Transaction::new(ctx.ndb).expect("txn");
- if let Some(Route::Timeline(TimelineRoute::Thread(id))) = r {
- match notedeck::note::root_note_id_from_selected_id(
- ctx.ndb,
- ctx.note_cache,
- &txn,
- id.bytes(),
- ) {
- Ok(root_id) => {
- if let Some(thread) =
- app.timeline_cache.threads.get_mut(root_id.bytes())
- {
- if let Some(sub) = &mut thread.subscription {
- sub.unsubscribe(ctx.ndb, ctx.pool);
- }
- }
- }
-
- Err(RootIdError::NoteNotFound) => {
- error!("thread returned: note not found for unsub??: {}", id.hex())
- }
-
- Err(RootIdError::NoRootId) => {
- error!("thread returned: note not found for unsub??: {}", id.hex())
- }
+ if let Some(Route::Timeline(kind)) = &r {
+ if let Err(err) = app.timeline_cache.pop(kind, ctx.ndb, ctx.pool) {
+ error!("popping timeline had an error: {err} for {:?}", kind);
}
- } else if let Some(Route::Timeline(TimelineRoute::Profile(pubkey))) = r {
- if let Some(profile) = app.timeline_cache.profiles.get_mut(pubkey.bytes()) {
- if let Some(sub) = &mut profile.subscription {
- sub.unsubscribe(ctx.ndb, ctx.pool);
- }
- }
- }
+ };
switching_occured = true;
}
@@ -255,21 +239,21 @@ fn render_nav_body(
app: &mut Damus,
ctx: &mut AppContext<'_>,
top: &Route,
+ depth: usize,
col: usize,
) -> Option<RenderNavAction> {
match top {
- Route::Timeline(tlr) => render_timeline_route(
+ Route::Timeline(kind) => render_timeline_route(
ctx.ndb,
- get_active_columns_mut(ctx.accounts, &mut app.decks_cache),
- &mut app.drafts,
ctx.img_cache,
ctx.unknown_ids,
ctx.note_cache,
&mut app.timeline_cache,
ctx.accounts,
- *tlr,
+ kind,
col,
app.textmode,
+ depth,
ui,
),
Route::Accounts(amr) => {
@@ -294,6 +278,78 @@ fn render_nav_body(
RelayView::new(ctx.accounts, manager, &mut app.view_state.id_string_map).ui(ui);
None
}
+
+ Route::Reply(id) => {
+ let txn = if let Ok(txn) = Transaction::new(ctx.ndb) {
+ txn
+ } else {
+ ui.label("Reply to unknown note");
+ return None;
+ };
+
+ let note = if let Ok(note) = ctx.ndb.get_note_by_id(&txn, id.bytes()) {
+ note
+ } else {
+ ui.label("Reply to unknown note");
+ return None;
+ };
+
+ let id = egui::Id::new(("post", col, note.key().unwrap()));
+ let poster = ctx.accounts.selected_or_first_nsec()?;
+
+ let action = {
+ let draft = app.drafts.reply_mut(note.id());
+
+ let response = egui::ScrollArea::vertical().show(ui, |ui| {
+ ui::PostReplyView::new(
+ ctx.ndb,
+ poster,
+ draft,
+ ctx.note_cache,
+ ctx.img_cache,
+ ¬e,
+ )
+ .id_source(id)
+ .show(ui)
+ });
+
+ response.inner.action
+ };
+
+ action.map(Into::into)
+ }
+
+ Route::Quote(id) => {
+ let txn = Transaction::new(ctx.ndb).expect("txn");
+
+ let note = if let Ok(note) = ctx.ndb.get_note_by_id(&txn, id.bytes()) {
+ note
+ } else {
+ ui.label("Quote of unknown note");
+ return None;
+ };
+
+ let id = egui::Id::new(("post", col, note.key().unwrap()));
+
+ let poster = ctx.accounts.selected_or_first_nsec()?;
+ let draft = app.drafts.quote_mut(note.id());
+
+ let response = egui::ScrollArea::vertical().show(ui, |ui| {
+ crate::ui::note::QuoteRepostView::new(
+ ctx.ndb,
+ poster,
+ ctx.note_cache,
+ ctx.img_cache,
+ draft,
+ ¬e,
+ )
+ .id_source(id)
+ .show(ui)
+ });
+
+ response.inner.action.map(Into::into)
+ }
+
Route::ComposeNote => {
let kp = ctx.accounts.get_selected_account()?.to_full()?;
let draft = app.drafts.compose_mut();
@@ -421,9 +477,6 @@ pub fn render_nav(
ctx: &mut AppContext<'_>,
ui: &mut egui::Ui,
) -> RenderNavResponse {
- let col_id = get_active_columns(ctx.accounts, &app.decks_cache).get_column_id_at_index(col);
- // TODO(jb55): clean up this router_mut mess by using Router<R> in egui-nav directly
-
let nav_response = Nav::new(
&app.columns(ctx.accounts)
.column(col)
@@ -443,33 +496,24 @@ pub fn render_nav(
.router_mut()
.returning,
)
- .id_source(egui::Id::new(col_id))
+ .id_source(egui::Id::new(("nav", col)))
.show_mut(ui, |ui, render_type, nav| match render_type {
NavUiType::Title => NavTitle::new(
ctx.ndb,
ctx.img_cache,
get_active_columns_mut(ctx.accounts, &mut app.decks_cache),
- ctx.accounts.get_selected_account().map(|a| &a.pubkey),
nav.routes(),
col,
)
.show(ui),
- NavUiType::Body => render_nav_body(ui, app, ctx, nav.routes().last().expect("top"), col),
+ NavUiType::Body => {
+ if let Some(top) = nav.routes().last() {
+ render_nav_body(ui, app, ctx, top, nav.routes().len(), col)
+ } else {
+ None
+ }
+ }
});
RenderNavResponse::new(col, nav_response)
}
-
-fn unsubscribe_timeline(ndb: &mut Ndb, timeline: &Timeline) {
- if let Some(sub_id) = timeline.subscription {
- if let Err(e) = ndb.unsubscribe(sub_id) {
- error!("unsubscribe error: {}", e);
- } else {
- info!(
- "successfully unsubscribed from timeline {} with sub id {}",
- timeline.id,
- sub_id.id()
- );
- }
- }
-}
diff --git a/crates/notedeck_columns/src/profile.rs b/crates/notedeck_columns/src/profile.rs
@@ -1,16 +1,13 @@
use std::collections::HashMap;
-use enostr::{Filter, FullKeypair, Pubkey, PubkeyRef, RelayPool};
-use nostrdb::{FilterBuilder, Ndb, Note, NoteBuildOptions, NoteBuilder, ProfileRecord};
+use enostr::{FullKeypair, Pubkey, RelayPool};
+use nostrdb::{Ndb, Note, NoteBuildOptions, NoteBuilder, ProfileRecord};
-use notedeck::{filter::default_limit, FilterState};
use tracing::info;
use crate::{
- multi_subscriber::MultiSubscriber,
profile_state::ProfileState,
route::{Route, Router},
- timeline::{PubkeySource, Timeline, TimelineKind, TimelineTab},
};
pub struct NostrName<'a> {
@@ -75,33 +72,6 @@ pub fn get_display_name<'a>(record: Option<&ProfileRecord<'a>>) -> NostrName<'a>
}
}
-pub struct Profile {
- pub timeline: Timeline,
- pub subscription: Option<MultiSubscriber>,
-}
-
-impl Profile {
- pub fn new(source: PubkeySource, filters: Vec<Filter>) -> Self {
- let timeline = Timeline::new(
- TimelineKind::profile(source),
- FilterState::ready(filters),
- TimelineTab::full_tabs(),
- );
-
- Profile {
- timeline,
- subscription: None,
- }
- }
-
- pub fn filters_raw(pk: PubkeyRef<'_>) -> Vec<FilterBuilder> {
- vec![Filter::new()
- .authors([pk.bytes()])
- .kinds([1])
- .limit(default_limit())]
- }
-}
-
pub struct SaveProfileChanges {
pub kp: FullKeypair,
pub state: ProfileState,
diff --git a/crates/notedeck_columns/src/route.rs b/crates/notedeck_columns/src/route.rs
@@ -3,16 +3,22 @@ use std::fmt::{self};
use crate::{
accounts::AccountsRoute,
- column::Columns,
- timeline::{kind::ColumnTitle, TimelineId, TimelineRoute},
- ui::add_column::AddColumnRoute,
+ timeline::{
+ kind::{AlgoTimeline, ColumnTitle, ListKind},
+ ThreadSelection, TimelineKind,
+ },
+ ui::add_column::{AddAlgoRoute, AddColumnRoute},
};
+use tokenator::{ParseError, TokenParser, TokenSerializable, TokenWriter};
+
/// App routing. These describe different places you can go inside Notedeck.
-#[derive(Clone, Copy, Eq, PartialEq, Debug)]
+#[derive(Clone, Eq, PartialEq, Debug)]
pub enum Route {
- Timeline(TimelineRoute),
+ Timeline(TimelineKind),
Accounts(AccountsRoute),
+ Reply(NoteId),
+ Quote(NoteId),
Relays,
ComposeNote,
AddColumn(AddColumnRoute),
@@ -23,12 +29,12 @@ pub enum Route {
}
impl Route {
- pub fn timeline(timeline_id: TimelineId) -> Self {
- Route::Timeline(TimelineRoute::Timeline(timeline_id))
+ pub fn timeline(timeline_kind: TimelineKind) -> Self {
+ Route::Timeline(timeline_kind)
}
- pub fn timeline_id(&self) -> Option<&TimelineId> {
- if let Route::Timeline(TimelineRoute::Timeline(tid)) = self {
+ pub fn timeline_id(&self) -> Option<&TimelineKind> {
+ if let Route::Timeline(tid) = self {
Some(tid)
} else {
None
@@ -39,20 +45,20 @@ impl Route {
Route::Relays
}
- pub fn thread(thread_root: NoteId) -> Self {
- Route::Timeline(TimelineRoute::Thread(thread_root))
+ pub fn thread(thread_selection: ThreadSelection) -> Self {
+ Route::Timeline(TimelineKind::Thread(thread_selection))
}
pub fn profile(pubkey: Pubkey) -> Self {
- Route::Timeline(TimelineRoute::Profile(pubkey))
+ Route::Timeline(TimelineKind::profile(pubkey))
}
pub fn reply(replying_to: NoteId) -> Self {
- Route::Timeline(TimelineRoute::Reply(replying_to))
+ Route::Reply(replying_to)
}
pub fn quote(quoting: NoteId) -> Self {
- Route::Timeline(TimelineRoute::Quote(quoting))
+ Route::Quote(quoting)
}
pub fn accounts() -> Self {
@@ -63,21 +69,128 @@ impl Route {
Route::Accounts(AccountsRoute::AddAccount)
}
- pub fn title<'a>(&self, columns: &'a Columns) -> ColumnTitle<'a> {
+ pub fn serialize_tokens(&self, writer: &mut TokenWriter) {
match self {
- Route::Timeline(tlr) => match tlr {
- TimelineRoute::Timeline(id) => {
- if let Some(timeline) = columns.find_timeline(*id) {
- timeline.kind.to_title()
- } else {
- ColumnTitle::simple("Unknown")
- }
- }
- TimelineRoute::Thread(_id) => ColumnTitle::simple("Thread"),
- TimelineRoute::Reply(_id) => ColumnTitle::simple("Reply"),
- TimelineRoute::Quote(_id) => ColumnTitle::simple("Quote"),
- TimelineRoute::Profile(_pubkey) => ColumnTitle::simple("Profile"),
- },
+ Route::Timeline(timeline_kind) => timeline_kind.serialize_tokens(writer),
+ Route::Accounts(routes) => routes.serialize_tokens(writer),
+ Route::AddColumn(routes) => routes.serialize_tokens(writer),
+ Route::Reply(note_id) => {
+ writer.write_token("reply");
+ writer.write_token(¬e_id.hex());
+ }
+ Route::Quote(note_id) => {
+ writer.write_token("quote");
+ writer.write_token(¬e_id.hex());
+ }
+ Route::EditDeck(ind) => {
+ writer.write_token("deck");
+ writer.write_token("edit");
+ writer.write_token(&ind.to_string());
+ }
+ Route::EditProfile(pubkey) => {
+ writer.write_token("profile");
+ writer.write_token("edit");
+ writer.write_token(&pubkey.hex());
+ }
+ Route::Relays => {
+ writer.write_token("relay");
+ }
+ Route::ComposeNote => {
+ writer.write_token("compose");
+ }
+ Route::Support => {
+ writer.write_token("support");
+ }
+ Route::NewDeck => {
+ writer.write_token("deck");
+ writer.write_token("new");
+ }
+ }
+ }
+
+ pub fn parse<'a>(
+ parser: &mut TokenParser<'a>,
+ deck_author: &Pubkey,
+ ) -> Result<Self, ParseError<'a>> {
+ let tlkind =
+ parser.try_parse(|p| Ok(Route::Timeline(TimelineKind::parse(p, deck_author)?)));
+
+ if tlkind.is_ok() {
+ return tlkind;
+ }
+
+ TokenParser::alt(
+ parser,
+ &[
+ |p| Ok(Route::Accounts(AccountsRoute::parse_from_tokens(p)?)),
+ |p| Ok(Route::AddColumn(AddColumnRoute::parse_from_tokens(p)?)),
+ |p| {
+ p.parse_all(|p| {
+ p.parse_token("deck")?;
+ p.parse_token("edit")?;
+ let ind_str = p.pull_token()?;
+ let parsed_index = ind_str
+ .parse::<usize>()
+ .map_err(|_| ParseError::DecodeFailed)?;
+ Ok(Route::EditDeck(parsed_index))
+ })
+ },
+ |p| {
+ p.parse_all(|p| {
+ p.parse_token("profile")?;
+ p.parse_token("edit")?;
+ let pubkey = Pubkey::from_hex(p.pull_token()?)
+ .map_err(|_| ParseError::HexDecodeFailed)?;
+ Ok(Route::EditProfile(pubkey))
+ })
+ },
+ |p| {
+ p.parse_all(|p| {
+ p.parse_token("relay")?;
+ Ok(Route::Relays)
+ })
+ },
+ |p| {
+ p.parse_all(|p| {
+ p.parse_token("quote")?;
+ Ok(Route::Quote(NoteId::new(tokenator::parse_hex_id(p)?)))
+ })
+ },
+ |p| {
+ p.parse_all(|p| {
+ p.parse_token("reply")?;
+ Ok(Route::Reply(NoteId::new(tokenator::parse_hex_id(p)?)))
+ })
+ },
+ |p| {
+ p.parse_all(|p| {
+ p.parse_token("compose")?;
+ Ok(Route::ComposeNote)
+ })
+ },
+ |p| {
+ p.parse_all(|p| {
+ p.parse_token("support")?;
+ Ok(Route::Support)
+ })
+ },
+ |p| {
+ p.parse_all(|p| {
+ p.parse_token("deck")?;
+ p.parse_token("new")?;
+ Ok(Route::NewDeck)
+ })
+ },
+ ],
+ )
+ }
+
+ pub fn title(&self) -> ColumnTitle<'_> {
+ match self {
+ Route::Timeline(kind) => kind.to_title(),
+
+ Route::Reply(_id) => ColumnTitle::simple("Reply"),
+ Route::Quote(_id) => ColumnTitle::simple("Quote"),
Route::Relays => ColumnTitle::simple("Relays"),
@@ -88,6 +201,10 @@ impl Route {
Route::ComposeNote => ColumnTitle::simple("Compose Note"),
Route::AddColumn(c) => match c {
AddColumnRoute::Base => ColumnTitle::simple("Add Column"),
+ AddColumnRoute::Algo(r) => match r {
+ AddAlgoRoute::Base => ColumnTitle::simple("Add Algo Column"),
+ AddAlgoRoute::LastPerPubkey => ColumnTitle::simple("Add Last Notes Column"),
+ },
AddColumnRoute::UndecidedNotification => {
ColumnTitle::simple("Add Notifications Column")
}
@@ -197,14 +314,22 @@ impl<R: Clone> Router<R> {
impl fmt::Display for Route {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
- Route::Timeline(tlr) => match tlr {
- TimelineRoute::Timeline(name) => write!(f, "{}", name),
- TimelineRoute::Thread(_id) => write!(f, "Thread"),
- TimelineRoute::Profile(_id) => write!(f, "Profile"),
- TimelineRoute::Reply(_id) => write!(f, "Reply"),
- TimelineRoute::Quote(_id) => write!(f, "Quote"),
+ Route::Timeline(kind) => match kind {
+ TimelineKind::List(ListKind::Contact(_pk)) => write!(f, "Contacts"),
+ TimelineKind::Algo(AlgoTimeline::LastPerPubkey(ListKind::Contact(_))) => {
+ write!(f, "Last Per Pubkey (Contact)")
+ }
+ TimelineKind::Notifications(_) => write!(f, "Notifications"),
+ TimelineKind::Universe => write!(f, "Universe"),
+ TimelineKind::Generic(_) => write!(f, "Custom"),
+ TimelineKind::Hashtag(ht) => write!(f, "Hashtag ({})", ht),
+ TimelineKind::Thread(_id) => write!(f, "Thread"),
+ TimelineKind::Profile(_id) => write!(f, "Profile"),
},
+ Route::Reply(_id) => write!(f, "Reply"),
+ Route::Quote(_id) => write!(f, "Quote"),
+
Route::Relays => write!(f, "Relays"),
Route::Accounts(amr) => match amr {
diff --git a/crates/notedeck_columns/src/storage/decks.rs b/crates/notedeck_columns/src/storage/decks.rs
@@ -1,25 +1,28 @@
use std::{collections::HashMap, fmt, str::FromStr};
-use enostr::{NoteId, Pubkey};
-use nostrdb::Ndb;
+use enostr::Pubkey;
+use nostrdb::{Ndb, Transaction};
use serde::{Deserialize, Serialize};
use tracing::{error, info};
use crate::{
- accounts::AccountsRoute,
column::{Columns, IntermediaryRoute},
decks::{Deck, Decks, DecksCache},
route::Route,
- timeline::{kind::ListKind, PubkeySource, TimelineKind, TimelineRoute},
- ui::add_column::AddColumnRoute,
+ timeline::{TimelineCache, TimelineKind},
Error,
};
use notedeck::{storage, DataPath, DataPathType, Directory};
+use tokenator::{ParseError, TokenParser, TokenWriter};
pub static DECKS_CACHE_FILE: &str = "decks_cache.json";
-pub fn load_decks_cache(path: &DataPath, ndb: &Ndb) -> Option<DecksCache> {
+pub fn load_decks_cache(
+ path: &DataPath,
+ ndb: &Ndb,
+ timeline_cache: &mut TimelineCache,
+) -> Option<DecksCache> {
let data_path = path.path(DataPathType::Setting);
let decks_cache_str = match Directory::new(data_path).get_file(DECKS_CACHE_FILE.to_owned()) {
@@ -36,7 +39,9 @@ pub fn load_decks_cache(path: &DataPath, ndb: &Ndb) -> Option<DecksCache> {
let serializable_decks_cache =
serde_json::from_str::<SerializableDecksCache>(&decks_cache_str).ok()?;
- serializable_decks_cache.decks_cache(ndb).ok()
+ serializable_decks_cache
+ .decks_cache(ndb, timeline_cache)
+ .ok()
}
pub fn save_decks_cache(path: &DataPath, decks_cache: &DecksCache) {
@@ -82,14 +87,17 @@ impl SerializableDecksCache {
}
}
- pub fn decks_cache(self, ndb: &Ndb) -> Result<DecksCache, Error> {
+ pub fn decks_cache(
+ self,
+ ndb: &Ndb,
+ timeline_cache: &mut TimelineCache,
+ ) -> Result<DecksCache, Error> {
let account_to_decks = self
.decks_cache
.into_iter()
.map(|(pubkey, serializable_decks)| {
- let deck_key = pubkey.bytes();
serializable_decks
- .decks(ndb, deck_key)
+ .decks(ndb, timeline_cache, &pubkey)
.map(|decks| (pubkey, decks))
})
.collect::<Result<HashMap<Pubkey, Decks>, Error>>()?;
@@ -143,12 +151,17 @@ impl SerializableDecks {
}
}
- fn decks(self, ndb: &Ndb, deck_key: &[u8; 32]) -> Result<Decks, Error> {
+ fn decks(
+ self,
+ ndb: &Ndb,
+ timeline_cache: &mut TimelineCache,
+ deck_key: &Pubkey,
+ ) -> Result<Decks, Error> {
Ok(Decks::from_decks(
self.active_deck,
self.decks
.into_iter()
- .map(|d| d.deck(ndb, deck_key))
+ .map(|d| d.deck(ndb, timeline_cache, deck_key))
.collect::<Result<_, _>>()?,
))
}
@@ -253,8 +266,13 @@ impl SerializableDeck {
SerializableDeck { metadata, columns }
}
- pub fn deck(self, ndb: &Ndb, deck_user: &[u8; 32]) -> Result<Deck, Error> {
- let columns = deserialize_columns(ndb, deck_user, self.columns);
+ pub fn deck(
+ self,
+ ndb: &Ndb,
+ timeline_cache: &mut TimelineCache,
+ deck_user: &Pubkey,
+ ) -> Result<Deck, Error> {
+ let columns = deserialize_columns(ndb, timeline_cache, deck_user, self.columns);
let deserialized_metadata = deserialize_metadata(self.metadata)
.ok_or(Error::Generic("Could not deserialize metadata".to_owned()))?;
@@ -283,9 +301,9 @@ fn serialize_columns(columns: &Columns) -> Vec<Vec<String>> {
for column in columns.columns() {
let mut column_routes = Vec::new();
for route in column.router().routes() {
- if let Some(route_str) = serialize_route(route, columns) {
- column_routes.push(route_str);
- }
+ let mut writer = TokenWriter::default();
+ route.serialize_tokens(&mut writer);
+ column_routes.push(writer.str().to_string());
}
cols_serialized.push(column_routes);
}
@@ -293,502 +311,78 @@ fn serialize_columns(columns: &Columns) -> Vec<Vec<String>> {
cols_serialized
}
-fn deserialize_columns(ndb: &Ndb, deck_user: &[u8; 32], serialized: Vec<Vec<String>>) -> Columns {
+fn deserialize_columns(
+ ndb: &Ndb,
+ timeline_cache: &mut TimelineCache,
+ deck_user: &Pubkey,
+ columns: Vec<Vec<String>>,
+) -> Columns {
let mut cols = Columns::new();
- for serialized_routes in serialized {
+ for column in columns {
let mut cur_routes = Vec::new();
- for serialized_route in serialized_routes {
- let selections = Selection::from_serialized(&serialized_route);
- if let Some(route_intermediary) = selections_to_route(selections.clone()) {
- if let Some(ir) = route_intermediary.intermediary_route(ndb, Some(deck_user)) {
- match &ir {
- IntermediaryRoute::Route(Route::Timeline(TimelineRoute::Thread(_)))
- | IntermediaryRoute::Route(Route::Timeline(TimelineRoute::Profile(_))) => {
- // Do nothing. TimelineRoute Threads & Profiles not yet supported for deserialization
- }
- _ => cur_routes.push(ir),
+
+ for route in column {
+ let tokens: Vec<&str> = route.split(":").collect();
+ let mut parser = TokenParser::new(&tokens);
+
+ match CleanIntermediaryRoute::parse(&mut parser, deck_user) {
+ Ok(route_intermediary) => {
+ if let Some(ir) = route_intermediary.into_intermediary_route(ndb) {
+ cur_routes.push(ir);
}
}
- } else {
- error!(
- "could not turn selections to RouteIntermediary: {:?}",
- selections
- );
+ Err(err) => {
+ error!("could not turn tokens to RouteIntermediary: {:?}", err);
+ }
}
}
if !cur_routes.is_empty() {
- cols.insert_intermediary_routes(cur_routes);
+ cols.insert_intermediary_routes(timeline_cache, cur_routes);
}
}
cols
}
-#[derive(Clone, Debug)]
-enum Selection {
- Keyword(Keyword),
- Payload(String),
-}
-
-#[derive(Clone, PartialEq, Debug)]
-enum Keyword {
- Notifs,
- Universe,
- Contact,
- Explicit,
- DeckAuthor,
- Profile,
- Hashtag,
- Generic,
- Thread,
- Reply,
- Quote,
- Account,
- Show,
- New,
- Relay,
- Compose,
- Column,
- NotificationSelection,
- ExternalNotifSelection,
- HashtagSelection,
- Support,
- Deck,
- Edit,
- IndividualSelection,
- ExternalIndividualSelection,
-}
-
-impl Keyword {
- const MAPPING: &'static [(&'static str, Keyword, bool)] = &[
- ("notifs", Keyword::Notifs, false),
- ("universe", Keyword::Universe, false),
- ("contact", Keyword::Contact, false),
- ("explicit", Keyword::Explicit, true),
- ("deck_author", Keyword::DeckAuthor, false),
- ("profile", Keyword::Profile, false),
- ("hashtag", Keyword::Hashtag, true),
- ("generic", Keyword::Generic, false),
- ("thread", Keyword::Thread, true),
- ("reply", Keyword::Reply, true),
- ("quote", Keyword::Quote, true),
- ("account", Keyword::Account, false),
- ("show", Keyword::Show, false),
- ("new", Keyword::New, false),
- ("relay", Keyword::Relay, false),
- ("compose", Keyword::Compose, false),
- ("column", Keyword::Column, false),
- (
- "notification_selection",
- Keyword::NotificationSelection,
- false,
- ),
- (
- "external_notif_selection",
- Keyword::ExternalNotifSelection,
- false,
- ),
- ("hashtag_selection", Keyword::HashtagSelection, false),
- ("support", Keyword::Support, false),
- ("deck", Keyword::Deck, false),
- ("edit", Keyword::Edit, true),
- ];
-
- fn has_payload(&self) -> bool {
- Keyword::MAPPING
- .iter()
- .find(|(_, keyword, _)| keyword == self)
- .map(|(_, _, has_payload)| *has_payload)
- .unwrap_or(false)
- }
-}
-
-impl fmt::Display for Keyword {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- if let Some(name) = Keyword::MAPPING
- .iter()
- .find(|(_, keyword, _)| keyword == self)
- .map(|(name, _, _)| *name)
- {
- write!(f, "{}", name)
- } else {
- write!(f, "UnknownKeyword")
- }
- }
-}
-
-impl FromStr for Keyword {
- type Err = Error;
-
- fn from_str(serialized: &str) -> Result<Self, Self::Err> {
- Keyword::MAPPING
- .iter()
- .find(|(name, _, _)| *name == serialized)
- .map(|(_, keyword, _)| keyword.clone())
- .ok_or(Error::Generic(
- "Could not convert string to Keyword enum".to_owned(),
- ))
- }
-}
-
enum CleanIntermediaryRoute {
ToTimeline(TimelineKind),
ToRoute(Route),
}
impl CleanIntermediaryRoute {
- fn intermediary_route(self, ndb: &Ndb, user: Option<&[u8; 32]>) -> Option<IntermediaryRoute> {
+ fn into_intermediary_route(self, ndb: &Ndb) -> Option<IntermediaryRoute> {
match self {
- CleanIntermediaryRoute::ToTimeline(timeline_kind) => Some(IntermediaryRoute::Timeline(
- timeline_kind.into_timeline(ndb, user)?,
- )),
- CleanIntermediaryRoute::ToRoute(route) => Some(IntermediaryRoute::Route(route)),
- }
- }
-}
-
-// TODO: The public-accessible version will be a subset of this
-fn serialize_route(route: &Route, columns: &Columns) -> Option<String> {
- let mut selections: Vec<Selection> = Vec::new();
- match route {
- Route::Timeline(timeline_route) => match timeline_route {
- TimelineRoute::Timeline(timeline_id) => {
- if let Some(timeline) = columns.find_timeline(*timeline_id) {
- match &timeline.kind {
- TimelineKind::List(list_kind) => match list_kind {
- ListKind::Contact(pubkey_source) => {
- selections.push(Selection::Keyword(Keyword::Contact));
- selections.extend(generate_pubkey_selections(pubkey_source));
- }
- },
- TimelineKind::Notifications(pubkey_source) => {
- selections.push(Selection::Keyword(Keyword::Notifs));
- selections.extend(generate_pubkey_selections(pubkey_source));
- }
- TimelineKind::Profile(pubkey_source) => {
- selections.push(Selection::Keyword(Keyword::Profile));
- selections.extend(generate_pubkey_selections(pubkey_source));
- }
- TimelineKind::Universe => {
- selections.push(Selection::Keyword(Keyword::Universe))
- }
- TimelineKind::Thread(root_id) => {
- selections.push(Selection::Keyword(Keyword::Thread));
- selections.push(Selection::Payload(hex::encode(root_id.bytes())));
- }
- TimelineKind::Generic => {
- selections.push(Selection::Keyword(Keyword::Generic))
- }
- TimelineKind::Hashtag(hashtag) => {
- selections.push(Selection::Keyword(Keyword::Hashtag));
- selections.push(Selection::Payload(hashtag.to_string()));
- }
- }
- }
- }
- TimelineRoute::Thread(note_id) => {
- selections.push(Selection::Keyword(Keyword::Thread));
- selections.push(Selection::Payload(note_id.hex()));
- }
- TimelineRoute::Profile(pubkey) => {
- selections.push(Selection::Keyword(Keyword::Profile));
- selections.push(Selection::Keyword(Keyword::Explicit));
- selections.push(Selection::Payload(pubkey.hex()));
- }
- TimelineRoute::Reply(note_id) => {
- selections.push(Selection::Keyword(Keyword::Reply));
- selections.push(Selection::Payload(note_id.hex()));
- }
- TimelineRoute::Quote(note_id) => {
- selections.push(Selection::Keyword(Keyword::Quote));
- selections.push(Selection::Payload(note_id.hex()));
- }
- },
- Route::Accounts(accounts_route) => {
- selections.push(Selection::Keyword(Keyword::Account));
- match accounts_route {
- AccountsRoute::Accounts => selections.push(Selection::Keyword(Keyword::Show)),
- AccountsRoute::AddAccount => selections.push(Selection::Keyword(Keyword::New)),
- }
- }
- Route::Relays => selections.push(Selection::Keyword(Keyword::Relay)),
- Route::ComposeNote => selections.push(Selection::Keyword(Keyword::Compose)),
- Route::AddColumn(add_column_route) => {
- selections.push(Selection::Keyword(Keyword::Column));
- match add_column_route {
- AddColumnRoute::Base => (),
- AddColumnRoute::UndecidedNotification => {
- selections.push(Selection::Keyword(Keyword::NotificationSelection))
- }
- AddColumnRoute::ExternalNotification => {
- selections.push(Selection::Keyword(Keyword::ExternalNotifSelection))
- }
- AddColumnRoute::Hashtag => {
- selections.push(Selection::Keyword(Keyword::HashtagSelection))
- }
- AddColumnRoute::UndecidedIndividual => {
- selections.push(Selection::Keyword(Keyword::IndividualSelection))
- }
- AddColumnRoute::ExternalIndividual => {
- selections.push(Selection::Keyword(Keyword::ExternalIndividualSelection))
- }
- }
- }
- Route::Support => selections.push(Selection::Keyword(Keyword::Support)),
- Route::NewDeck => {
- selections.push(Selection::Keyword(Keyword::Deck));
- selections.push(Selection::Keyword(Keyword::New));
- }
- Route::EditDeck(index) => {
- selections.push(Selection::Keyword(Keyword::Deck));
- selections.push(Selection::Keyword(Keyword::Edit));
- selections.push(Selection::Payload(index.to_string()));
- }
- Route::EditProfile(pubkey) => {
- selections.push(Selection::Keyword(Keyword::Profile));
- selections.push(Selection::Keyword(Keyword::Edit));
- selections.push(Selection::Payload(pubkey.hex()));
- }
- }
-
- if selections.is_empty() {
- None
- } else {
- Some(
- selections
- .iter()
- .map(|k| k.to_string())
- .collect::<Vec<String>>()
- .join(":"),
- )
- }
-}
-
-fn generate_pubkey_selections(source: &PubkeySource) -> Vec<Selection> {
- let mut selections = Vec::new();
- match source {
- PubkeySource::Explicit(pubkey) => {
- selections.push(Selection::Keyword(Keyword::Explicit));
- selections.push(Selection::Payload(pubkey.hex()));
- }
- PubkeySource::DeckAuthor => {
- selections.push(Selection::Keyword(Keyword::DeckAuthor));
- }
- }
- selections
-}
-
-impl Selection {
- fn from_serialized(serialized: &str) -> Vec<Self> {
- let mut selections = Vec::new();
- let seperator = ":";
-
- let mut serialized_copy = serialized.to_string();
- let mut buffer = serialized_copy.as_mut();
-
- let mut next_is_payload = false;
- while let Some(index) = buffer.find(seperator) {
- if let Ok(keyword) = Keyword::from_str(&buffer[..index]) {
- selections.push(Selection::Keyword(keyword.clone()));
- if keyword.has_payload() {
- next_is_payload = true;
- }
+ CleanIntermediaryRoute::ToTimeline(timeline_kind) => {
+ let txn = Transaction::new(ndb).unwrap();
+ Some(IntermediaryRoute::Timeline(
+ timeline_kind.into_timeline(&txn, ndb)?,
+ ))
}
-
- buffer = &mut buffer[index + seperator.len()..];
- }
-
- if next_is_payload {
- selections.push(Selection::Payload(buffer.to_string()));
- } else if let Ok(keyword) = Keyword::from_str(buffer) {
- selections.push(Selection::Keyword(keyword.clone()));
+ CleanIntermediaryRoute::ToRoute(route) => Some(IntermediaryRoute::Route(route)),
}
-
- selections
}
-}
-fn selections_to_route(selections: Vec<Selection>) -> Option<CleanIntermediaryRoute> {
- match selections.first()? {
- Selection::Keyword(Keyword::Contact) => match selections.get(1)? {
- Selection::Keyword(Keyword::Explicit) => {
- if let Selection::Payload(hex) = selections.get(2)? {
- Some(CleanIntermediaryRoute::ToTimeline(
- TimelineKind::contact_list(PubkeySource::Explicit(
- Pubkey::from_hex(hex.as_str()).ok()?,
- )),
- ))
- } else {
- None
- }
- }
- Selection::Keyword(Keyword::DeckAuthor) => Some(CleanIntermediaryRoute::ToTimeline(
- TimelineKind::contact_list(PubkeySource::DeckAuthor),
- )),
- _ => None,
- },
- Selection::Keyword(Keyword::Notifs) => match selections.get(1)? {
- Selection::Keyword(Keyword::Explicit) => {
- if let Selection::Payload(hex) = selections.get(2)? {
- Some(CleanIntermediaryRoute::ToTimeline(
- TimelineKind::notifications(PubkeySource::Explicit(
- Pubkey::from_hex(hex.as_str()).ok()?,
- )),
- ))
- } else {
- None
- }
- }
- Selection::Keyword(Keyword::DeckAuthor) => Some(CleanIntermediaryRoute::ToTimeline(
- TimelineKind::notifications(PubkeySource::DeckAuthor),
- )),
- _ => None,
- },
- Selection::Keyword(Keyword::Profile) => match selections.get(1)? {
- Selection::Keyword(Keyword::Explicit) => {
- if let Selection::Payload(hex) = selections.get(2)? {
- Some(CleanIntermediaryRoute::ToTimeline(TimelineKind::profile(
- PubkeySource::Explicit(Pubkey::from_hex(hex.as_str()).ok()?),
- )))
- } else {
- None
- }
- }
- Selection::Keyword(Keyword::DeckAuthor) => Some(CleanIntermediaryRoute::ToTimeline(
- TimelineKind::profile(PubkeySource::DeckAuthor),
- )),
- Selection::Keyword(Keyword::Edit) => {
- if let Selection::Payload(hex) = selections.get(2)? {
- Some(CleanIntermediaryRoute::ToRoute(Route::EditProfile(
- Pubkey::from_hex(hex.as_str()).ok()?,
- )))
- } else {
- None
- }
- }
- _ => None,
- },
- Selection::Keyword(Keyword::Universe) => {
- Some(CleanIntermediaryRoute::ToTimeline(TimelineKind::Universe))
- }
- Selection::Keyword(Keyword::Hashtag) => {
- if let Selection::Payload(hashtag) = selections.get(1)? {
- Some(CleanIntermediaryRoute::ToTimeline(TimelineKind::Hashtag(
- hashtag.to_string(),
- )))
- } else {
- None
- }
- }
- Selection::Keyword(Keyword::Generic) => {
- Some(CleanIntermediaryRoute::ToTimeline(TimelineKind::Generic))
- }
- Selection::Keyword(Keyword::Thread) => {
- if let Selection::Payload(hex) = selections.get(1)? {
- Some(CleanIntermediaryRoute::ToRoute(Route::thread(
- NoteId::from_hex(hex.as_str()).ok()?,
- )))
- } else {
- None
- }
- }
- Selection::Keyword(Keyword::Reply) => {
- if let Selection::Payload(hex) = selections.get(1)? {
- Some(CleanIntermediaryRoute::ToRoute(Route::reply(
- NoteId::from_hex(hex.as_str()).ok()?,
- )))
- } else {
- None
- }
- }
- Selection::Keyword(Keyword::Quote) => {
- if let Selection::Payload(hex) = selections.get(1)? {
- Some(CleanIntermediaryRoute::ToRoute(Route::quote(
- NoteId::from_hex(hex.as_str()).ok()?,
- )))
- } else {
- None
- }
- }
- Selection::Keyword(Keyword::Account) => match selections.get(1)? {
- Selection::Keyword(Keyword::Show) => Some(CleanIntermediaryRoute::ToRoute(
- Route::Accounts(AccountsRoute::Accounts),
- )),
- Selection::Keyword(Keyword::New) => Some(CleanIntermediaryRoute::ToRoute(
- Route::Accounts(AccountsRoute::AddAccount),
- )),
- _ => None,
- },
- Selection::Keyword(Keyword::Relay) => Some(CleanIntermediaryRoute::ToRoute(Route::Relays)),
- Selection::Keyword(Keyword::Compose) => {
- Some(CleanIntermediaryRoute::ToRoute(Route::ComposeNote))
+ fn parse<'a>(
+ parser: &mut TokenParser<'a>,
+ deck_author: &Pubkey,
+ ) -> Result<Self, ParseError<'a>> {
+ let timeline = parser.try_parse(|p| {
+ Ok(CleanIntermediaryRoute::ToTimeline(TimelineKind::parse(
+ p,
+ deck_author,
+ )?))
+ });
+ if timeline.is_ok() {
+ return timeline;
}
- Selection::Keyword(Keyword::Column) => match selections.get(1)? {
- Selection::Keyword(Keyword::NotificationSelection) => {
- Some(CleanIntermediaryRoute::ToRoute(Route::AddColumn(
- AddColumnRoute::UndecidedNotification,
- )))
- }
- Selection::Keyword(Keyword::ExternalNotifSelection) => {
- Some(CleanIntermediaryRoute::ToRoute(Route::AddColumn(
- AddColumnRoute::ExternalNotification,
- )))
- }
- Selection::Keyword(Keyword::HashtagSelection) => Some(CleanIntermediaryRoute::ToRoute(
- Route::AddColumn(AddColumnRoute::Hashtag),
- )),
- Selection::Keyword(Keyword::IndividualSelection) => {
- Some(CleanIntermediaryRoute::ToRoute(Route::AddColumn(
- AddColumnRoute::UndecidedIndividual,
- )))
- }
- Selection::Keyword(Keyword::ExternalIndividualSelection) => {
- Some(CleanIntermediaryRoute::ToRoute(Route::AddColumn(
- AddColumnRoute::ExternalIndividual,
- )))
- }
- _ => None,
- },
- Selection::Keyword(Keyword::Support) => {
- Some(CleanIntermediaryRoute::ToRoute(Route::Support))
- }
- Selection::Keyword(Keyword::Deck) => match selections.get(1)? {
- Selection::Keyword(Keyword::New) => {
- Some(CleanIntermediaryRoute::ToRoute(Route::NewDeck))
- }
- Selection::Keyword(Keyword::Edit) => {
- if let Selection::Payload(index_str) = selections.get(2)? {
- let parsed_index = index_str.parse::<usize>().ok()?;
- Some(CleanIntermediaryRoute::ToRoute(Route::EditDeck(
- parsed_index,
- )))
- } else {
- None
- }
- }
- _ => None,
- },
- Selection::Payload(_)
- | Selection::Keyword(Keyword::Explicit)
- | Selection::Keyword(Keyword::New)
- | Selection::Keyword(Keyword::DeckAuthor)
- | Selection::Keyword(Keyword::Show)
- | Selection::Keyword(Keyword::NotificationSelection)
- | Selection::Keyword(Keyword::ExternalNotifSelection)
- | Selection::Keyword(Keyword::HashtagSelection)
- | Selection::Keyword(Keyword::IndividualSelection)
- | Selection::Keyword(Keyword::ExternalIndividualSelection)
- | Selection::Keyword(Keyword::Edit) => None,
- }
-}
-impl fmt::Display for Selection {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- match self {
- Selection::Keyword(keyword) => write!(f, "{}", keyword),
- Selection::Payload(payload) => write!(f, "{}", payload),
- }
+ parser.try_parse(|p| {
+ Ok(CleanIntermediaryRoute::ToRoute(Route::parse(
+ p,
+ deck_author,
+ )?))
+ })
}
}
diff --git a/crates/notedeck_columns/src/storage/migration.rs b/crates/notedeck_columns/src/storage/migration.rs
@@ -1,697 +0,0 @@
-use enostr::{NoteId, Pubkey};
-use nostrdb::Ndb;
-use serde::{Deserialize, Deserializer};
-use tracing::error;
-
-use crate::{
- accounts::AccountsRoute,
- column::{Columns, IntermediaryRoute},
- route::Route,
- timeline::{kind::ListKind, PubkeySource, Timeline, TimelineId, TimelineKind, TimelineRoute},
- ui::add_column::AddColumnRoute,
- Result,
-};
-
-use notedeck::{DataPath, DataPathType, Directory};
-
-pub static COLUMNS_FILE: &str = "columns.json";
-
-fn columns_json(path: &DataPath) -> Option<String> {
- let data_path = path.path(DataPathType::Setting);
- Directory::new(data_path)
- .get_file(COLUMNS_FILE.to_string())
- .ok()
-}
-
-#[derive(Deserialize, Debug, PartialEq)]
-enum MigrationTimelineRoute {
- Timeline(u32),
- Thread(String),
- Profile(String),
- Reply(String),
- Quote(String),
-}
-
-impl MigrationTimelineRoute {
- fn timeline_route(self) -> Option<TimelineRoute> {
- match self {
- MigrationTimelineRoute::Timeline(id) => {
- Some(TimelineRoute::Timeline(TimelineId::new(id)))
- }
- MigrationTimelineRoute::Thread(note_id_hex) => {
- Some(TimelineRoute::Thread(NoteId::from_hex(¬e_id_hex).ok()?))
- }
- MigrationTimelineRoute::Profile(pubkey_hex) => {
- Some(TimelineRoute::Profile(Pubkey::from_hex(&pubkey_hex).ok()?))
- }
- MigrationTimelineRoute::Reply(note_id_hex) => {
- Some(TimelineRoute::Reply(NoteId::from_hex(¬e_id_hex).ok()?))
- }
- MigrationTimelineRoute::Quote(note_id_hex) => {
- Some(TimelineRoute::Quote(NoteId::from_hex(¬e_id_hex).ok()?))
- }
- }
- }
-}
-
-#[derive(Deserialize, Debug, PartialEq)]
-enum MigrationRoute {
- Timeline(MigrationTimelineRoute),
- Accounts(MigrationAccountsRoute),
- Relays,
- ComposeNote,
- AddColumn(MigrationAddColumnRoute),
- Support,
-}
-
-impl MigrationRoute {
- fn route(self) -> Option<Route> {
- match self {
- MigrationRoute::Timeline(migration_timeline_route) => {
- Some(Route::Timeline(migration_timeline_route.timeline_route()?))
- }
- MigrationRoute::Accounts(migration_accounts_route) => {
- Some(Route::Accounts(migration_accounts_route.accounts_route()))
- }
- MigrationRoute::Relays => Some(Route::Relays),
- MigrationRoute::ComposeNote => Some(Route::ComposeNote),
- MigrationRoute::AddColumn(migration_add_column_route) => Some(Route::AddColumn(
- migration_add_column_route.add_column_route(),
- )),
- MigrationRoute::Support => Some(Route::Support),
- }
- }
-}
-
-#[derive(Deserialize, Debug, PartialEq)]
-enum MigrationAccountsRoute {
- Accounts,
- AddAccount,
-}
-
-impl MigrationAccountsRoute {
- fn accounts_route(self) -> AccountsRoute {
- match self {
- MigrationAccountsRoute::Accounts => AccountsRoute::Accounts,
- MigrationAccountsRoute::AddAccount => AccountsRoute::AddAccount,
- }
- }
-}
-
-#[derive(Deserialize, Debug, PartialEq)]
-enum MigrationAddColumnRoute {
- Base,
- UndecidedNotification,
- ExternalNotification,
- Hashtag,
-}
-
-impl MigrationAddColumnRoute {
- fn add_column_route(self) -> AddColumnRoute {
- match self {
- MigrationAddColumnRoute::Base => AddColumnRoute::Base,
- MigrationAddColumnRoute::UndecidedNotification => AddColumnRoute::UndecidedNotification,
- MigrationAddColumnRoute::ExternalNotification => AddColumnRoute::ExternalNotification,
- MigrationAddColumnRoute::Hashtag => AddColumnRoute::Hashtag,
- }
- }
-}
-
-#[derive(Debug, PartialEq)]
-struct MigrationColumn {
- routes: Vec<MigrationRoute>,
-}
-
-impl<'de> Deserialize<'de> for MigrationColumn {
- fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
- where
- D: Deserializer<'de>,
- {
- let routes = Vec::<MigrationRoute>::deserialize(deserializer)?;
-
- Ok(MigrationColumn { routes })
- }
-}
-
-#[derive(Deserialize, Debug)]
-struct MigrationColumns {
- columns: Vec<MigrationColumn>,
- timelines: Vec<MigrationTimeline>,
-}
-
-#[derive(Deserialize, Debug, Clone, PartialEq)]
-struct MigrationTimeline {
- id: u32,
- kind: MigrationTimelineKind,
-}
-
-impl MigrationTimeline {
- fn into_timeline(self, ndb: &Ndb, deck_user_pubkey: Option<&[u8; 32]>) -> Option<Timeline> {
- self.kind
- .into_timeline_kind()?
- .into_timeline(ndb, deck_user_pubkey)
- }
-}
-
-#[derive(Deserialize, Clone, Debug, PartialEq)]
-enum MigrationListKind {
- Contact(MigrationPubkeySource),
-}
-
-impl MigrationListKind {
- fn list_kind(self) -> Option<ListKind> {
- match self {
- MigrationListKind::Contact(migration_pubkey_source) => {
- Some(ListKind::Contact(migration_pubkey_source.pubkey_source()?))
- }
- }
- }
-}
-
-#[derive(Deserialize, Clone, Debug, PartialEq)]
-enum MigrationPubkeySource {
- Explicit(String),
- DeckAuthor,
-}
-
-impl MigrationPubkeySource {
- fn pubkey_source(self) -> Option<PubkeySource> {
- match self {
- MigrationPubkeySource::Explicit(hex) => {
- Some(PubkeySource::Explicit(Pubkey::from_hex(hex.as_str()).ok()?))
- }
- MigrationPubkeySource::DeckAuthor => Some(PubkeySource::DeckAuthor),
- }
- }
-}
-
-#[derive(Deserialize, Clone, Debug, PartialEq)]
-enum MigrationTimelineKind {
- List(MigrationListKind),
- Notifications(MigrationPubkeySource),
- Profile(MigrationPubkeySource),
- Universe,
- Generic,
- Hashtag(String),
-}
-
-impl MigrationTimelineKind {
- fn into_timeline_kind(self) -> Option<TimelineKind> {
- match self {
- MigrationTimelineKind::List(migration_list_kind) => {
- Some(TimelineKind::List(migration_list_kind.list_kind()?))
- }
- MigrationTimelineKind::Notifications(migration_pubkey_source) => Some(
- TimelineKind::Notifications(migration_pubkey_source.pubkey_source()?),
- ),
- MigrationTimelineKind::Profile(migration_pubkey_source) => Some(TimelineKind::Profile(
- migration_pubkey_source.pubkey_source()?,
- )),
- MigrationTimelineKind::Universe => Some(TimelineKind::Universe),
- MigrationTimelineKind::Generic => Some(TimelineKind::Generic),
- MigrationTimelineKind::Hashtag(hashtag) => Some(TimelineKind::Hashtag(hashtag)),
- }
- }
-}
-
-impl MigrationColumns {
- fn into_columns(self, ndb: &Ndb, deck_pubkey: Option<&[u8; 32]>) -> Columns {
- let mut columns = Columns::default();
-
- for column in self.columns {
- let mut cur_routes = Vec::new();
- for route in column.routes {
- match route {
- MigrationRoute::Timeline(MigrationTimelineRoute::Timeline(timeline_id)) => {
- if let Some(migration_tl) =
- self.timelines.iter().find(|tl| tl.id == timeline_id)
- {
- let tl = migration_tl.clone().into_timeline(ndb, deck_pubkey);
- if let Some(tl) = tl {
- cur_routes.push(IntermediaryRoute::Timeline(tl));
- } else {
- error!("Problem deserializing timeline {:?}", migration_tl);
- }
- }
- }
- MigrationRoute::Timeline(MigrationTimelineRoute::Thread(_thread)) => {}
- _ => {
- if let Some(route) = route.route() {
- cur_routes.push(IntermediaryRoute::Route(route));
- }
- }
- }
- }
- if !cur_routes.is_empty() {
- columns.insert_intermediary_routes(cur_routes);
- }
- }
- columns
- }
-}
-
-fn string_to_columns(
- serialized_columns: String,
- ndb: &Ndb,
- user: Option<&[u8; 32]>,
-) -> Option<Columns> {
- Some(
- deserialize_columns_string(serialized_columns)
- .ok()?
- .into_columns(ndb, user),
- )
-}
-
-pub fn deserialize_columns(path: &DataPath, ndb: &Ndb, user: Option<&[u8; 32]>) -> Option<Columns> {
- string_to_columns(columns_json(path)?, ndb, user)
-}
-
-fn deserialize_columns_string(serialized_columns: String) -> Result<MigrationColumns> {
- Ok(
- serde_json::from_str::<MigrationColumns>(&serialized_columns)
- .map_err(notedeck::Error::Json)?,
- )
-}
-
-#[cfg(test)]
-mod tests {
- use crate::storage::migration::{
- MigrationColumn, MigrationListKind, MigrationPubkeySource, MigrationRoute,
- MigrationTimeline, MigrationTimelineKind, MigrationTimelineRoute,
- };
-
- impl MigrationColumn {
- fn from_route(route: MigrationRoute) -> Self {
- Self {
- routes: vec![route],
- }
- }
-
- fn from_routes(routes: Vec<MigrationRoute>) -> Self {
- Self { routes }
- }
- }
-
- impl MigrationTimeline {
- fn new(id: u32, kind: MigrationTimelineKind) -> Self {
- Self { id, kind }
- }
- }
-
- use super::*;
-
- #[test]
- fn multi_column() {
- let route = r#"{"columns":[[{"Timeline":{"Timeline":2}}],[{"Timeline":{"Timeline":0}}],[{"Timeline":{"Timeline":1}}]],"timelines":[{"id":0,"kind":{"List":{"Contact":{"Explicit":"aa733081e4f0f79dd43023d8983265593f2b41a988671cfcef3f489b91ad93fe"}}}},{"id":1,"kind":{"Hashtag":"introductions"}},{"id":2,"kind":"Universe"}]}"#; // Multi-column
-
- let deserialized_columns = deserialize_columns_string(route.to_string());
- assert!(deserialized_columns.is_ok());
-
- let migration_cols = deserialized_columns.unwrap();
-
- assert_eq!(migration_cols.columns.len(), 3);
- assert_eq!(
- *migration_cols.columns.first().unwrap(),
- MigrationColumn::from_route(MigrationRoute::Timeline(
- MigrationTimelineRoute::Timeline(2)
- ))
- );
-
- assert_eq!(
- *migration_cols.columns.get(1).unwrap(),
- MigrationColumn::from_route(MigrationRoute::Timeline(
- MigrationTimelineRoute::Timeline(0)
- ))
- );
-
- assert_eq!(
- *migration_cols.columns.get(2).unwrap(),
- MigrationColumn::from_route(MigrationRoute::Timeline(
- MigrationTimelineRoute::Timeline(1)
- ))
- );
-
- assert_eq!(migration_cols.timelines.len(), 3);
- assert_eq!(
- *migration_cols.timelines.first().unwrap(),
- MigrationTimeline::new(
- 0,
- MigrationTimelineKind::List(MigrationListKind::Contact(
- MigrationPubkeySource::Explicit(
- "aa733081e4f0f79dd43023d8983265593f2b41a988671cfcef3f489b91ad93fe"
- .to_owned()
- )
- ))
- )
- );
- assert_eq!(
- *migration_cols.timelines.get(1).unwrap(),
- MigrationTimeline::new(
- 1,
- MigrationTimelineKind::Hashtag("introductions".to_owned())
- )
- );
-
- assert_eq!(
- *migration_cols.timelines.get(2).unwrap(),
- MigrationTimeline::new(2, MigrationTimelineKind::Universe)
- )
- }
-
- #[test]
- fn base() {
- let route = r#"{"columns":[[{"AddColumn":"Base"}]],"timelines":[]}"#;
-
- let deserialized_columns = deserialize_columns_string(route.to_string());
- assert!(deserialized_columns.is_ok());
-
- let migration_cols = deserialized_columns.unwrap();
- assert_eq!(migration_cols.columns.len(), 1);
- assert_eq!(
- *migration_cols.columns.first().unwrap(),
- MigrationColumn::from_route(MigrationRoute::AddColumn(MigrationAddColumnRoute::Base))
- );
-
- assert!(migration_cols.timelines.is_empty());
- }
-
- #[test]
- fn universe() {
- let route = r#"{"columns":[[{"Timeline":{"Timeline":0}}]],"timelines":[{"id":0,"kind":"Universe"}]}"#;
- let deserialized_columns = deserialize_columns_string(route.to_string());
- assert!(deserialized_columns.is_ok());
-
- let migration_cols = deserialized_columns.unwrap();
- assert_eq!(migration_cols.columns.len(), 1);
- assert_eq!(
- *migration_cols.columns.first().unwrap(),
- MigrationColumn::from_route(MigrationRoute::Timeline(
- MigrationTimelineRoute::Timeline(0)
- ))
- );
-
- assert_eq!(migration_cols.timelines.len(), 1);
- assert_eq!(
- *migration_cols.timelines.first().unwrap(),
- MigrationTimeline::new(0, MigrationTimelineKind::Universe)
- )
- }
-
- #[test]
- fn home() {
- let route = r#"{"columns":[[{"Timeline":{"Timeline":2}}]],"timelines":[{"id":2,"kind":{"List":{"Contact":{"Explicit":"aa733081e4f0f79dd43023d8983265593f2b41a988671cfcef3f489b91ad93fe"}}}}]}"#;
-
- let deserialized_columns = deserialize_columns_string(route.to_string());
- assert!(deserialized_columns.is_ok());
-
- let migration_cols = deserialized_columns.unwrap();
- assert_eq!(migration_cols.columns.len(), 1);
- assert_eq!(
- *migration_cols.columns.first().unwrap(),
- MigrationColumn::from_route(MigrationRoute::Timeline(
- MigrationTimelineRoute::Timeline(2)
- ))
- );
-
- assert_eq!(migration_cols.timelines.len(), 1);
- assert_eq!(
- *migration_cols.timelines.first().unwrap(),
- MigrationTimeline::new(
- 2,
- MigrationTimelineKind::List(MigrationListKind::Contact(
- MigrationPubkeySource::Explicit(
- "aa733081e4f0f79dd43023d8983265593f2b41a988671cfcef3f489b91ad93fe"
- .to_owned()
- )
- ))
- )
- )
- }
-
- #[test]
- fn thread() {
- let route = r#"{"columns":[[{"Timeline":{"Timeline":7}},{"Timeline":{"Thread":"fb9b0c62bc91bbe28ca428fc85e310ae38795b94fb910e0f4e12962ced971f25"}}]],"timelines":[{"id":7,"kind":{"List":{"Contact":{"Explicit":"4a0510f26880d40e432f4865cb5714d9d3c200ca6ebb16b418ae6c555f574967"}}}}]}"#;
-
- let deserialized_columns = deserialize_columns_string(route.to_string());
- assert!(deserialized_columns.is_ok());
-
- let migration_cols = deserialized_columns.unwrap();
- assert_eq!(migration_cols.columns.len(), 1);
- assert_eq!(
- *migration_cols.columns.first().unwrap(),
- MigrationColumn::from_routes(vec![
- MigrationRoute::Timeline(MigrationTimelineRoute::Timeline(7),),
- MigrationRoute::Timeline(MigrationTimelineRoute::Thread(
- "fb9b0c62bc91bbe28ca428fc85e310ae38795b94fb910e0f4e12962ced971f25".to_owned()
- )),
- ])
- );
-
- assert_eq!(migration_cols.timelines.len(), 1);
- assert_eq!(
- *migration_cols.timelines.first().unwrap(),
- MigrationTimeline::new(
- 7,
- MigrationTimelineKind::List(MigrationListKind::Contact(
- MigrationPubkeySource::Explicit(
- "4a0510f26880d40e432f4865cb5714d9d3c200ca6ebb16b418ae6c555f574967"
- .to_owned()
- )
- ))
- )
- )
- }
-
- #[test]
- fn profile() {
- let route = r#"{"columns":[[{"Timeline":{"Timeline":7}},{"Timeline":{"Profile":"32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245"}}]],"timelines":[{"id":7,"kind":{"List":{"Contact":{"Explicit":"4a0510f26880d40e432f4865cb5714d9d3c200ca6ebb16b418ae6c555f574967"}}}}]}"#;
-
- let deserialized_columns = deserialize_columns_string(route.to_string());
- assert!(deserialized_columns.is_ok());
-
- let migration_cols = deserialized_columns.unwrap();
- assert_eq!(migration_cols.columns.len(), 1);
- assert_eq!(
- *migration_cols.columns.first().unwrap(),
- MigrationColumn::from_routes(vec![
- MigrationRoute::Timeline(MigrationTimelineRoute::Timeline(7),),
- MigrationRoute::Timeline(MigrationTimelineRoute::Profile(
- "32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245".to_owned()
- )),
- ])
- );
-
- assert_eq!(migration_cols.timelines.len(), 1);
- assert_eq!(
- *migration_cols.timelines.first().unwrap(),
- MigrationTimeline::new(
- 7,
- MigrationTimelineKind::List(MigrationListKind::Contact(
- MigrationPubkeySource::Explicit(
- "4a0510f26880d40e432f4865cb5714d9d3c200ca6ebb16b418ae6c555f574967"
- .to_owned()
- )
- ))
- )
- )
- }
-
- #[test]
- fn your_notifs() {
- let route = r#"{"columns":[[{"Timeline":{"Timeline":5}}]],"timelines":[{"id":5,"kind":{"Notifications":"DeckAuthor"}}]}"#;
-
- let deserialized_columns = deserialize_columns_string(route.to_string());
- assert!(deserialized_columns.is_ok());
-
- let migration_cols = deserialized_columns.unwrap();
- assert_eq!(migration_cols.columns.len(), 1);
- assert_eq!(
- *migration_cols.columns.first().unwrap(),
- MigrationColumn::from_route(MigrationRoute::Timeline(
- MigrationTimelineRoute::Timeline(5)
- ))
- );
-
- assert_eq!(migration_cols.timelines.len(), 1);
- assert_eq!(
- *migration_cols.timelines.first().unwrap(),
- MigrationTimeline::new(
- 5,
- MigrationTimelineKind::Notifications(MigrationPubkeySource::DeckAuthor)
- )
- )
- }
-
- #[test]
- fn undecided_notifs() {
- let route = r#"{"columns":[[{"AddColumn":"Base"},{"AddColumn":"UndecidedNotification"}]],"timelines":[]}"#;
-
- let deserialized_columns = deserialize_columns_string(route.to_string());
- assert!(deserialized_columns.is_ok());
-
- let migration_cols = deserialized_columns.unwrap();
- assert_eq!(migration_cols.columns.len(), 1);
- assert_eq!(
- *migration_cols.columns.first().unwrap(),
- MigrationColumn::from_routes(vec![
- MigrationRoute::AddColumn(MigrationAddColumnRoute::Base),
- MigrationRoute::AddColumn(MigrationAddColumnRoute::UndecidedNotification),
- ])
- );
-
- assert!(migration_cols.timelines.is_empty());
- }
-
- #[test]
- fn extern_notifs() {
- let route = r#"{"columns":[[{"Timeline":{"Timeline":4}}]],"timelines":[{"id":4,"kind":{"Notifications":{"Explicit":"32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245"}}}]}"#;
-
- let deserialized_columns = deserialize_columns_string(route.to_string());
- assert!(deserialized_columns.is_ok());
-
- let migration_cols = deserialized_columns.unwrap();
- assert_eq!(migration_cols.columns.len(), 1);
- assert_eq!(
- *migration_cols.columns.first().unwrap(),
- MigrationColumn::from_route(MigrationRoute::Timeline(
- MigrationTimelineRoute::Timeline(4)
- ))
- );
-
- assert_eq!(migration_cols.timelines.len(), 1);
- assert_eq!(
- *migration_cols.timelines.first().unwrap(),
- MigrationTimeline::new(
- 4,
- MigrationTimelineKind::Notifications(MigrationPubkeySource::Explicit(
- "32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245".to_owned()
- ))
- )
- )
- }
-
- #[test]
- fn hashtag() {
- let route = r#"{"columns":[[{"Timeline":{"Timeline":6}}]],"timelines":[{"id":6,"kind":{"Hashtag":"notedeck"}}]}"#;
-
- let deserialized_columns = deserialize_columns_string(route.to_string());
- assert!(deserialized_columns.is_ok());
-
- let migration_cols = deserialized_columns.unwrap();
- assert_eq!(migration_cols.columns.len(), 1);
- assert_eq!(
- *migration_cols.columns.first().unwrap(),
- MigrationColumn::from_route(MigrationRoute::Timeline(
- MigrationTimelineRoute::Timeline(6)
- ))
- );
-
- assert_eq!(migration_cols.timelines.len(), 1);
- assert_eq!(
- *migration_cols.timelines.first().unwrap(),
- MigrationTimeline::new(6, MigrationTimelineKind::Hashtag("notedeck".to_owned()))
- )
- }
-
- #[test]
- fn support() {
- let route = r#"{"columns":[[{"AddColumn":"Base"},"Support"]],"timelines":[]}"#;
-
- let deserialized_columns = deserialize_columns_string(route.to_string());
- assert!(deserialized_columns.is_ok());
-
- let migration_cols = deserialized_columns.unwrap();
- assert_eq!(migration_cols.columns.len(), 1);
- assert_eq!(
- *migration_cols.columns.first().unwrap(),
- MigrationColumn::from_routes(vec![
- MigrationRoute::AddColumn(MigrationAddColumnRoute::Base),
- MigrationRoute::Support
- ])
- );
-
- assert!(migration_cols.timelines.is_empty());
- }
-
- #[test]
- fn post() {
- let route = r#"{"columns":[[{"AddColumn":"Base"},"ComposeNote"]],"timelines":[]}"#;
-
- let deserialized_columns = deserialize_columns_string(route.to_string());
- assert!(deserialized_columns.is_ok());
-
- let migration_cols = deserialized_columns.unwrap();
- assert_eq!(migration_cols.columns.len(), 1);
- assert_eq!(
- *migration_cols.columns.first().unwrap(),
- MigrationColumn::from_routes(vec![
- MigrationRoute::AddColumn(MigrationAddColumnRoute::Base),
- MigrationRoute::ComposeNote
- ])
- );
-
- assert!(migration_cols.timelines.is_empty());
- }
-
- #[test]
- fn relay() {
- let route = r#"{"columns":[[{"AddColumn":"Base"},"Relays"]],"timelines":[]}"#;
-
- let deserialized_columns = deserialize_columns_string(route.to_string());
- assert!(deserialized_columns.is_ok());
-
- let migration_cols = deserialized_columns.unwrap();
- assert_eq!(migration_cols.columns.len(), 1);
- assert_eq!(
- *migration_cols.columns.first().unwrap(),
- MigrationColumn::from_routes(vec![
- MigrationRoute::AddColumn(MigrationAddColumnRoute::Base),
- MigrationRoute::Relays
- ])
- );
-
- assert!(migration_cols.timelines.is_empty());
- }
-
- #[test]
- fn accounts() {
- let route =
- r#"{"columns":[[{"AddColumn":"Base"},{"Accounts":"Accounts"}]],"timelines":[]}"#;
-
- let deserialized_columns = deserialize_columns_string(route.to_string());
- assert!(deserialized_columns.is_ok());
-
- let migration_cols = deserialized_columns.unwrap();
- assert_eq!(migration_cols.columns.len(), 1);
- assert_eq!(
- *migration_cols.columns.first().unwrap(),
- MigrationColumn::from_routes(vec![
- MigrationRoute::AddColumn(MigrationAddColumnRoute::Base),
- MigrationRoute::Accounts(MigrationAccountsRoute::Accounts),
- ])
- );
-
- assert!(migration_cols.timelines.is_empty());
- }
-
- #[test]
- fn login() {
- let route = r#"{"columns":[[{"AddColumn":"Base"},{"Accounts":"Accounts"},{"Accounts":"AddAccount"}]],"timelines":[]}"#;
-
- let deserialized_columns = deserialize_columns_string(route.to_string());
- assert!(deserialized_columns.is_ok());
-
- let migration_cols = deserialized_columns.unwrap();
- assert_eq!(migration_cols.columns.len(), 1);
- assert_eq!(
- *migration_cols.columns.first().unwrap(),
- MigrationColumn::from_routes(vec![
- MigrationRoute::AddColumn(MigrationAddColumnRoute::Base),
- MigrationRoute::Accounts(MigrationAccountsRoute::Accounts),
- MigrationRoute::Accounts(MigrationAccountsRoute::AddAccount),
- ])
- );
-
- assert!(migration_cols.timelines.is_empty());
- }
-}
diff --git a/crates/notedeck_columns/src/storage/mod.rs b/crates/notedeck_columns/src/storage/mod.rs
@@ -1,5 +1,3 @@
mod decks;
-mod migration;
pub use decks::{load_decks_cache, save_decks_cache, DECKS_CACHE_FILE};
-pub use migration::{deserialize_columns, COLUMNS_FILE};
diff --git a/crates/notedeck_columns/src/subscriptions.rs b/crates/notedeck_columns/src/subscriptions.rs
@@ -1,4 +1,4 @@
-use crate::timeline::{TimelineId, TimelineKind};
+use crate::timeline::TimelineKind;
use std::collections::HashMap;
use uuid::Uuid;
@@ -16,7 +16,7 @@ pub enum SubKind {
/// We are fetching a contact list so that we can use it for our follows
/// Filter.
// TODO: generalize this to any list?
- FetchingContactList(TimelineId),
+ FetchingContactList(TimelineKind),
}
/// Subscriptions that need to be tracked at various stages. Sometimes we
diff --git a/crates/notedeck_columns/src/thread.rs b/crates/notedeck_columns/src/thread.rs
@@ -1,27 +0,0 @@
-use crate::{multi_subscriber::MultiSubscriber, timeline::Timeline};
-
-use nostrdb::FilterBuilder;
-use notedeck::{RootNoteId, RootNoteIdBuf};
-
-pub struct Thread {
- pub timeline: Timeline,
- pub subscription: Option<MultiSubscriber>,
-}
-
-impl Thread {
- pub fn new(root_id: RootNoteIdBuf) -> Self {
- let timeline = Timeline::thread(root_id);
-
- Thread {
- timeline,
- subscription: None,
- }
- }
-
- pub fn filters_raw(root_id: RootNoteId<'_>) -> Vec<FilterBuilder> {
- vec![
- nostrdb::Filter::new().kinds([1]).event(root_id.bytes()),
- nostrdb::Filter::new().ids([root_id.bytes()]).limit(1),
- ]
- }
-}
diff --git a/crates/notedeck_columns/src/timeline/cache.rs b/crates/notedeck_columns/src/timeline/cache.rs
@@ -1,23 +1,21 @@
use crate::{
actionbar::TimelineOpenResult,
+ error::Error,
multi_subscriber::MultiSubscriber,
- profile::Profile,
- thread::Thread,
//subscriptions::SubRefs,
- timeline::{PubkeySource, Timeline},
+ timeline::{Timeline, TimelineKind},
};
-use notedeck::{NoteCache, NoteRef, RootNoteId, RootNoteIdBuf};
+use notedeck::{filter, FilterState, NoteCache, NoteRef};
-use enostr::{Pubkey, PubkeyRef, RelayPool};
-use nostrdb::{Filter, FilterBuilder, Ndb, Transaction};
+use enostr::RelayPool;
+use nostrdb::{Filter, Ndb, Transaction};
use std::collections::HashMap;
-use tracing::{debug, info, warn};
+use tracing::{debug, error, info, warn};
#[derive(Default)]
pub struct TimelineCache {
- pub threads: HashMap<RootNoteIdBuf, Thread>,
- pub profiles: HashMap<Pubkey, Profile>,
+ pub timelines: HashMap<TimelineKind, Timeline>,
}
pub enum Vitality<'a, M> {
@@ -41,102 +39,64 @@ impl<'a, M> Vitality<'a, M> {
}
}
-#[derive(Hash, Debug, Copy, Clone)]
-pub enum TimelineCacheKey<'a> {
- Profile(PubkeyRef<'a>),
- Thread(RootNoteId<'a>),
-}
-
-impl<'a> TimelineCacheKey<'a> {
- pub fn profile(pubkey: PubkeyRef<'a>) -> Self {
- Self::Profile(pubkey)
- }
-
- pub fn thread(root_id: RootNoteId<'a>) -> Self {
- Self::Thread(root_id)
- }
-
- pub fn bytes(&self) -> &[u8; 32] {
- match self {
- Self::Profile(pk) => pk.bytes(),
- Self::Thread(root_id) => root_id.bytes(),
- }
- }
-
- /// The filters used to update our timeline cache
- pub fn filters_raw(&self) -> Vec<FilterBuilder> {
- match self {
- TimelineCacheKey::Thread(root_id) => Thread::filters_raw(*root_id),
-
- TimelineCacheKey::Profile(pubkey) => vec![Filter::new()
- .authors([pubkey.bytes()])
- .kinds([1])
- .limit(notedeck::filter::default_limit())],
- }
- }
-
- pub fn filters_since(&self, since: u64) -> Vec<Filter> {
- self.filters_raw()
- .into_iter()
- .map(|fb| fb.since(since).build())
- .collect()
- }
+impl TimelineCache {
+ /// Pop a timeline from the timeline cache. This only removes the timeline
+ /// if it has reached 0 subscribers, meaning it was the last one to be
+ /// removed
+ pub fn pop(
+ &mut self,
+ id: &TimelineKind,
+ ndb: &mut Ndb,
+ pool: &mut RelayPool,
+ ) -> Result<(), Error> {
+ let timeline = if let Some(timeline) = self.timelines.get_mut(id) {
+ timeline
+ } else {
+ return Err(Error::TimelineNotFound);
+ };
- pub fn filters(&self) -> Vec<Filter> {
- self.filters_raw()
- .into_iter()
- .map(|mut fb| fb.build())
- .collect()
- }
-}
+ if let Some(sub) = &mut timeline.subscription {
+ // if this is the last subscriber, remove the timeline from cache
+ if sub.unsubscribe(ndb, pool) {
+ debug!(
+ "popped last timeline {:?}, removing from timeline cache",
+ id
+ );
+ self.timelines.remove(id);
+ }
-impl TimelineCache {
- fn contains_key(&self, key: TimelineCacheKey<'_>) -> bool {
- match key {
- TimelineCacheKey::Profile(pubkey) => self.profiles.contains_key(pubkey.bytes()),
- TimelineCacheKey::Thread(root_id) => self.threads.contains_key(root_id.bytes()),
+ Ok(())
+ } else {
+ Err(Error::MissingSubscription)
}
}
- fn get_expected_mut(&mut self, key: TimelineCacheKey<'_>) -> &mut Timeline {
- match key {
- TimelineCacheKey::Profile(pubkey) => self
- .profiles
- .get_mut(pubkey.bytes())
- .map(|p| &mut p.timeline),
- TimelineCacheKey::Thread(root_id) => self
- .threads
- .get_mut(root_id.bytes())
- .map(|t| &mut t.timeline),
- }
- .expect("expected notes in timline cache")
+ fn get_expected_mut(&mut self, key: &TimelineKind) -> &mut Timeline {
+ self.timelines
+ .get_mut(key)
+ .expect("expected notes in timline cache")
}
- /// Insert a new profile or thread into the cache, based on the TimelineCacheKey
+ /// Insert a new timeline into the cache, based on the TimelineKind
#[allow(clippy::too_many_arguments)]
fn insert_new(
&mut self,
- id: TimelineCacheKey<'_>,
+ id: TimelineKind,
txn: &Transaction,
ndb: &Ndb,
notes: &[NoteRef],
note_cache: &mut NoteCache,
- filters: Vec<Filter>,
) {
- match id {
- TimelineCacheKey::Profile(pubkey) => {
- let mut profile = Profile::new(PubkeySource::Explicit(pubkey.to_owned()), filters);
- // insert initial notes into timeline
- profile.timeline.insert_new(txn, ndb, note_cache, notes);
- self.profiles.insert(pubkey.to_owned(), profile);
- }
+ let mut timeline = if let Some(timeline) = id.clone().into_timeline(txn, ndb) {
+ timeline
+ } else {
+ error!("Error creating timeline from {:?}", &id);
+ return;
+ };
- TimelineCacheKey::Thread(root_id) => {
- let mut thread = Thread::new(root_id.to_owned());
- thread.timeline.insert_new(txn, ndb, note_cache, notes);
- self.threads.insert(root_id.to_owned(), thread);
- }
- }
+ // insert initial notes into timeline
+ timeline.insert_new(txn, ndb, note_cache, notes);
+ self.timelines.insert(id, timeline);
}
/// Get and/or update the notes associated with this timeline
@@ -145,24 +105,28 @@ impl TimelineCache {
ndb: &Ndb,
note_cache: &mut NoteCache,
txn: &Transaction,
- id: TimelineCacheKey<'a>,
+ id: &TimelineKind,
) -> Vitality<'a, Timeline> {
// we can't use the naive hashmap entry API here because lookups
// require a copy, wait until we have a raw entry api. We could
// also use hashbrown?
- if self.contains_key(id) {
+ if self.timelines.contains_key(id) {
return Vitality::Stale(self.get_expected_mut(id));
}
- let filters = id.filters();
- let notes = if let Ok(results) = ndb.query(txn, &filters, 1000) {
- results
- .into_iter()
- .map(NoteRef::from_query_result)
- .collect()
+ let notes = if let FilterState::Ready(filters) = id.filters(txn, ndb) {
+ if let Ok(results) = ndb.query(txn, &filters, 1000) {
+ results
+ .into_iter()
+ .map(NoteRef::from_query_result)
+ .collect()
+ } else {
+ debug!("got no results from TimelineCache lookup for {:?}", id);
+ vec![]
+ }
} else {
- debug!("got no results from TimelineCache lookup for {:?}", id);
+ // filter is not ready yet
vec![]
};
@@ -172,44 +136,37 @@ impl TimelineCache {
info!("found NotesHolder with {} notes", notes.len());
}
- self.insert_new(id, txn, ndb, ¬es, note_cache, filters);
+ self.insert_new(id.to_owned(), txn, ndb, ¬es, note_cache);
Vitality::Fresh(self.get_expected_mut(id))
}
- pub fn subscription(
- &mut self,
- id: TimelineCacheKey<'_>,
- ) -> Option<&mut Option<MultiSubscriber>> {
- match id {
- TimelineCacheKey::Profile(pubkey) => self
- .profiles
- .get_mut(pubkey.bytes())
- .map(|p| &mut p.subscription),
- TimelineCacheKey::Thread(root_id) => self
- .threads
- .get_mut(root_id.bytes())
- .map(|t| &mut t.subscription),
- }
- }
-
- pub fn open<'a>(
+ /// Open a timeline, this is another way of saying insert a timeline
+ /// into the timeline cache. If there exists a timeline already, we
+ /// bump its subscription reference count. If it's new we start a new
+ /// subscription
+ pub fn open(
&mut self,
ndb: &Ndb,
note_cache: &mut NoteCache,
txn: &Transaction,
pool: &mut RelayPool,
- id: TimelineCacheKey<'a>,
- ) -> Option<TimelineOpenResult<'a>> {
- let result = match self.notes(ndb, note_cache, txn, id) {
+ id: &TimelineKind,
+ ) -> Option<TimelineOpenResult> {
+ let (open_result, timeline) = match self.notes(ndb, note_cache, txn, id) {
Vitality::Stale(timeline) => {
// The timeline cache is stale, let's update it
- let notes = find_new_notes(timeline.all_or_any_notes(), id, txn, ndb);
- let cached_timeline_result = if notes.is_empty() {
+ let notes = find_new_notes(
+ timeline.all_or_any_notes(),
+ timeline.subscription.as_ref().map(|s| &s.filters)?,
+ txn,
+ ndb,
+ );
+ let open_result = if notes.is_empty() {
None
} else {
let new_notes = notes.iter().map(|n| n.key).collect();
- Some(TimelineOpenResult::new_notes(new_notes, id))
+ Some(TimelineOpenResult::new_notes(new_notes, id.clone()))
};
// we can't insert and update the VirtualList now, because we
@@ -217,42 +174,36 @@ impl TimelineCache {
// result instead
//
// holder.get_view().insert(¬es); <-- no
- cached_timeline_result
+ (open_result, timeline)
}
- Vitality::Fresh(_timeline) => None,
+ Vitality::Fresh(timeline) => (None, timeline),
};
- let sub_id = if let Some(sub) = self.subscription(id) {
- if let Some(multi_subscriber) = sub {
- multi_subscriber.subscribe(ndb, pool);
- multi_subscriber.sub.as_ref().map(|s| s.local)
- } else {
- let mut multi_sub = MultiSubscriber::new(id.filters());
- multi_sub.subscribe(ndb, pool);
- let sub_id = multi_sub.sub.as_ref().map(|s| s.local);
- *sub = Some(multi_sub);
- sub_id
- }
+ if let Some(multi_sub) = &mut timeline.subscription {
+ debug!("got open with *old* subscription for {:?}", &timeline.kind);
+ multi_sub.subscribe(ndb, pool);
+ } else if let Some(filter) = timeline.filter.get_any_ready() {
+ debug!("got open with *new* subscription for {:?}", &timeline.kind);
+ let mut multi_sub = MultiSubscriber::new(filter.clone());
+ multi_sub.subscribe(ndb, pool);
+ timeline.subscription = Some(multi_sub);
} else {
- None
+            // This should never happen; reasoning: self.notes would have
+ // failed above if the filter wasn't ready
+ error!(
+ "open: filter not ready, so could not setup subscription. this should never happen"
+ );
};
- let timeline = self.get_expected_mut(id);
- if let Some(sub_id) = sub_id {
- timeline.subscription = Some(sub_id);
- }
-
- // TODO: We have subscription ids tracked in different places. Fix this
-
- result
+ open_result
}
}
/// Look for new thread notes since our last fetch
fn find_new_notes(
notes: &[NoteRef],
- id: TimelineCacheKey<'_>,
+ filters: &[Filter],
txn: &Transaction,
ndb: &Ndb,
) -> Vec<NoteRef> {
@@ -261,7 +212,7 @@ fn find_new_notes(
}
let last_note = notes[0];
- let filters = id.filters_since(last_note.created_at + 1);
+ let filters = filter::make_filters_since(filters, last_note.created_at + 1);
if let Ok(results) = ndb.query(txn, &filters, 1000) {
debug!("got {} results from NotesHolder update", results.len());
diff --git a/crates/notedeck_columns/src/timeline/kind.rs b/crates/notedeck_columns/src/timeline/kind.rs
@@ -1,47 +1,190 @@
use crate::error::Error;
use crate::timeline::{Timeline, TimelineTab};
-use enostr::{Filter, Pubkey};
+use enostr::{Filter, NoteId, Pubkey};
use nostrdb::{Ndb, Transaction};
-use notedeck::{filter::default_limit, FilterError, FilterState, RootNoteIdBuf};
+use notedeck::{
+ filter::{self, default_limit},
+ FilterError, FilterState, NoteCache, RootIdError, RootNoteIdBuf,
+};
use serde::{Deserialize, Serialize};
+use std::hash::{Hash, Hasher};
use std::{borrow::Cow, fmt::Display};
+use tokenator::{ParseError, TokenParser, TokenSerializable, TokenWriter};
use tracing::{error, warn};
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
+#[derive(Clone, Hash, Copy, Default, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub enum PubkeySource {
Explicit(Pubkey),
+ #[default]
DeckAuthor,
}
-#[derive(Debug, Clone, PartialEq, Eq)]
+#[derive(Debug, Clone, Copy, PartialEq, Hash, Eq)]
pub enum ListKind {
- Contact(PubkeySource),
+ Contact(Pubkey),
+}
+
+impl ListKind {
+ pub fn pubkey(&self) -> Option<&Pubkey> {
+ match self {
+ Self::Contact(pk) => Some(pk),
+ }
+ }
}
impl PubkeySource {
+ pub fn pubkey(pubkey: Pubkey) -> Self {
+ PubkeySource::Explicit(pubkey)
+ }
+
pub fn to_pubkey<'a>(&'a self, deck_author: &'a Pubkey) -> &'a Pubkey {
match self {
PubkeySource::Explicit(pk) => pk,
PubkeySource::DeckAuthor => deck_author,
}
}
+}
- pub fn to_pubkey_bytes<'a>(&'a self, deck_author: &'a [u8; 32]) -> &'a [u8; 32] {
+impl TokenSerializable for PubkeySource {
+ fn serialize_tokens(&self, writer: &mut TokenWriter) {
match self {
- PubkeySource::Explicit(pk) => pk.bytes(),
- PubkeySource::DeckAuthor => deck_author,
+ PubkeySource::DeckAuthor => {
+ writer.write_token("deck_author");
+ }
+ PubkeySource::Explicit(pk) => {
+ writer.write_token(&hex::encode(pk.bytes()));
+ }
}
}
+
+ fn parse_from_tokens<'a>(parser: &mut TokenParser<'a>) -> Result<Self, ParseError<'a>> {
+ parser.try_parse(|p| {
+ match p.pull_token() {
+ // we handle bare payloads and assume they are explicit pubkey sources
+ Ok("explicit") => {
+ if let Ok(hex) = p.pull_token() {
+ let pk = Pubkey::from_hex(hex).map_err(|_| ParseError::HexDecodeFailed)?;
+ Ok(PubkeySource::Explicit(pk))
+ } else {
+ Err(ParseError::HexDecodeFailed)
+ }
+ }
+
+ Err(_) | Ok("deck_author") => Ok(PubkeySource::DeckAuthor),
+
+ Ok(hex) => {
+ let pk = Pubkey::from_hex(hex).map_err(|_| ParseError::HexDecodeFailed)?;
+ Ok(PubkeySource::Explicit(pk))
+ }
+ }
+ })
+ }
}
impl ListKind {
- pub fn pubkey_source(&self) -> Option<&PubkeySource> {
+ pub fn contact_list(pk: Pubkey) -> Self {
+ ListKind::Contact(pk)
+ }
+
+ pub fn parse<'a>(
+ parser: &mut TokenParser<'a>,
+ deck_author: &Pubkey,
+ ) -> Result<Self, ParseError<'a>> {
+ parser.parse_all(|p| {
+ p.parse_token("contact")?;
+ let pk_src = PubkeySource::parse_from_tokens(p)?;
+ Ok(ListKind::Contact(*pk_src.to_pubkey(deck_author)))
+ })
+
+        /* here for you when you need more things to parse
+ TokenParser::alt(
+ parser,
+ &[|p| {
+ p.parse_all(|p| {
+ p.parse_token("contact")?;
+ let pk_src = PubkeySource::parse_from_tokens(p)?;
+ Ok(ListKind::Contact(pk_src))
+ });
+ },|p| {
+ // more cases...
+ }],
+ )
+ */
+ }
+
+ pub fn serialize_tokens(&self, writer: &mut TokenWriter) {
match self {
- ListKind::Contact(pk_src) => Some(pk_src),
+ ListKind::Contact(pk) => {
+ writer.write_token("contact");
+ PubkeySource::pubkey(*pk).serialize_tokens(writer);
+ }
}
}
}
+/// Thread selection hashing is done in a specific way. For TimelineCache
+/// lookups, we want to only let the root_id influence thread selection.
+/// This way Thread TimelineKinds always map to the same cached timeline
+/// for now (we will likely have to rework this since threads aren't
+/// *really* timelines)
+#[derive(Debug, Clone)]
+pub struct ThreadSelection {
+ pub root_id: RootNoteIdBuf,
+
+ /// The selected note, if different than the root_id. None here
+ /// means the root is selected
+ pub selected_note: Option<NoteId>,
+}
+
+impl ThreadSelection {
+ pub fn selected_or_root(&self) -> &[u8; 32] {
+ self.selected_note
+ .as_ref()
+ .map(|sn| sn.bytes())
+ .unwrap_or(self.root_id.bytes())
+ }
+
+ pub fn from_root_id(root_id: RootNoteIdBuf) -> Self {
+ Self {
+ root_id,
+ selected_note: None,
+ }
+ }
+
+ pub fn from_note_id(
+ ndb: &Ndb,
+ note_cache: &mut NoteCache,
+ txn: &Transaction,
+ note_id: NoteId,
+ ) -> Result<Self, RootIdError> {
+ let root_id = RootNoteIdBuf::new(ndb, note_cache, txn, note_id.bytes())?;
+ Ok(if root_id.bytes() == note_id.bytes() {
+ Self::from_root_id(root_id)
+ } else {
+ Self {
+ root_id,
+ selected_note: Some(note_id),
+ }
+ })
+ }
+}
+
+impl Hash for ThreadSelection {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ // only hash the root id for thread selection
+ self.root_id.hash(state)
+ }
+}
+
+// need this to only match root_id or else hash lookups will fail
+impl PartialEq for ThreadSelection {
+ fn eq(&self, other: &Self) -> bool {
+ self.root_id == other.root_id
+ }
+}
+
+impl Eq for ThreadSelection {}
+
///
/// What kind of timeline is it?
/// - Follow List
@@ -50,30 +193,71 @@ impl ListKind {
/// - filter
/// - ... etc
///
-#[derive(Debug, Clone, PartialEq, Eq)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum TimelineKind {
List(ListKind),
- Notifications(PubkeySource),
+    /// The last note per pubkey
+ Algo(AlgoTimeline),
- Profile(PubkeySource),
+ Notifications(Pubkey),
- /// This could be any note id, doesn't need to be the root id
- Thread(RootNoteIdBuf),
+ Profile(Pubkey),
+
+ Thread(ThreadSelection),
Universe,
- /// Generic filter
- Generic,
+ /// Generic filter, references a hash of a filter
+ Generic(u64),
Hashtag(String),
}
+const NOTIFS_TOKEN_DEPRECATED: &str = "notifs";
+const NOTIFS_TOKEN: &str = "notifications";
+
+/// Hardcoded algo timelines
+#[derive(Debug, Hash, Clone, Copy, PartialEq, Eq)]
+pub enum AlgoTimeline {
+ /// LastPerPubkey: a special nostr query that fetches the last N
+ /// notes for each pubkey on the list
+ LastPerPubkey(ListKind),
+}
+
+/// The identifier for our last per pubkey algo
+const LAST_PER_PUBKEY_TOKEN: &str = "last_per_pubkey";
+
+impl AlgoTimeline {
+ pub fn serialize_tokens(&self, writer: &mut TokenWriter) {
+ match self {
+ AlgoTimeline::LastPerPubkey(list_kind) => {
+ writer.write_token(LAST_PER_PUBKEY_TOKEN);
+ list_kind.serialize_tokens(writer);
+ }
+ }
+ }
+
+ pub fn parse<'a>(
+ parser: &mut TokenParser<'a>,
+ deck_author: &Pubkey,
+ ) -> Result<Self, ParseError<'a>> {
+ parser.parse_all(|p| {
+ p.parse_token(LAST_PER_PUBKEY_TOKEN)?;
+ Ok(AlgoTimeline::LastPerPubkey(ListKind::parse(
+ p,
+ deck_author,
+ )?))
+ })
+ }
+}
+
impl Display for TimelineKind {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
TimelineKind::List(ListKind::Contact(_src)) => f.write_str("Contacts"),
- TimelineKind::Generic => f.write_str("Timeline"),
+ TimelineKind::Algo(AlgoTimeline::LastPerPubkey(_lk)) => f.write_str("Last Notes"),
+ TimelineKind::Generic(_) => f.write_str("Timeline"),
TimelineKind::Notifications(_) => f.write_str("Notifications"),
TimelineKind::Profile(_) => f.write_str("Profile"),
TimelineKind::Universe => f.write_str("Universe"),
@@ -84,93 +268,283 @@ impl Display for TimelineKind {
}
impl TimelineKind {
- pub fn pubkey_source(&self) -> Option<&PubkeySource> {
+ pub fn pubkey(&self) -> Option<&Pubkey> {
match self {
- TimelineKind::List(list_kind) => list_kind.pubkey_source(),
- TimelineKind::Notifications(pk_src) => Some(pk_src),
- TimelineKind::Profile(pk_src) => Some(pk_src),
+ TimelineKind::List(list_kind) => list_kind.pubkey(),
+ TimelineKind::Algo(AlgoTimeline::LastPerPubkey(list_kind)) => list_kind.pubkey(),
+ TimelineKind::Notifications(pk) => Some(pk),
+ TimelineKind::Profile(pk) => Some(pk),
TimelineKind::Universe => None,
- TimelineKind::Generic => None,
+ TimelineKind::Generic(_) => None,
TimelineKind::Hashtag(_ht) => None,
TimelineKind::Thread(_ht) => None,
}
}
- pub fn contact_list(pk: PubkeySource) -> Self {
- TimelineKind::List(ListKind::Contact(pk))
+ /// Some feeds are not realtime, like certain algo feeds
+ pub fn should_subscribe_locally(&self) -> bool {
+ match self {
+ TimelineKind::Algo(AlgoTimeline::LastPerPubkey(_list_kind)) => false,
+
+ TimelineKind::List(_list_kind) => true,
+ TimelineKind::Notifications(_pk_src) => true,
+ TimelineKind::Profile(_pk_src) => true,
+ TimelineKind::Universe => true,
+ TimelineKind::Generic(_) => true,
+ TimelineKind::Hashtag(_ht) => true,
+ TimelineKind::Thread(_ht) => true,
+ }
+ }
+
+ pub fn serialize_tokens(&self, writer: &mut TokenWriter) {
+ match self {
+ TimelineKind::List(list_kind) => list_kind.serialize_tokens(writer),
+ TimelineKind::Algo(algo_timeline) => algo_timeline.serialize_tokens(writer),
+ TimelineKind::Notifications(pk) => {
+ writer.write_token(NOTIFS_TOKEN);
+ PubkeySource::pubkey(*pk).serialize_tokens(writer);
+ }
+ TimelineKind::Profile(pk) => {
+ writer.write_token("profile");
+ PubkeySource::pubkey(*pk).serialize_tokens(writer);
+ }
+ TimelineKind::Thread(root_note_id) => {
+ writer.write_token("thread");
+ writer.write_token(&root_note_id.root_id.hex());
+ }
+ TimelineKind::Universe => {
+ writer.write_token("universe");
+ }
+ TimelineKind::Generic(_usize) => {
+ // TODO: lookup filter and then serialize
+ writer.write_token("generic");
+ }
+ TimelineKind::Hashtag(ht) => {
+ writer.write_token("hashtag");
+ writer.write_token(ht);
+ }
+ }
+ }
+
+ pub fn parse<'a>(
+ parser: &mut TokenParser<'a>,
+ deck_author: &Pubkey,
+ ) -> Result<Self, ParseError<'a>> {
+ let profile = parser.try_parse(|p| {
+ p.parse_token("profile")?;
+ let pk_src = PubkeySource::parse_from_tokens(p)?;
+ Ok(TimelineKind::Profile(*pk_src.to_pubkey(deck_author)))
+ });
+ if profile.is_ok() {
+ return profile;
+ }
+
+ let notifications = parser.try_parse(|p| {
+ // still handle deprecated form (notifs)
+ p.parse_any_token(&[NOTIFS_TOKEN, NOTIFS_TOKEN_DEPRECATED])?;
+ let pk_src = PubkeySource::parse_from_tokens(p)?;
+ Ok(TimelineKind::Notifications(*pk_src.to_pubkey(deck_author)))
+ });
+ if notifications.is_ok() {
+ return notifications;
+ }
+
+ let list_tl =
+ parser.try_parse(|p| Ok(TimelineKind::List(ListKind::parse(p, deck_author)?)));
+ if list_tl.is_ok() {
+ return list_tl;
+ }
+
+ let algo_tl =
+ parser.try_parse(|p| Ok(TimelineKind::Algo(AlgoTimeline::parse(p, deck_author)?)));
+ if algo_tl.is_ok() {
+ return algo_tl;
+ }
+
+ TokenParser::alt(
+ parser,
+ &[
+ |p| {
+ p.parse_token("thread")?;
+ Ok(TimelineKind::Thread(ThreadSelection::from_root_id(
+ RootNoteIdBuf::new_unsafe(tokenator::parse_hex_id(p)?),
+ )))
+ },
+ |p| {
+ p.parse_token("universe")?;
+ Ok(TimelineKind::Universe)
+ },
+ |p| {
+ p.parse_token("generic")?;
+ // TODO: generic filter serialization
+ Ok(TimelineKind::Generic(0))
+ },
+ |p| {
+ p.parse_token("hashtag")?;
+ Ok(TimelineKind::Hashtag(p.pull_token()?.to_string()))
+ },
+ ],
+ )
+ }
+
+ pub fn last_per_pubkey(list_kind: ListKind) -> Self {
+ TimelineKind::Algo(AlgoTimeline::LastPerPubkey(list_kind))
+ }
+
+ pub fn contact_list(pk: Pubkey) -> Self {
+ TimelineKind::List(ListKind::contact_list(pk))
}
pub fn is_contacts(&self) -> bool {
matches!(self, TimelineKind::List(ListKind::Contact(_)))
}
- pub fn profile(pk: PubkeySource) -> Self {
+ pub fn profile(pk: Pubkey) -> Self {
TimelineKind::Profile(pk)
}
- pub fn thread(root_id: RootNoteIdBuf) -> Self {
- TimelineKind::Thread(root_id)
+ pub fn thread(selected_note: ThreadSelection) -> Self {
+ TimelineKind::Thread(selected_note)
}
pub fn is_notifications(&self) -> bool {
matches!(self, TimelineKind::Notifications(_))
}
- pub fn notifications(pk: PubkeySource) -> Self {
+ pub fn notifications(pk: Pubkey) -> Self {
TimelineKind::Notifications(pk)
}
- pub fn into_timeline(self, ndb: &Ndb, default_user: Option<&[u8; 32]>) -> Option<Timeline> {
+ // TODO: probably should set default limit here
+ pub fn filters(&self, txn: &Transaction, ndb: &Ndb) -> FilterState {
+ match self {
+ TimelineKind::Universe => FilterState::ready(universe_filter()),
+
+ TimelineKind::List(list_k) => match list_k {
+ ListKind::Contact(pubkey) => contact_filter_state(txn, ndb, pubkey),
+ },
+
+ // TODO: still need to update this to fetch likes, zaps, etc
+ TimelineKind::Notifications(pubkey) => FilterState::ready(vec![Filter::new()
+ .pubkeys([pubkey.bytes()])
+ .kinds([1])
+ .limit(default_limit())
+ .build()]),
+
+ TimelineKind::Hashtag(hashtag) => FilterState::ready(vec![Filter::new()
+ .kinds([1])
+ .limit(filter::default_limit())
+ .tags([hashtag.to_lowercase()], 't')
+ .build()]),
+
+ TimelineKind::Algo(algo_timeline) => match algo_timeline {
+ AlgoTimeline::LastPerPubkey(list_k) => match list_k {
+ ListKind::Contact(pubkey) => last_per_pubkey_filter_state(ndb, pubkey),
+ },
+ },
+
+ TimelineKind::Generic(_) => {
+ todo!("implement generic filter lookups")
+ }
+
+ TimelineKind::Thread(selection) => FilterState::ready(vec![
+ nostrdb::Filter::new()
+ .kinds([1])
+ .event(selection.root_id.bytes())
+ .build(),
+ nostrdb::Filter::new()
+ .ids([selection.root_id.bytes()])
+ .limit(1)
+ .build(),
+ ]),
+
+ TimelineKind::Profile(pk) => FilterState::ready(vec![Filter::new()
+ .authors([pk.bytes()])
+ .kinds([1])
+ .limit(default_limit())
+ .build()]),
+ }
+ }
+
+ pub fn into_timeline(self, txn: &Transaction, ndb: &Ndb) -> Option<Timeline> {
match self {
TimelineKind::Universe => Some(Timeline::new(
TimelineKind::Universe,
- FilterState::ready(vec![Filter::new()
- .kinds([1])
- .limit(default_limit())
- .build()]),
+ FilterState::ready(universe_filter()),
TimelineTab::no_replies(),
)),
TimelineKind::Thread(root_id) => Some(Timeline::thread(root_id)),
- TimelineKind::Generic => {
+ TimelineKind::Generic(_filter_id) => {
warn!("you can't convert a TimelineKind::Generic to a Timeline");
+ // TODO: you actually can! just need to look up the filter id
None
}
- TimelineKind::Profile(pk_src) => {
- let pk = match &pk_src {
- PubkeySource::DeckAuthor => default_user?,
- PubkeySource::Explicit(pk) => pk.bytes(),
- };
+ TimelineKind::Algo(AlgoTimeline::LastPerPubkey(ListKind::Contact(pk))) => {
+ let contact_filter = Filter::new()
+ .authors([pk.bytes()])
+ .kinds([3])
+ .limit(1)
+ .build();
+
+ let results = ndb
+ .query(txn, &[contact_filter.clone()], 1)
+ .expect("contact query failed?");
+
+ let kind_fn = TimelineKind::last_per_pubkey;
+ let tabs = TimelineTab::only_notes_and_replies();
+
+ if results.is_empty() {
+ return Some(Timeline::new(
+ kind_fn(ListKind::contact_list(pk)),
+ FilterState::needs_remote(vec![contact_filter.clone()]),
+ tabs,
+ ));
+ }
+
+ let list_kind = ListKind::contact_list(pk);
+ match Timeline::last_per_pubkey(&results[0].note, &list_kind) {
+ Err(Error::App(notedeck::Error::Filter(FilterError::EmptyContactList))) => {
+ Some(Timeline::new(
+ kind_fn(list_kind),
+ FilterState::needs_remote(vec![contact_filter]),
+ tabs,
+ ))
+ }
+ Err(e) => {
+ error!("Unexpected error: {e}");
+ None
+ }
+ Ok(tl) => Some(tl),
+ }
+ }
+
+ TimelineKind::Profile(pk) => {
let filter = Filter::new()
- .authors([pk])
+ .authors([pk.bytes()])
.kinds([1])
.limit(default_limit())
.build();
Some(Timeline::new(
- TimelineKind::profile(pk_src),
+ TimelineKind::profile(pk),
FilterState::ready(vec![filter]),
TimelineTab::full_tabs(),
))
}
- TimelineKind::Notifications(pk_src) => {
- let pk = match &pk_src {
- PubkeySource::DeckAuthor => default_user?,
- PubkeySource::Explicit(pk) => pk.bytes(),
- };
-
+ TimelineKind::Notifications(pk) => {
let notifications_filter = Filter::new()
- .pubkeys([pk])
+ .pubkeys([pk.bytes()])
.kinds([1])
.limit(default_limit())
.build();
Some(Timeline::new(
- TimelineKind::notifications(pk_src),
+ TimelineKind::notifications(pk),
FilterState::ready(vec![notifications_filter]),
TimelineTab::only_notes_and_replies(),
))
@@ -178,42 +552,11 @@ impl TimelineKind {
TimelineKind::Hashtag(hashtag) => Some(Timeline::hashtag(hashtag)),
- TimelineKind::List(ListKind::Contact(pk_src)) => {
- let pk = match &pk_src {
- PubkeySource::DeckAuthor => default_user?,
- PubkeySource::Explicit(pk) => pk.bytes(),
- };
-
- let contact_filter = Filter::new().authors([pk]).kinds([3]).limit(1).build();
-
- let txn = Transaction::new(ndb).expect("txn");
- let results = ndb
- .query(&txn, &[contact_filter.clone()], 1)
- .expect("contact query failed?");
-
- if results.is_empty() {
- return Some(Timeline::new(
- TimelineKind::contact_list(pk_src),
- FilterState::needs_remote(vec![contact_filter.clone()]),
- TimelineTab::full_tabs(),
- ));
- }
-
- match Timeline::contact_list(&results[0].note, pk_src.clone(), default_user) {
- Err(Error::App(notedeck::Error::Filter(FilterError::EmptyContactList))) => {
- Some(Timeline::new(
- TimelineKind::contact_list(pk_src),
- FilterState::needs_remote(vec![contact_filter]),
- TimelineTab::full_tabs(),
- ))
- }
- Err(e) => {
- error!("Unexpected error: {e}");
- None
- }
- Ok(tl) => Some(tl),
- }
- }
+ TimelineKind::List(ListKind::Contact(pk)) => Some(Timeline::new(
+ TimelineKind::contact_list(pk),
+ contact_filter_state(txn, ndb, &pk),
+ TimelineTab::full_tabs(),
+ )),
}
}
@@ -222,11 +565,14 @@ impl TimelineKind {
TimelineKind::List(list_kind) => match list_kind {
ListKind::Contact(_pubkey_source) => ColumnTitle::simple("Contacts"),
},
+ TimelineKind::Algo(AlgoTimeline::LastPerPubkey(list_kind)) => match list_kind {
+ ListKind::Contact(_pubkey_source) => ColumnTitle::simple("Contacts (last notes)"),
+ },
TimelineKind::Notifications(_pubkey_source) => ColumnTitle::simple("Notifications"),
TimelineKind::Profile(_pubkey_source) => ColumnTitle::needs_db(self),
TimelineKind::Thread(_root_id) => ColumnTitle::simple("Thread"),
TimelineKind::Universe => ColumnTitle::simple("Universe"),
- TimelineKind::Generic => ColumnTitle::simple("Custom"),
+ TimelineKind::Generic(_) => ColumnTitle::simple("Custom"),
TimelineKind::Hashtag(hashtag) => ColumnTitle::formatted(hashtag.to_string()),
}
}
@@ -242,26 +588,15 @@ impl<'a> TitleNeedsDb<'a> {
TitleNeedsDb { kind }
}
- pub fn title<'txn>(
- &self,
- txn: &'txn Transaction,
- ndb: &Ndb,
- deck_author: Option<&Pubkey>,
- ) -> &'txn str {
- if let TimelineKind::Profile(pubkey_source) = self.kind {
- if let Some(deck_author) = deck_author {
- let pubkey = pubkey_source.to_pubkey(deck_author);
- let profile = ndb.get_profile_by_pubkey(txn, pubkey);
- let m_name = profile
- .as_ref()
- .ok()
- .map(|p| crate::profile::get_display_name(Some(p)).name());
-
- m_name.unwrap_or("Profile")
- } else {
- // why would be there be no deck author? weird
- "nostrich"
- }
+ pub fn title<'txn>(&self, txn: &'txn Transaction, ndb: &Ndb) -> &'txn str {
+ if let TimelineKind::Profile(pubkey) = self.kind {
+ let profile = ndb.get_profile_by_pubkey(txn, pubkey);
+ let m_name = profile
+ .as_ref()
+ .ok()
+ .map(|p| crate::profile::get_display_name(Some(p)).name());
+
+ m_name.unwrap_or("Profile")
} else {
"Unknown"
}
@@ -289,3 +624,65 @@ impl<'a> ColumnTitle<'a> {
Self::NeedsDb(TitleNeedsDb::new(kind))
}
}
+
+fn contact_filter_state(txn: &Transaction, ndb: &Ndb, pk: &Pubkey) -> FilterState {
+ let contact_filter = Filter::new()
+ .authors([pk.bytes()])
+ .kinds([3])
+ .limit(1)
+ .build();
+
+ let results = ndb
+ .query(txn, &[contact_filter.clone()], 1)
+ .expect("contact query failed?");
+
+ if results.is_empty() {
+ FilterState::needs_remote(vec![contact_filter.clone()])
+ } else {
+ let with_hashtags = false;
+ match filter::filter_from_tags(&results[0].note, Some(pk.bytes()), with_hashtags) {
+ Err(notedeck::Error::Filter(FilterError::EmptyContactList)) => {
+ FilterState::needs_remote(vec![contact_filter])
+ }
+ Err(err) => {
+ error!("Error getting contact filter state: {err}");
+ FilterState::Broken(FilterError::EmptyContactList)
+ }
+ Ok(filter) => FilterState::ready(filter.into_follow_filter()),
+ }
+ }
+}
+
+fn last_per_pubkey_filter_state(ndb: &Ndb, pk: &Pubkey) -> FilterState {
+ let contact_filter = Filter::new()
+ .authors([pk.bytes()])
+ .kinds([3])
+ .limit(1)
+ .build();
+
+ let txn = Transaction::new(ndb).expect("txn");
+ let results = ndb
+ .query(&txn, &[contact_filter.clone()], 1)
+ .expect("contact query failed?");
+
+ if results.is_empty() {
+ FilterState::needs_remote(vec![contact_filter])
+ } else {
+ let kind = 1;
+ let notes_per_pk = 1;
+ match filter::last_n_per_pubkey_from_tags(&results[0].note, kind, notes_per_pk) {
+ Err(notedeck::Error::Filter(FilterError::EmptyContactList)) => {
+ FilterState::needs_remote(vec![contact_filter])
+ }
+ Err(err) => {
+ error!("Error getting contact filter state: {err}");
+ FilterState::Broken(FilterError::EmptyContactList)
+ }
+ Ok(filter) => FilterState::ready(filter),
+ }
+ }
+}
+
+fn universe_filter() -> Vec<Filter> {
+ vec![Filter::new().kinds([1]).limit(default_limit()).build()]
+}
diff --git a/crates/notedeck_columns/src/timeline/mod.rs b/crates/notedeck_columns/src/timeline/mod.rs
@@ -1,25 +1,19 @@
use crate::{
- column::Columns,
- decks::DecksCache,
error::Error,
+ multi_subscriber::MultiSubscriber,
subscriptions::{self, SubKind, Subscriptions},
- thread::Thread,
+ timeline::kind::ListKind,
Result,
};
use notedeck::{
- filter, CachedNote, FilterError, FilterState, FilterStates, NoteCache, NoteRef, RootNoteIdBuf,
- UnknownIds,
+ filter, CachedNote, FilterError, FilterState, FilterStates, NoteCache, NoteRef, UnknownIds,
};
-use std::fmt;
-use std::sync::atomic::{AtomicU32, Ordering};
-
use egui_virtual_list::VirtualList;
use enostr::{PoolRelay, Pubkey, RelayPool};
-use nostrdb::{Filter, Ndb, Note, NoteKey, Subscription, Transaction};
+use nostrdb::{Filter, Ndb, Note, NoteKey, Transaction};
use std::cell::RefCell;
-use std::hash::Hash;
use std::rc::Rc;
use tracing::{debug, error, info, warn};
@@ -28,17 +22,26 @@ pub mod cache;
pub mod kind;
pub mod route;
-pub use cache::{TimelineCache, TimelineCacheKey};
-pub use kind::{ColumnTitle, PubkeySource, TimelineKind};
-pub use route::TimelineRoute;
+pub use cache::TimelineCache;
+pub use kind::{ColumnTitle, PubkeySource, ThreadSelection, TimelineKind};
+
+//#[derive(Debug, Hash, Clone, Eq, PartialEq)]
+//pub type TimelineId = TimelineKind;
-#[derive(Debug, Hash, Copy, Clone, Eq, PartialEq)]
-pub struct TimelineId(u32);
+/*
impl TimelineId {
- pub fn new(id: u32) -> Self {
+ pub fn kind(&self) -> &TimelineKind {
+ &self.kind
+ }
+
+ pub fn new(id: TimelineKind) -> Self {
TimelineId(id)
}
+
+ pub fn profile(pubkey: Pubkey) -> Self {
+ TimelineId::new(TimelineKind::Profile(PubkeySource::pubkey(pubkey)))
+ }
}
impl fmt::Display for TimelineId {
@@ -46,6 +49,7 @@ impl fmt::Display for TimelineId {
write!(f, "TimelineId({})", self.0)
}
}
+*/
#[derive(Copy, Clone, Eq, PartialEq, Debug, Default)]
pub enum ViewFilter {
@@ -185,7 +189,6 @@ impl TimelineTab {
/// A column in a deck. Holds navigation state, loaded notes, column kind, etc.
#[derive(Debug)]
pub struct Timeline {
- pub id: TimelineId,
pub kind: TimelineKind,
// We may not have the filter loaded yet, so let's make it an option so
// that codepaths have to explicitly handle it
@@ -193,45 +196,58 @@ pub struct Timeline {
pub views: Vec<TimelineTab>,
pub selected_view: usize,
- pub subscription: Option<Subscription>,
+ pub subscription: Option<MultiSubscriber>,
}
impl Timeline {
/// Create a timeline from a contact list
- pub fn contact_list(
- contact_list: &Note,
- pk_src: PubkeySource,
- deck_author: Option<&[u8; 32]>,
- ) -> Result<Self> {
- let our_pubkey = deck_author.map(|da| pk_src.to_pubkey_bytes(da));
+ pub fn contact_list(contact_list: &Note, pubkey: &[u8; 32]) -> Result<Self> {
let with_hashtags = false;
- let filter =
- filter::filter_from_tags(contact_list, our_pubkey, with_hashtags)?.into_follow_filter();
+ let filter = filter::filter_from_tags(contact_list, Some(pubkey), with_hashtags)?
+ .into_follow_filter();
Ok(Timeline::new(
- TimelineKind::contact_list(pk_src),
+ TimelineKind::contact_list(Pubkey::new(*pubkey)),
FilterState::ready(filter),
TimelineTab::full_tabs(),
))
}
- pub fn thread(note_id: RootNoteIdBuf) -> Self {
- let filter = Thread::filters_raw(note_id.borrow())
- .iter_mut()
- .map(|fb| fb.build())
- .collect();
+ pub fn thread(selection: ThreadSelection) -> Self {
+ let filter = vec![
+ nostrdb::Filter::new()
+ .kinds([1])
+ .event(selection.root_id.bytes())
+ .build(),
+ nostrdb::Filter::new()
+ .ids([selection.root_id.bytes()])
+ .limit(1)
+ .build(),
+ ];
Timeline::new(
- TimelineKind::Thread(note_id),
+ TimelineKind::Thread(selection),
FilterState::ready(filter),
TimelineTab::only_notes_and_replies(),
)
}
+ pub fn last_per_pubkey(list: &Note, list_kind: &ListKind) -> Result<Self> {
+ let kind = 1;
+ let notes_per_pk = 1;
+ let filter = filter::last_n_per_pubkey_from_tags(list, kind, notes_per_pk)?;
+
+ Ok(Timeline::new(
+ TimelineKind::last_per_pubkey(*list_kind),
+ FilterState::ready(filter),
+ TimelineTab::only_notes_and_replies(),
+ ))
+ }
+
pub fn hashtag(hashtag: String) -> Self {
let filter = Filter::new()
.kinds([1])
.limit(filter::default_limit())
- .tags([hashtag.clone()], 't')
+ .tags([hashtag.to_lowercase()], 't')
.build();
Timeline::new(
@@ -241,25 +257,20 @@ impl Timeline {
)
}
- pub fn make_view_id(id: TimelineId, selected_view: usize) -> egui::Id {
+ pub fn make_view_id(id: &TimelineKind, selected_view: usize) -> egui::Id {
egui::Id::new((id, selected_view))
}
pub fn view_id(&self) -> egui::Id {
- Timeline::make_view_id(self.id, self.selected_view)
+ Timeline::make_view_id(&self.kind, self.selected_view)
}
pub fn new(kind: TimelineKind, filter_state: FilterState, views: Vec<TimelineTab>) -> Self {
- // global unique id for all new timelines
- static UIDS: AtomicU32 = AtomicU32::new(0);
-
let filter = FilterStates::new(filter_state);
- let subscription: Option<Subscription> = None;
+ let subscription: Option<MultiSubscriber> = None;
let selected_view = 0;
- let id = TimelineId::new(UIDS.fetch_add(1, Ordering::Relaxed));
Timeline {
- id,
kind,
filter,
views,
@@ -397,8 +408,15 @@ impl Timeline {
note_cache: &mut NoteCache,
reversed: bool,
) -> Result<()> {
+ if !self.kind.should_subscribe_locally() {
+ // don't need to poll for timelines that don't have local subscriptions
+ return Ok(());
+ }
+
let sub = self
.subscription
+ .as_ref()
+ .and_then(|s| s.local_subid)
.ok_or(Error::App(notedeck::Error::no_active_sub()))?;
let new_note_ids = ndb.poll_for_notes(sub, 500);
@@ -466,10 +484,9 @@ pub fn setup_new_timeline(
pool: &mut RelayPool,
note_cache: &mut NoteCache,
since_optimize: bool,
- our_pk: Option<&Pubkey>,
) {
// if we're ready, setup local subs
- if is_timeline_ready(ndb, pool, note_cache, timeline, our_pk) {
+ if is_timeline_ready(ndb, pool, note_cache, timeline) {
if let Err(err) = setup_timeline_nostrdb_sub(ndb, note_cache, timeline) {
error!("setup_new_timeline: {err}");
}
@@ -487,7 +504,7 @@ pub fn setup_new_timeline(
pub fn send_initial_timeline_filters(
ndb: &Ndb,
since_optimize: bool,
- columns: &mut Columns,
+ timeline_cache: &mut TimelineCache,
subs: &mut Subscriptions,
pool: &mut RelayPool,
relay_id: &str,
@@ -495,7 +512,7 @@ pub fn send_initial_timeline_filters(
info!("Sending initial filters to {}", relay_id);
let relay = &mut pool.relays.iter_mut().find(|r| r.url() == relay_id)?;
- for timeline in columns.timelines_mut() {
+ for (_kind, timeline) in timeline_cache.timelines.iter_mut() {
send_initial_timeline_filter(ndb, since_optimize, subs, relay, timeline);
}
@@ -509,7 +526,7 @@ pub fn send_initial_timeline_filter(
relay: &mut PoolRelay,
timeline: &mut Timeline,
) {
- let filter_state = timeline.filter.get(relay.url());
+ let filter_state = timeline.filter.get_mut(relay.url());
match filter_state {
FilterState::Broken(err) => {
@@ -549,7 +566,7 @@ pub fn send_initial_timeline_filter(
if can_since_optimize && filter::should_since_optimize(lim, notes.len()) {
filter = filter::since_optimize_filter(filter, notes);
} else {
- warn!("Skipping since optimization for {:?}: number of local notes is less than limit, attempting to backfill.", filter);
+ warn!("Skipping since optimization for {:?}: number of local notes is less than limit, attempting to backfill.", &timeline.kind);
}
filter
@@ -578,7 +595,7 @@ fn fetch_contact_list(
relay: &mut PoolRelay,
timeline: &mut Timeline,
) {
- let sub_kind = SubKind::FetchingContactList(timeline.id);
+ let sub_kind = SubKind::FetchingContactList(timeline.kind.clone());
let sub_id = subscriptions::new_sub_id();
let local_sub = ndb.subscribe(&filter).expect("sub");
@@ -601,14 +618,34 @@ fn setup_initial_timeline(
note_cache: &mut NoteCache,
filters: &[Filter],
) -> Result<()> {
- timeline.subscription = Some(ndb.subscribe(filters)?);
- let txn = Transaction::new(ndb)?;
+ // some timelines are one-shot and are refreshed, like last_per_pubkey algo feed
+ if timeline.kind.should_subscribe_locally() {
+ let local_sub = ndb.subscribe(filters)?;
+ match &mut timeline.subscription {
+ None => {
+ timeline.subscription = Some(MultiSubscriber::with_initial_local_sub(
+ local_sub,
+ filters.to_vec(),
+ ));
+ }
+
+ Some(msub) => {
+ msub.local_subid = Some(local_sub);
+ }
+ };
+ }
+
debug!(
"querying nostrdb sub {:?} {:?}",
timeline.subscription, timeline.filter
);
- let lim = filters[0].limit().unwrap_or(filter::default_limit()) as i32;
+ let mut lim = 0i32;
+ for filter in filters {
+ lim += filter.limit().unwrap_or(1) as i32;
+ }
+
+ let txn = Transaction::new(ndb)?;
let notes: Vec<NoteRef> = ndb
.query(&txn, filters, lim)?
.into_iter()
@@ -623,15 +660,11 @@ fn setup_initial_timeline(
pub fn setup_initial_nostrdb_subs(
ndb: &Ndb,
note_cache: &mut NoteCache,
- decks_cache: &mut DecksCache,
+ timeline_cache: &mut TimelineCache,
) -> Result<()> {
- for decks in decks_cache.get_all_decks_mut() {
- for deck in decks.decks_mut() {
- for timeline in deck.columns_mut().timelines_mut() {
- if let Err(err) = setup_timeline_nostrdb_sub(ndb, note_cache, timeline) {
- error!("setup_initial_nostrdb_subs: {err}");
- }
- }
+ for (_kind, timeline) in timeline_cache.timelines.iter_mut() {
+ if let Err(err) = setup_timeline_nostrdb_sub(ndb, note_cache, timeline) {
+ error!("setup_initial_nostrdb_subs: {err}");
}
}
@@ -663,7 +696,6 @@ pub fn is_timeline_ready(
pool: &mut RelayPool,
note_cache: &mut NoteCache,
timeline: &mut Timeline,
- our_pk: Option<&Pubkey>,
) -> bool {
// TODO: we should debounce the filter states a bit to make sure we have
// seen all of the different contact lists from each relay
@@ -696,11 +728,7 @@ pub fn is_timeline_ready(
let filter = {
let txn = Transaction::new(ndb).expect("txn");
let note = ndb.get_note_by_key(&txn, note_key).expect("note");
- let add_pk = timeline
- .kind
- .pubkey_source()
- .as_ref()
- .and_then(|pk_src| our_pk.map(|pk| pk_src.to_pubkey_bytes(pk)));
+ let add_pk = timeline.kind.pubkey().map(|pk| pk.bytes());
filter::filter_from_tags(&note, add_pk, with_hashtags).map(|f| f.into_follow_filter())
};
diff --git a/crates/notedeck_columns/src/timeline/route.rs b/crates/notedeck_columns/src/timeline/route.rs
@@ -1,61 +1,44 @@
use crate::{
- column::Columns,
- draft::Drafts,
nav::RenderNavAction,
profile::ProfileAction,
- timeline::{TimelineCache, TimelineId, TimelineKind},
- ui::{
- self,
- note::{NoteOptions, QuoteRepostView},
- profile::ProfileView,
- },
+ timeline::{TimelineCache, TimelineKind},
+ ui::{self, note::NoteOptions, profile::ProfileView},
};
-use enostr::{NoteId, Pubkey};
-use nostrdb::{Ndb, Transaction};
+use enostr::Pubkey;
+use nostrdb::Ndb;
use notedeck::{Accounts, ImageCache, MuteFun, NoteCache, UnknownIds};
-#[derive(Debug, Eq, PartialEq, Clone, Copy)]
-pub enum TimelineRoute {
- Timeline(TimelineId),
- Thread(NoteId),
- Profile(Pubkey),
- Reply(NoteId),
- Quote(NoteId),
-}
-
#[allow(clippy::too_many_arguments)]
pub fn render_timeline_route(
ndb: &Ndb,
- columns: &mut Columns,
- drafts: &mut Drafts,
img_cache: &mut ImageCache,
unknown_ids: &mut UnknownIds,
note_cache: &mut NoteCache,
timeline_cache: &mut TimelineCache,
accounts: &mut Accounts,
- route: TimelineRoute,
+ kind: &TimelineKind,
col: usize,
textmode: bool,
+ depth: usize,
ui: &mut egui::Ui,
) -> Option<RenderNavAction> {
- match route {
- TimelineRoute::Timeline(timeline_id) => {
- let note_options = {
- let is_universe = if let Some(timeline) = columns.find_timeline(timeline_id) {
- timeline.kind == TimelineKind::Universe
- } else {
- false
- };
-
- let mut options = NoteOptions::new(is_universe);
- options.set_textmode(textmode);
- options
- };
-
+ let note_options = {
+ let mut options = NoteOptions::new(kind == &TimelineKind::Universe);
+ options.set_textmode(textmode);
+ options
+ };
+
+ match kind {
+ TimelineKind::List(_)
+ | TimelineKind::Algo(_)
+ | TimelineKind::Notifications(_)
+ | TimelineKind::Universe
+ | TimelineKind::Hashtag(_)
+ | TimelineKind::Generic(_) => {
let note_action = ui::TimelineView::new(
- timeline_id,
- columns,
+ kind,
+ timeline_cache,
ndb,
note_cache,
img_cache,
@@ -67,89 +50,50 @@ pub fn render_timeline_route(
note_action.map(RenderNavAction::NoteAction)
}
- TimelineRoute::Thread(id) => ui::ThreadView::new(
+ TimelineKind::Profile(pubkey) => {
+ if depth > 1 {
+ render_profile_route(
+ pubkey,
+ accounts,
+ ndb,
+ timeline_cache,
+ img_cache,
+ note_cache,
+ unknown_ids,
+ col,
+ ui,
+ &accounts.mutefun(),
+ )
+ } else {
+ // we render profiles like timelines if they are at the root
+ let note_action = ui::TimelineView::new(
+ kind,
+ timeline_cache,
+ ndb,
+ note_cache,
+ img_cache,
+ note_options,
+ &accounts.mutefun(),
+ )
+ .ui(ui);
+
+ note_action.map(RenderNavAction::NoteAction)
+ }
+ }
+
+ TimelineKind::Thread(id) => ui::ThreadView::new(
timeline_cache,
ndb,
note_cache,
unknown_ids,
img_cache,
- id.bytes(),
+ id.selected_or_root(),
textmode,
&accounts.mutefun(),
)
.id_source(egui::Id::new(("threadscroll", col)))
.ui(ui)
.map(Into::into),
-
- TimelineRoute::Reply(id) => {
- let txn = if let Ok(txn) = Transaction::new(ndb) {
- txn
- } else {
- ui.label("Reply to unknown note");
- return None;
- };
-
- let note = if let Ok(note) = ndb.get_note_by_id(&txn, id.bytes()) {
- note
- } else {
- ui.label("Reply to unknown note");
- return None;
- };
-
- let id = egui::Id::new(("post", col, note.key().unwrap()));
- let poster = accounts.selected_or_first_nsec()?;
-
- let action = {
- let draft = drafts.reply_mut(note.id());
-
- let response = egui::ScrollArea::vertical().show(ui, |ui| {
- ui::PostReplyView::new(ndb, poster, draft, note_cache, img_cache, &note)
- .id_source(id)
- .show(ui)
- });
-
- response.inner.action
- };
-
- action.map(Into::into)
- }
-
- TimelineRoute::Profile(pubkey) => render_profile_route(
- &pubkey,
- accounts,
- ndb,
- timeline_cache,
- img_cache,
- note_cache,
- unknown_ids,
- col,
- ui,
- &accounts.mutefun(),
- ),
-
- TimelineRoute::Quote(id) => {
- let txn = Transaction::new(ndb).expect("txn");
-
- let note = if let Ok(note) = ndb.get_note_by_id(&txn, id.bytes()) {
- note
- } else {
- ui.label("Quote of unknown note");
- return None;
- };
-
- let id = egui::Id::new(("post", col, note.key().unwrap()));
-
- let poster = accounts.selected_or_first_nsec()?;
- let draft = drafts.quote_mut(note.id());
-
- let response = egui::ScrollArea::vertical().show(ui, |ui| {
- QuoteRepostView::new(ndb, poster, note_cache, img_cache, draft, &note)
- .id_source(id)
- .show(ui)
- });
-
- response.inner.action.map(Into::into)
- }
}
}
@@ -193,3 +137,32 @@ pub fn render_profile_route(
None
}
}
+
+#[cfg(test)]
+mod tests {
+ use enostr::NoteId;
+ use tokenator::{TokenParser, TokenSerializable, TokenWriter};
+
+ use crate::timeline::{ThreadSelection, TimelineKind};
+ use enostr::Pubkey;
+ use notedeck::RootNoteIdBuf;
+
+ #[test]
+ fn test_timeline_route_serialize() {
+ use super::TimelineKind;
+
+ let note_id_hex = "1c54e5b0c386425f7e017d9e068ddef8962eb2ce1bb08ed27e24b93411c12e60";
+ let note_id = NoteId::from_hex(note_id_hex).unwrap();
+ let data_str = format!("thread:{}", note_id_hex);
+ let data = &data_str.split(":").collect::<Vec<&str>>();
+ let mut token_writer = TokenWriter::default();
+ let mut parser = TokenParser::new(&data);
+ let parsed = TimelineKind::parse(&mut parser, &Pubkey::new(*note_id.bytes())).unwrap();
+ let expected = TimelineKind::Thread(ThreadSelection::from_root_id(
+ RootNoteIdBuf::new_unsafe(*note_id.bytes()),
+ ));
+ parsed.serialize_tokens(&mut token_writer);
+ assert_eq!(expected, parsed);
+ assert_eq!(token_writer.str(), data_str);
+ }
+}
diff --git a/crates/notedeck_columns/src/ui/add_column.rs b/crates/notedeck_columns/src/ui/add_column.rs
@@ -10,12 +10,14 @@ use nostrdb::{Ndb, Transaction};
use crate::{
login_manager::AcquireKeyState,
- timeline::{PubkeySource, Timeline, TimelineKind},
+ route::Route,
+ timeline::{kind::ListKind, PubkeySource, Timeline, TimelineKind},
ui::anim::ICON_EXPANSION_MULTIPLE,
Damus,
};
use notedeck::{AppContext, ImageCache, NotedeckTextStyle, UserAccount};
+use tokenator::{ParseError, TokenParser, TokenSerializable, TokenWriter};
use super::{anim::AnimationHelper, padding, ProfilePreview};
@@ -24,22 +26,35 @@ pub enum AddColumnResponse {
UndecidedNotification,
ExternalNotification,
Hashtag,
+ Algo(AlgoOption),
UndecidedIndividual,
ExternalIndividual,
}
pub enum NotificationColumnType {
- Home,
+ Contacts,
External,
}
#[derive(Clone, Debug)]
+pub enum Decision<T> {
+ Undecided,
+ Decided(T),
+}
+
+#[derive(Clone, Debug)]
+pub enum AlgoOption {
+ LastPerPubkey(Decision<ListKind>),
+}
+
+#[derive(Clone, Debug)]
enum AddColumnOption {
Universe,
UndecidedNotification,
ExternalNotification,
+ Algo(AlgoOption),
Notification(PubkeySource),
- Home(PubkeySource),
+ Contacts(PubkeySource),
UndecidedHashtag,
Hashtag(String),
UndecidedIndividual,
@@ -47,47 +62,120 @@ enum AddColumnOption {
Individual(PubkeySource),
}
+#[derive(Clone, Copy, Eq, PartialEq, Debug, Default)]
+pub enum AddAlgoRoute {
+ #[default]
+ Base,
+ LastPerPubkey,
+}
+
#[derive(Clone, Copy, Eq, PartialEq, Debug)]
pub enum AddColumnRoute {
Base,
UndecidedNotification,
ExternalNotification,
Hashtag,
+ Algo(AddAlgoRoute),
UndecidedIndividual,
ExternalIndividual,
}
+// Parser for the common case without any payloads
+fn parse_column_route<'a>(
+ parser: &mut TokenParser<'a>,
+ route: AddColumnRoute,
+) -> Result<AddColumnRoute, ParseError<'a>> {
+ parser.parse_all(|p| {
+ for token in route.tokens() {
+ p.parse_token(token)?;
+ }
+ Ok(route)
+ })
+}
+
+impl AddColumnRoute {
+ /// Route tokens use in both serialization and deserialization
+ fn tokens(&self) -> &'static [&'static str] {
+ match self {
+ Self::Base => &["column"],
+ Self::UndecidedNotification => &["column", "notification_selection"],
+ Self::ExternalNotification => &["column", "external_notif_selection"],
+ Self::UndecidedIndividual => &["column", "individual_selection"],
+ Self::ExternalIndividual => &["column", "external_individual_selection"],
+ Self::Hashtag => &["column", "hashtag"],
+ Self::Algo(AddAlgoRoute::Base) => &["column", "algo_selection"],
+ Self::Algo(AddAlgoRoute::LastPerPubkey) => {
+ &["column", "algo_selection", "last_per_pubkey"]
+ } // NOTE!!! When adding to this, update the parser for TokenSerializable below
+ }
+ }
+}
+
+impl TokenSerializable for AddColumnRoute {
+ fn serialize_tokens(&self, writer: &mut TokenWriter) {
+ for token in self.tokens() {
+ writer.write_token(token);
+ }
+ }
+
+ fn parse_from_tokens<'a>(parser: &mut TokenParser<'a>) -> Result<Self, ParseError<'a>> {
+ parser.peek_parse_token("column")?;
+
+ TokenParser::alt(
+ parser,
+ &[
+ |p| parse_column_route(p, AddColumnRoute::Base),
+ |p| parse_column_route(p, AddColumnRoute::UndecidedNotification),
+ |p| parse_column_route(p, AddColumnRoute::ExternalNotification),
+ |p| parse_column_route(p, AddColumnRoute::UndecidedIndividual),
+ |p| parse_column_route(p, AddColumnRoute::ExternalIndividual),
+ |p| parse_column_route(p, AddColumnRoute::Hashtag),
+ |p| parse_column_route(p, AddColumnRoute::Algo(AddAlgoRoute::Base)),
+ |p| parse_column_route(p, AddColumnRoute::Algo(AddAlgoRoute::LastPerPubkey)),
+ ],
+ )
+ }
+}
+
impl AddColumnOption {
pub fn take_as_response(
self,
ndb: &Ndb,
cur_account: Option<&UserAccount>,
) -> Option<AddColumnResponse> {
+ let txn = Transaction::new(ndb).unwrap();
match self {
+ AddColumnOption::Algo(algo_option) => Some(AddColumnResponse::Algo(algo_option)),
AddColumnOption::Universe => TimelineKind::Universe
- .into_timeline(ndb, None)
- .map(AddColumnResponse::Timeline),
- AddColumnOption::Notification(pubkey) => TimelineKind::Notifications(pubkey)
- .into_timeline(ndb, cur_account.map(|a| a.pubkey.bytes()))
+ .into_timeline(&txn, ndb)
.map(AddColumnResponse::Timeline),
+ AddColumnOption::Notification(pubkey) => {
+ TimelineKind::Notifications(*pubkey.to_pubkey(&cur_account.map(|kp| kp.pubkey)?))
+ .into_timeline(&txn, ndb)
+ .map(AddColumnResponse::Timeline)
+ }
AddColumnOption::UndecidedNotification => {
Some(AddColumnResponse::UndecidedNotification)
}
- AddColumnOption::Home(pubkey) => {
- let tlk = TimelineKind::contact_list(pubkey);
- tlk.into_timeline(ndb, cur_account.map(|a| a.pubkey.bytes()))
+ AddColumnOption::Contacts(pk_src) => {
+ let tlk = TimelineKind::contact_list(
+ *pk_src.to_pubkey(&cur_account.map(|kp| kp.pubkey)?),
+ );
+ tlk.into_timeline(&txn, ndb)
.map(AddColumnResponse::Timeline)
}
AddColumnOption::ExternalNotification => Some(AddColumnResponse::ExternalNotification),
AddColumnOption::UndecidedHashtag => Some(AddColumnResponse::Hashtag),
AddColumnOption::Hashtag(hashtag) => TimelineKind::Hashtag(hashtag)
- .into_timeline(ndb, None)
+ .into_timeline(&txn, ndb)
.map(AddColumnResponse::Timeline),
AddColumnOption::UndecidedIndividual => Some(AddColumnResponse::UndecidedIndividual),
AddColumnOption::ExternalIndividual => Some(AddColumnResponse::ExternalIndividual),
AddColumnOption::Individual(pubkey_source) => {
- let tlk = TimelineKind::profile(pubkey_source);
- tlk.into_timeline(ndb, cur_account.map(|a| a.pubkey.bytes()))
+ let tlk = TimelineKind::profile(
+ *pubkey_source.to_pubkey(&cur_account.map(|kp| kp.pubkey)?),
+ );
+ tlk.into_timeline(&txn, ndb)
.map(AddColumnResponse::Timeline)
}
}
@@ -151,6 +239,44 @@ impl<'a> AddColumnView<'a> {
})
}
+ fn algo_last_per_pk_ui(
+ &mut self,
+ ui: &mut Ui,
+ deck_author: Pubkey,
+ ) -> Option<AddColumnResponse> {
+ let algo_option = ColumnOptionData {
+ title: "Contact List",
+ description: "Source the last note for each user in your contact list",
+ icon: egui::include_image!("../../../../assets/icons/home_icon_dark_4x.png"),
+ option: AddColumnOption::Algo(AlgoOption::LastPerPubkey(Decision::Decided(
+ ListKind::contact_list(deck_author),
+ ))),
+ };
+
+ let option = algo_option.option.clone();
+ if self.column_option_ui(ui, algo_option).clicked() {
+ option.take_as_response(self.ndb, self.cur_account)
+ } else {
+ None
+ }
+ }
+
+ fn algo_ui(&mut self, ui: &mut Ui) -> Option<AddColumnResponse> {
+ let algo_option = ColumnOptionData {
+ title: "Last Note per User",
+ description: "Show the last note for each user from a list",
+ icon: egui::include_image!("../../../../assets/icons/universe_icon_dark_4x.png"),
+ option: AddColumnOption::Algo(AlgoOption::LastPerPubkey(Decision::Undecided)),
+ };
+
+ let option = algo_option.option.clone();
+ if self.column_option_ui(ui, algo_option).clicked() {
+ option.take_as_response(self.ndb, self.cur_account)
+ } else {
+ None
+ }
+ }
+
fn individual_ui(&mut self, ui: &mut Ui) -> Option<AddColumnResponse> {
let mut selected_option: Option<AddColumnResponse> = None;
for column_option_data in self.get_individual_options() {
@@ -204,18 +330,22 @@ impl<'a> AddColumnView<'a> {
}
let resp = if let Some(keypair) = key_state.get_login_keypair() {
- let txn = Transaction::new(self.ndb).expect("txn");
- if let Ok(profile) = self.ndb.get_profile_by_pubkey(&txn, keypair.pubkey.bytes()) {
- egui::Frame::window(ui.style())
- .outer_margin(Margin {
- left: 4.0,
- right: 4.0,
- top: 12.0,
- bottom: 32.0,
- })
- .show(ui, |ui| {
- ProfilePreview::new(&profile, self.img_cache).ui(ui);
- });
+ {
+ let txn = Transaction::new(self.ndb).expect("txn");
+ if let Ok(profile) =
+ self.ndb.get_profile_by_pubkey(&txn, keypair.pubkey.bytes())
+ {
+ egui::Frame::window(ui.style())
+ .outer_margin(Margin {
+ left: 4.0,
+ right: 4.0,
+ top: 12.0,
+ bottom: 32.0,
+ })
+ .show(ui, |ui| {
+ ProfilePreview::new(&profile, self.img_cache).ui(ui);
+ });
+ }
}
if ui.add(add_column_button()).clicked() {
@@ -352,10 +482,10 @@ impl<'a> AddColumnView<'a> {
};
vec.push(ColumnOptionData {
- title: "Home timeline",
- description: "See recommended notes first",
+ title: "Contacts",
+ description: "See notes from your contacts",
icon: egui::include_image!("../../../../assets/icons/home_icon_dark_4x.png"),
- option: AddColumnOption::Home(source.clone()),
+ option: AddColumnOption::Contacts(source),
});
}
vec.push(ColumnOptionData {
@@ -376,6 +506,12 @@ impl<'a> AddColumnView<'a> {
icon: egui::include_image!("../../../../assets/icons/profile_icon_4x.png"),
option: AddColumnOption::UndecidedIndividual,
});
+ vec.push(ColumnOptionData {
+ title: "Algo",
+ description: "Algorithmic feeds to aid in note discovery",
+ icon: egui::include_image!("../../../../assets/icons/plus_icon_4x.png"),
+ option: AddColumnOption::Algo(AlgoOption::LastPerPubkey(Decision::Undecided)),
+ });
vec
}
@@ -486,6 +622,16 @@ pub fn render_add_column_routes(
);
let resp = match route {
AddColumnRoute::Base => add_column_view.ui(ui),
+ AddColumnRoute::Algo(r) => match r {
+ AddAlgoRoute::Base => add_column_view.algo_ui(ui),
+ AddAlgoRoute::LastPerPubkey => {
+ if let Some(deck_author) = ctx.accounts.get_selected_account() {
+ add_column_view.algo_last_per_pk_ui(ui, deck_author.pubkey)
+ } else {
+ None
+ }
+ }
+ },
AddColumnRoute::UndecidedNotification => add_column_view.notifications_ui(ui),
AddColumnRoute::ExternalNotification => add_column_view.external_notification_ui(ui),
AddColumnRoute::Hashtag => hashtag_ui(ui, ctx.ndb, &mut app.view_state.id_string_map),
@@ -503,21 +649,73 @@ pub fn render_add_column_routes(
ctx.pool,
ctx.note_cache,
app.since_optimize,
- ctx.accounts
- .get_selected_account()
- .as_ref()
- .map(|sa| &sa.pubkey),
);
+
app.columns_mut(ctx.accounts)
- .add_timeline_to_column(col, timeline);
+ .column_mut(col)
+ .router_mut()
+ .route_to_replaced(Route::timeline(timeline.kind.clone()));
+
+ app.timeline_cache
+ .timelines
+ .insert(timeline.kind.clone(), timeline);
}
+
+ AddColumnResponse::Algo(algo_option) => match algo_option {
+ // If we are undecided, we simply route to the LastPerPubkey
+ // algo route selection
+ AlgoOption::LastPerPubkey(Decision::Undecided) => {
+ app.columns_mut(ctx.accounts)
+ .column_mut(col)
+ .router_mut()
+ .route_to(Route::AddColumn(AddColumnRoute::Algo(
+ AddAlgoRoute::LastPerPubkey,
+ )));
+ }
+
+ // We have a decision on where we want the last per pubkey
+ // source to be, so let;s create a timeline from that and
+ // add it to our list of timelines
+ AlgoOption::LastPerPubkey(Decision::Decided(list_kind)) => {
+ let maybe_timeline = {
+ let txn = Transaction::new(ctx.ndb).unwrap();
+ TimelineKind::last_per_pubkey(list_kind).into_timeline(&txn, ctx.ndb)
+ };
+
+ if let Some(mut timeline) = maybe_timeline {
+ crate::timeline::setup_new_timeline(
+ &mut timeline,
+ ctx.ndb,
+ &mut app.subscriptions,
+ ctx.pool,
+ ctx.note_cache,
+ app.since_optimize,
+ );
+
+ app.columns_mut(ctx.accounts)
+ .column_mut(col)
+ .router_mut()
+ .route_to_replaced(Route::timeline(timeline.kind.clone()));
+
+ app.timeline_cache
+ .timelines
+ .insert(timeline.kind.clone(), timeline);
+ } else {
+ // we couldn't fetch the timeline yet... let's let
+ // the user know ?
+
+ // TODO: spin off the list search here instead
+
+ ui.label(format!("error: could not find {:?}", &list_kind));
+ }
+ }
+ },
+
AddColumnResponse::UndecidedNotification => {
app.columns_mut(ctx.accounts)
.column_mut(col)
.router_mut()
- .route_to(crate::route::Route::AddColumn(
- AddColumnRoute::UndecidedNotification,
- ));
+ .route_to(Route::AddColumn(AddColumnRoute::UndecidedNotification));
}
AddColumnResponse::ExternalNotification => {
app.columns_mut(ctx.accounts)
@@ -587,3 +785,37 @@ pub fn hashtag_ui(
})
.inner
}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn test_column_serialize() {
+ use super::{AddAlgoRoute, AddColumnRoute};
+
+ {
+ let data_str = "column:algo_selection:last_per_pubkey";
+ let data = &data_str.split(":").collect::<Vec<&str>>();
+ let mut token_writer = TokenWriter::default();
+ let mut parser = TokenParser::new(&data);
+ let parsed = AddColumnRoute::parse_from_tokens(&mut parser).unwrap();
+ let expected = AddColumnRoute::Algo(AddAlgoRoute::LastPerPubkey);
+ parsed.serialize_tokens(&mut token_writer);
+ assert_eq!(expected, parsed);
+ assert_eq!(token_writer.str(), data_str);
+ }
+
+ {
+ let data_str = "column";
+ let mut token_writer = TokenWriter::default();
+ let data: &[&str] = &[data_str];
+ let mut parser = TokenParser::new(data);
+ let parsed = AddColumnRoute::parse_from_tokens(&mut parser).unwrap();
+ let expected = AddColumnRoute::Base;
+ parsed.serialize_tokens(&mut token_writer);
+ assert_eq!(expected, parsed);
+ assert_eq!(token_writer.str(), data_str);
+ }
+ }
+}
diff --git a/crates/notedeck_columns/src/ui/column/header.rs b/crates/notedeck_columns/src/ui/column/header.rs
@@ -5,7 +5,7 @@ use crate::nav::SwitchingAction;
use crate::{
column::Columns,
route::Route,
- timeline::{ColumnTitle, TimelineId, TimelineKind, TimelineRoute},
+ timeline::{ColumnTitle, TimelineKind},
ui::{
self,
anim::{AnimationHelper, ICON_EXPANSION_MULTIPLE},
@@ -22,7 +22,6 @@ pub struct NavTitle<'a> {
ndb: &'a Ndb,
img_cache: &'a mut ImageCache,
columns: &'a Columns,
- deck_author: Option<&'a Pubkey>,
routes: &'a [Route],
col_id: usize,
}
@@ -32,7 +31,6 @@ impl<'a> NavTitle<'a> {
ndb: &'a Ndb,
img_cache: &'a mut ImageCache,
columns: &'a Columns,
- deck_author: Option<&'a Pubkey>,
routes: &'a [Route],
col_id: usize,
) -> Self {
@@ -40,7 +38,6 @@ impl<'a> NavTitle<'a> {
ndb,
img_cache,
columns,
- deck_author,
routes,
col_id,
}
@@ -123,14 +120,14 @@ impl<'a> NavTitle<'a> {
// not it looks cool
self.title_pfp(ui, prev, 32.0);
- let column_title = prev.title(self.columns);
+ let column_title = prev.title();
let back_resp = match &column_title {
ColumnTitle::Simple(title) => ui.add(Self::back_label(title, color)),
ColumnTitle::NeedsDb(need_db) => {
let txn = Transaction::new(self.ndb).unwrap();
- let title = need_db.title(&txn, self.ndb, self.deck_author);
+ let title = need_db.title(&txn, self.ndb);
ui.add(Self::back_label(title, color))
}
};
@@ -402,14 +399,11 @@ impl<'a> NavTitle<'a> {
})
}
- fn timeline_pfp(&mut self, ui: &mut egui::Ui, id: TimelineId, pfp_size: f32) {
+ fn timeline_pfp(&mut self, ui: &mut egui::Ui, id: &TimelineKind, pfp_size: f32) {
let txn = Transaction::new(self.ndb).unwrap();
- if let Some(pfp) = self
- .columns
- .find_timeline(id)
- .and_then(|tl| tl.kind.pubkey_source())
- .and_then(|pksrc| self.deck_author.map(|da| pksrc.to_pubkey(da)))
+ if let Some(pfp) = id
+ .pubkey()
.and_then(|pk| self.pubkey_pfp(&txn, pk.bytes(), pfp_size))
{
ui.add(pfp);
@@ -422,34 +416,35 @@ impl<'a> NavTitle<'a> {
fn title_pfp(&mut self, ui: &mut egui::Ui, top: &Route, pfp_size: f32) {
match top {
- Route::Timeline(tlr) => match tlr {
- TimelineRoute::Timeline(tlid) => {
- let is_hashtag = self
- .columns
- .find_timeline(*tlid)
- .is_some_and(|tl| matches!(tl.kind, TimelineKind::Hashtag(_)));
-
- if is_hashtag {
- ui.add(
- egui::Image::new(egui::include_image!(
- "../../../../../assets/icons/hashtag_icon_4x.png"
- ))
- .fit_to_exact_size(egui::vec2(pfp_size, pfp_size)),
- );
- } else {
- self.timeline_pfp(ui, *tlid, pfp_size);
- }
+ Route::Timeline(kind) => match kind {
+ TimelineKind::Hashtag(_ht) => {
+ ui.add(
+ egui::Image::new(egui::include_image!(
+ "../../../../../assets/icons/hashtag_icon_4x.png"
+ ))
+ .fit_to_exact_size(egui::vec2(pfp_size, pfp_size)),
+ );
}
- TimelineRoute::Thread(_note_id) => {}
- TimelineRoute::Reply(_note_id) => {}
- TimelineRoute::Quote(_note_id) => {}
-
- TimelineRoute::Profile(pubkey) => {
+ TimelineKind::Profile(pubkey) => {
self.show_profile(ui, pubkey, pfp_size);
}
+
+ TimelineKind::Thread(_) => {
+ // no pfp for threads
+ }
+
+ TimelineKind::Universe
+ | TimelineKind::Algo(_)
+ | TimelineKind::Notifications(_)
+ | TimelineKind::Generic(_)
+ | TimelineKind::List(_) => {
+ self.timeline_pfp(ui, kind, pfp_size);
+ }
},
+ Route::Reply(_) => {}
+ Route::Quote(_) => {}
Route::Accounts(_as) => {}
Route::ComposeNote => {}
Route::AddColumn(_add_col_route) => {}
@@ -480,7 +475,7 @@ impl<'a> NavTitle<'a> {
}
fn title_label(&self, ui: &mut egui::Ui, top: &Route) {
- let column_title = top.title(self.columns);
+ let column_title = top.title();
match &column_title {
ColumnTitle::Simple(title) => {
@@ -489,7 +484,7 @@ impl<'a> NavTitle<'a> {
ColumnTitle::NeedsDb(need_db) => {
let txn = Transaction::new(self.ndb).unwrap();
- let title = need_db.title(&txn, self.ndb, self.deck_author);
+ let title = need_db.title(&txn, self.ndb);
ui.add(Self::title_label_value(title));
}
};
diff --git a/crates/notedeck_columns/src/ui/mention.rs b/crates/notedeck_columns/src/ui/mention.rs
@@ -1,5 +1,5 @@
use crate::ui;
-use crate::{actionbar::NoteAction, profile::get_display_name};
+use crate::{actionbar::NoteAction, profile::get_display_name, timeline::TimelineKind};
use egui::Sense;
use enostr::Pubkey;
use nostrdb::{Ndb, Transaction};
@@ -89,7 +89,9 @@ fn mention_ui(
let note_action = if resp.clicked() {
ui::show_pointer(ui);
- Some(NoteAction::OpenProfile(Pubkey::new(*pk)))
+ Some(NoteAction::OpenTimeline(TimelineKind::profile(
+ Pubkey::new(*pk),
+ )))
} else if resp.hovered() {
ui::show_pointer(ui);
None
diff --git a/crates/notedeck_columns/src/ui/note/contents.rs b/crates/notedeck_columns/src/ui/note/contents.rs
@@ -1,10 +1,9 @@
-use crate::actionbar::NoteAction;
-use crate::images::ImageType;
use crate::ui::{
self,
note::{NoteOptions, NoteResponse},
ProfilePic,
};
+use crate::{actionbar::NoteAction, images::ImageType, timeline::TimelineKind};
use egui::{Color32, Hyperlink, Image, RichText};
use nostrdb::{BlockType, Mention, Ndb, Note, NoteKey, Transaction};
use tracing::warn;
@@ -198,7 +197,15 @@ fn render_note_contents(
BlockType::Hashtag => {
#[cfg(feature = "profiling")]
puffin::profile_scope!("hashtag contents");
- ui.colored_label(link_color, format!("#{}", block.as_str()));
+ let resp = ui.colored_label(link_color, format!("#{}", block.as_str()));
+
+ if resp.clicked() {
+ note_action = Some(NoteAction::OpenTimeline(TimelineKind::Hashtag(
+ block.as_str().to_string(),
+ )));
+ } else if resp.hovered() {
+ ui::show_pointer(ui);
+ }
}
BlockType::Url => {
diff --git a/crates/notedeck_columns/src/ui/note/mod.rs b/crates/notedeck_columns/src/ui/note/mod.rs
@@ -17,6 +17,7 @@ pub use reply_description::reply_desc;
use crate::{
actionbar::NoteAction,
profile::get_display_name,
+ timeline::{ThreadSelection, TimelineKind},
ui::{self, View},
};
@@ -354,8 +355,9 @@ impl<'a> NoteView<'a> {
ui.vertical(|ui| {
ui.horizontal(|ui| {
if self.pfp(note_key, &profile, ui).clicked() {
- note_action =
- Some(NoteAction::OpenProfile(Pubkey::new(*self.note.pubkey())));
+ note_action = Some(NoteAction::OpenTimeline(TimelineKind::profile(
+ Pubkey::new(*self.note.pubkey()),
+ )));
};
let size = ui.available_size();
@@ -415,7 +417,7 @@ impl<'a> NoteView<'a> {
ui.add(&mut contents);
if let Some(action) = contents.action() {
- note_action = Some(*action);
+ note_action = Some(action.clone());
}
if self.options().has_actionbar() {
@@ -430,7 +432,9 @@ impl<'a> NoteView<'a> {
// main design
ui.with_layout(egui::Layout::left_to_right(egui::Align::TOP), |ui| {
if self.pfp(note_key, &profile, ui).clicked() {
- note_action = Some(NoteAction::OpenProfile(Pubkey::new(*self.note.pubkey())));
+ note_action = Some(NoteAction::OpenTimeline(TimelineKind::Profile(
+ Pubkey::new(*self.note.pubkey()),
+ )));
};
ui.with_layout(egui::Layout::top_down(egui::Align::LEFT), |ui| {
@@ -480,7 +484,7 @@ impl<'a> NoteView<'a> {
ui.add(&mut contents);
if let Some(action) = contents.action() {
- note_action = Some(*action);
+ note_action = Some(action.clone());
}
if self.options().has_actionbar() {
@@ -496,7 +500,16 @@ impl<'a> NoteView<'a> {
};
let note_action = if note_hitbox_clicked(ui, hitbox_id, &response.rect, maybe_hitbox) {
- Some(NoteAction::OpenThread(NoteId::new(*self.note.id())))
+ if let Ok(selection) = ThreadSelection::from_note_id(
+ self.ndb,
+ self.note_cache,
+ self.note.txn().unwrap(),
+ NoteId::new(*self.note.id()),
+ ) {
+ Some(NoteAction::OpenTimeline(TimelineKind::Thread(selection)))
+ } else {
+ None
+ }
} else {
note_action
};
diff --git a/crates/notedeck_columns/src/ui/profile/mod.rs b/crates/notedeck_columns/src/ui/profile/mod.rs
@@ -5,7 +5,7 @@ pub mod preview;
pub use edit::EditProfileView;
use egui::load::TexturePoll;
use egui::{vec2, Color32, Label, Layout, Rect, RichText, Rounding, ScrollArea, Sense, Stroke};
-use enostr::{Pubkey, PubkeyRef};
+use enostr::Pubkey;
use nostrdb::{Ndb, ProfileRecord, Transaction};
pub use picture::ProfilePic;
pub use preview::ProfilePreview;
@@ -15,7 +15,7 @@ use crate::{
actionbar::NoteAction,
colors, images,
profile::get_display_name,
- timeline::{TimelineCache, TimelineCacheKey},
+ timeline::{TimelineCache, TimelineKind},
ui::{
note::NoteOptions,
timeline::{tabs_ui, TimelineTabView},
@@ -90,7 +90,7 @@ impl<'a> ProfileView<'a> {
self.ndb,
self.note_cache,
&txn,
- TimelineCacheKey::Profile(PubkeyRef::new(self.pubkey.bytes())),
+ &TimelineKind::Profile(*self.pubkey),
)
.get_ptr();
diff --git a/crates/notedeck_columns/src/ui/side_panel.rs b/crates/notedeck_columns/src/ui/side_panel.rs
@@ -288,7 +288,7 @@ impl<'a> DesktopSidePanel<'a> {
if router
.routes()
.iter()
- .any(|&r| r == Route::Accounts(AccountsRoute::Accounts))
+ .any(|r| r == &Route::Accounts(AccountsRoute::Accounts))
{
// return if we are already routing to accounts
router.go_back();
@@ -297,7 +297,7 @@ impl<'a> DesktopSidePanel<'a> {
}
}
SidePanelAction::Settings => {
- if router.routes().iter().any(|&r| r == Route::Relays) {
+ if router.routes().iter().any(|r| r == &Route::Relays) {
// return if we are already routing to accounts
router.go_back();
} else {
@@ -308,7 +308,7 @@ impl<'a> DesktopSidePanel<'a> {
if router
.routes()
.iter()
- .any(|&r| matches!(r, Route::AddColumn(_)))
+ .any(|r| matches!(r, Route::AddColumn(_)))
{
router.go_back();
} else {
@@ -316,7 +316,7 @@ impl<'a> DesktopSidePanel<'a> {
}
}
SidePanelAction::ComposeNote => {
- if router.routes().iter().any(|&r| r == Route::ComposeNote) {
+ if router.routes().iter().any(|r| r == &Route::ComposeNote) {
router.go_back();
} else {
router.route_to(Route::ComposeNote);
@@ -331,7 +331,7 @@ impl<'a> DesktopSidePanel<'a> {
info!("Clicked expand side panel button");
}
SidePanelAction::Support => {
- if router.routes().iter().any(|&r| r == Route::Support) {
+ if router.routes().iter().any(|r| r == &Route::Support) {
router.go_back();
} else {
support.refresh();
@@ -339,7 +339,7 @@ impl<'a> DesktopSidePanel<'a> {
}
}
SidePanelAction::NewDeck => {
- if router.routes().iter().any(|&r| r == Route::NewDeck) {
+ if router.routes().iter().any(|r| r == &Route::NewDeck) {
router.go_back();
} else {
router.route_to(Route::NewDeck);
@@ -351,7 +351,7 @@ impl<'a> DesktopSidePanel<'a> {
)))
}
SidePanelAction::EditDeck(index) => {
- if router.routes().iter().any(|&r| r == Route::EditDeck(index)) {
+ if router.routes().iter().any(|r| r == &Route::EditDeck(index)) {
router.go_back();
} else {
switching_response = Some(crate::nav::SwitchingAction::Decks(
diff --git a/crates/notedeck_columns/src/ui/thread.rs b/crates/notedeck_columns/src/ui/thread.rs
@@ -1,6 +1,6 @@
use crate::{
actionbar::NoteAction,
- timeline::{TimelineCache, TimelineCacheKey},
+ timeline::{ThreadSelection, TimelineCache, TimelineKind},
ui::note::NoteOptions,
};
@@ -83,7 +83,7 @@ impl<'a> ThreadView<'a> {
self.ndb,
self.note_cache,
&txn,
- TimelineCacheKey::Thread(root_id),
+ &TimelineKind::Thread(ThreadSelection::from_root_id(root_id.to_owned())),
)
.get_ptr();
diff --git a/crates/notedeck_columns/src/ui/timeline.rs b/crates/notedeck_columns/src/ui/timeline.rs
@@ -3,8 +3,7 @@ use std::f32::consts::PI;
use crate::actionbar::NoteAction;
use crate::timeline::TimelineTab;
use crate::{
- column::Columns,
- timeline::{TimelineId, ViewFilter},
+ timeline::{TimelineCache, TimelineKind, ViewFilter},
ui,
ui::note::NoteOptions,
};
@@ -19,8 +18,8 @@ use tracing::{error, warn};
use super::anim::{AnimationHelper, ICON_EXPANSION_MULTIPLE};
pub struct TimelineView<'a> {
- timeline_id: TimelineId,
- columns: &'a mut Columns,
+ timeline_id: &'a TimelineKind,
+ timeline_cache: &'a mut TimelineCache,
ndb: &'a Ndb,
note_cache: &'a mut NoteCache,
img_cache: &'a mut ImageCache,
@@ -31,8 +30,8 @@ pub struct TimelineView<'a> {
impl<'a> TimelineView<'a> {
pub fn new(
- timeline_id: TimelineId,
- columns: &'a mut Columns,
+ timeline_id: &'a TimelineKind,
+ timeline_cache: &'a mut TimelineCache,
ndb: &'a Ndb,
note_cache: &'a mut NoteCache,
img_cache: &'a mut ImageCache,
@@ -43,7 +42,7 @@ impl<'a> TimelineView<'a> {
TimelineView {
ndb,
timeline_id,
- columns,
+ timeline_cache,
note_cache,
img_cache,
reverse,
@@ -57,7 +56,7 @@ impl<'a> TimelineView<'a> {
ui,
self.ndb,
self.timeline_id,
- self.columns,
+ self.timeline_cache,
self.note_cache,
self.img_cache,
self.reverse,
@@ -76,8 +75,8 @@ impl<'a> TimelineView<'a> {
fn timeline_ui(
ui: &mut egui::Ui,
ndb: &Ndb,
- timeline_id: TimelineId,
- columns: &mut Columns,
+ timeline_id: &TimelineKind,
+ timeline_cache: &mut TimelineCache,
note_cache: &mut NoteCache,
img_cache: &mut ImageCache,
reversed: bool,
@@ -92,7 +91,7 @@ fn timeline_ui(
*/
let scroll_id = {
- let timeline = if let Some(timeline) = columns.find_timeline_mut(timeline_id) {
+ let timeline = if let Some(timeline) = timeline_cache.timelines.get_mut(timeline_id) {
timeline
} else {
error!("tried to render timeline in column, but timeline was missing");
@@ -142,7 +141,7 @@ fn timeline_ui(
}
let scroll_output = scroll_area.show(ui, |ui| {
- let timeline = if let Some(timeline) = columns.find_timeline_mut(timeline_id) {
+ let timeline = if let Some(timeline) = timeline_cache.timelines.get(timeline_id) {
timeline
} else {
error!("tried to render timeline in column, but timeline was missing");
diff --git a/crates/notedeck_columns/src/unknowns.rs b/crates/notedeck_columns/src/unknowns.rs
@@ -1,4 +1,4 @@
-use crate::{column::Columns, Result};
+use crate::{timeline::TimelineCache, Result};
use nostrdb::{Ndb, NoteKey, Transaction};
use notedeck::{CachedNote, NoteCache, UnknownIds};
use tracing::error;
@@ -6,12 +6,12 @@ use tracing::error;
pub fn update_from_columns(
txn: &Transaction,
unknown_ids: &mut UnknownIds,
- columns: &Columns,
+ timeline_cache: &TimelineCache,
ndb: &Ndb,
note_cache: &mut NoteCache,
) -> bool {
let before = unknown_ids.ids().len();
- if let Err(e) = get_unknown_ids(txn, unknown_ids, columns, ndb, note_cache) {
+ if let Err(e) = get_unknown_ids(txn, unknown_ids, timeline_cache, ndb, note_cache) {
error!("UnknownIds::update {e}");
}
let after = unknown_ids.ids().len();
@@ -27,7 +27,7 @@ pub fn update_from_columns(
pub fn get_unknown_ids(
txn: &Transaction,
unknown_ids: &mut UnknownIds,
- columns: &Columns,
+ timeline_cache: &TimelineCache,
ndb: &Ndb,
note_cache: &mut NoteCache,
) -> Result<()> {
@@ -36,7 +36,7 @@ pub fn get_unknown_ids(
let mut new_cached_notes: Vec<(NoteKey, CachedNote)> = vec![];
- for timeline in columns.timelines() {
+ for (_kind, timeline) in timeline_cache.timelines.iter() {
for noteref in timeline.all_or_any_notes() {
let note = ndb.get_note_by_key(txn, noteref.key)?;
let note_key = note.key().unwrap();
diff --git a/crates/tokenator/Cargo.toml b/crates/tokenator/Cargo.toml
@@ -0,0 +1,8 @@
+[package]
+name = "tokenator"
+version = "0.1.0"
+edition = "2021"
+description = "A simple library for parsing and serializing string tokens"
+
+[dependencies]
+hex = { workspace = true }
diff --git a/crates/tokenator/README.md b/crates/tokenator/README.md
@@ -0,0 +1,5 @@
+
+# tokenator
+
+Tokenator is a simple string token parser and serializer.
+
diff --git a/crates/tokenator/src/lib.rs b/crates/tokenator/src/lib.rs
@@ -0,0 +1,232 @@
/// The token that was expected and the token that was actually found,
/// carried by [`ParseError::UnexpectedToken`].
#[derive(Debug, Clone)]
pub struct UnexpectedToken<'fnd, 'exp> {
    pub expected: &'exp str,
    pub found: &'fnd str,
}
+
/// Errors that can occur while parsing a token stream.
#[derive(Debug, Clone)]
pub enum ParseError<'a> {
    /// Not done parsing yet
    Incomplete,

    /// All parsing options failed
    AltAllFailed,

    /// There was some issue decoding the data
    DecodeFailed,

    /// A hex-encoded token failed to decode (wrong characters or length)
    HexDecodeFailed,

    /// We encountered an unexpected token
    UnexpectedToken(UnexpectedToken<'a, 'static>),

    /// No more tokens
    EOF,
}
+
/// Incrementally builds a delimiter-separated string of tokens.
pub struct TokenWriter {
    // Separator inserted between tokens (never before the first one).
    delim: &'static str,
    // How many tokens have been written so far.
    tokens_written: usize,
    // Accumulated output. Only ever extended with the bytes of `&str`
    // values, so it always holds valid UTF-8.
    buf: Vec<u8>,
}

impl Default for TokenWriter {
    /// A writer that joins tokens with `:`.
    fn default() -> Self {
        Self::new(":")
    }
}

impl TokenWriter {
    /// Create a writer that joins tokens with `delim`.
    pub fn new(delim: &'static str) -> Self {
        let buf = vec![];
        let tokens_written = 0;
        Self {
            buf,
            tokens_written,
            delim,
        }
    }

    /// Append `token`, preceded by the delimiter unless it is the first
    /// token written.
    pub fn write_token(&mut self, token: &str) {
        if self.tokens_written > 0 {
            self.buf.extend_from_slice(self.delim.as_bytes())
        }
        self.buf.extend_from_slice(token.as_bytes());
        self.tokens_written += 1;
    }

    /// View the serialized output as a string slice.
    pub fn str(&self) -> &str {
        // The buffer is only ever extended with the bytes of `&str`
        // slices (tokens and the delimiter), and a concatenation of
        // valid UTF-8 is valid UTF-8. Use the checked conversion rather
        // than `from_utf8_unchecked`: the validation cost is negligible
        // here and it removes an `unsafe` block.
        std::str::from_utf8(self.buffer()).expect("TokenWriter buffer is valid UTF-8")
    }

    /// Raw bytes of the serialized output.
    pub fn buffer(&self) -> &[u8] {
        &self.buf
    }
}
+
+#[derive(Clone)]
+pub struct TokenParser<'a> {
+ tokens: &'a [&'a str],
+ index: usize,
+}
+
+impl<'a> TokenParser<'a> {
+ /// alt tries each parser in `routes` until one succeeds.
+ /// If all fail, returns `ParseError::AltAllFailed`.
+ #[allow(clippy::type_complexity)]
+ pub fn alt<R>(
+ parser: &mut TokenParser<'a>,
+ routes: &[fn(&mut TokenParser<'a>) -> Result<R, ParseError<'a>>],
+ ) -> Result<R, ParseError<'a>> {
+ let start = parser.index;
+ for route in routes {
+ match route(parser) {
+ Ok(r) => return Ok(r), // if success, stop trying more routes
+ Err(_) => {
+ // revert index & try next route
+ parser.index = start;
+ }
+ }
+ }
+ // if we tried them all and none succeeded
+ Err(ParseError::AltAllFailed)
+ }
+
+ pub fn new(tokens: &'a [&'a str]) -> Self {
+ let index = 0;
+ Self { tokens, index }
+ }
+
+ pub fn peek_parse_token(&mut self, expected: &'static str) -> Result<&'a str, ParseError<'a>> {
+ let found = self.peek_token()?;
+ if found == expected {
+ Ok(found)
+ } else {
+ Err(ParseError::UnexpectedToken(UnexpectedToken {
+ expected,
+ found,
+ }))
+ }
+ }
+
+ /// Parse a list of alternative tokens, returning success if any match.
+ pub fn parse_any_token(
+ &mut self,
+ expected: &[&'static str],
+ ) -> Result<&'a str, ParseError<'a>> {
+ for token in expected {
+ let result = self.try_parse(|p| p.parse_token(token));
+ if result.is_ok() {
+ return result;
+ }
+ }
+
+ Err(ParseError::AltAllFailed)
+ }
+
+ pub fn parse_token(&mut self, expected: &'static str) -> Result<&'a str, ParseError<'a>> {
+ let found = self.pull_token()?;
+ if found == expected {
+ Ok(found)
+ } else {
+ Err(ParseError::UnexpectedToken(UnexpectedToken {
+ expected,
+ found,
+ }))
+ }
+ }
+
+ /// Ensure that we have parsed all tokens. If not the parser backtracks
+ /// and the parse does not succeed, returning [`ParseError::Incomplete`].
+ pub fn parse_all<R>(
+ &mut self,
+ parse_fn: impl FnOnce(&mut Self) -> Result<R, ParseError<'a>>,
+ ) -> Result<R, ParseError<'a>> {
+ let start = self.index;
+ let result = parse_fn(self);
+
+ // If the parser closure fails, revert the index
+ if result.is_err() {
+ self.index = start;
+ result
+ } else if !self.is_eof() {
+ Err(ParseError::Incomplete)
+ } else {
+ result
+ }
+ }
+
+ /// Attempt to parse something, backtrack if we fail.
+ pub fn try_parse<R>(
+ &mut self,
+ parse_fn: impl FnOnce(&mut Self) -> Result<R, ParseError<'a>>,
+ ) -> Result<R, ParseError<'a>> {
+ let start = self.index;
+ let result = parse_fn(self);
+
+ // If the parser closure fails, revert the index
+ if result.is_err() {
+ self.index = start;
+ result
+ } else {
+ result
+ }
+ }
+
+ pub fn pull_token(&mut self) -> Result<&'a str, ParseError<'a>> {
+ let token = self
+ .tokens
+ .get(self.index)
+ .copied()
+ .ok_or(ParseError::EOF)?;
+ self.index += 1;
+ Ok(token)
+ }
+
+ pub fn unpop_token(&mut self) {
+ if (self.index as isize) - 1 < 0 {
+ return;
+ }
+
+ self.index -= 1;
+ }
+
+ pub fn peek_token(&self) -> Result<&'a str, ParseError<'a>> {
+ self.tokens()
+ .first()
+ .ok_or(ParseError::DecodeFailed)
+ .copied()
+ }
+
+ #[inline]
+ pub fn tokens(&self) -> &'a [&'a str] {
+ let min_index = self.index.min(self.tokens.len());
+ &self.tokens[min_index..]
+ }
+
+ #[inline]
+ pub fn is_eof(&self) -> bool {
+ self.tokens().is_empty()
+ }
+}
+
/// Types that round-trip through the string token format: parsed from a
/// [`TokenParser`] and written back out via a [`TokenWriter`].
pub trait TokenSerializable: Sized {
    /// Parse a value of this type from the parser's token stream,
    /// advancing the parser past the consumed tokens on success.
    fn parse_from_tokens<'a>(parser: &mut TokenParser<'a>) -> Result<Self, ParseError<'a>>;
    /// Serialize this value as tokens into `writer`.
    fn serialize_tokens(&self, writer: &mut TokenWriter);
}
+
+/// Parse a 32 byte hex string
+pub fn parse_hex_id<'a>(parser: &mut TokenParser<'a>) -> Result<[u8; 32], ParseError<'a>> {
+ use hex;
+
+ let hexid = parser.pull_token()?;
+ hex::decode(hexid)
+ .map_err(|_| ParseError::HexDecodeFailed)?
+ .as_slice()
+ .try_into()
+ .map_err(|_| ParseError::HexDecodeFailed)
+}
diff --git a/shell.nix b/shell.nix
@@ -14,7 +14,6 @@ mkShell ({
#cargo-edit
#cargo-watch
rustup
- rustfmt
libiconv
pkg-config
#cmake