Switch to unified timeline cache via TimelineKinds

This is a fairly large rewrite that unifies our threads, timelines and
profiles. Now all timelines have a MultiSubscriber, and can be added
to and removed from columns just like Threads and Profiles.

Signed-off-by: William Casarin <jb55@jb55.com>
This commit is contained in:
William Casarin
2025-01-22 15:59:21 -08:00
parent d46e526a45
commit 0cc1d8a600
39 changed files with 1395 additions and 2055 deletions

View File

@@ -1,7 +1,7 @@
use std::{collections::HashMap, fmt, str::FromStr};
use enostr::Pubkey;
use nostrdb::Ndb;
use nostrdb::{Ndb, Transaction};
use serde::{Deserialize, Serialize};
use tracing::{error, info};
@@ -9,16 +9,20 @@ use crate::{
column::{Columns, IntermediaryRoute},
decks::{Deck, Decks, DecksCache},
route::Route,
timeline::TimelineKind,
timeline::{TimelineCache, TimelineKind},
Error,
};
use notedeck::{storage, DataPath, DataPathType, Directory};
use tokenator::{ParseError, TokenParser, TokenSerializable, TokenWriter};
use tokenator::{ParseError, TokenParser, TokenWriter};
pub static DECKS_CACHE_FILE: &str = "decks_cache.json";
pub fn load_decks_cache(path: &DataPath, ndb: &Ndb) -> Option<DecksCache> {
pub fn load_decks_cache(
path: &DataPath,
ndb: &Ndb,
timeline_cache: &mut TimelineCache,
) -> Option<DecksCache> {
let data_path = path.path(DataPathType::Setting);
let decks_cache_str = match Directory::new(data_path).get_file(DECKS_CACHE_FILE.to_owned()) {
@@ -35,7 +39,9 @@ pub fn load_decks_cache(path: &DataPath, ndb: &Ndb) -> Option<DecksCache> {
let serializable_decks_cache =
serde_json::from_str::<SerializableDecksCache>(&decks_cache_str).ok()?;
serializable_decks_cache.decks_cache(ndb).ok()
serializable_decks_cache
.decks_cache(ndb, timeline_cache)
.ok()
}
pub fn save_decks_cache(path: &DataPath, decks_cache: &DecksCache) {
@@ -81,14 +87,17 @@ impl SerializableDecksCache {
}
}
pub fn decks_cache(self, ndb: &Ndb) -> Result<DecksCache, Error> {
pub fn decks_cache(
self,
ndb: &Ndb,
timeline_cache: &mut TimelineCache,
) -> Result<DecksCache, Error> {
let account_to_decks = self
.decks_cache
.into_iter()
.map(|(pubkey, serializable_decks)| {
let deck_key = pubkey.bytes();
serializable_decks
.decks(ndb, deck_key)
.decks(ndb, timeline_cache, &pubkey)
.map(|decks| (pubkey, decks))
})
.collect::<Result<HashMap<Pubkey, Decks>, Error>>()?;
@@ -142,12 +151,17 @@ impl SerializableDecks {
}
}
fn decks(self, ndb: &Ndb, deck_key: &[u8; 32]) -> Result<Decks, Error> {
fn decks(
self,
ndb: &Ndb,
timeline_cache: &mut TimelineCache,
deck_key: &Pubkey,
) -> Result<Decks, Error> {
Ok(Decks::from_decks(
self.active_deck,
self.decks
.into_iter()
.map(|d| d.deck(ndb, deck_key))
.map(|d| d.deck(ndb, timeline_cache, deck_key))
.collect::<Result<_, _>>()?,
))
}
@@ -252,8 +266,13 @@ impl SerializableDeck {
SerializableDeck { metadata, columns }
}
pub fn deck(self, ndb: &Ndb, deck_user: &[u8; 32]) -> Result<Deck, Error> {
let columns = deserialize_columns(ndb, deck_user, self.columns);
pub fn deck(
self,
ndb: &Ndb,
timeline_cache: &mut TimelineCache,
deck_user: &Pubkey,
) -> Result<Deck, Error> {
let columns = deserialize_columns(ndb, timeline_cache, deck_user, self.columns);
let deserialized_metadata = deserialize_metadata(self.metadata)
.ok_or(Error::Generic("Could not deserialize metadata".to_owned()))?;
@@ -292,7 +311,12 @@ fn serialize_columns(columns: &Columns) -> Vec<Vec<String>> {
cols_serialized
}
fn deserialize_columns(ndb: &Ndb, deck_user: &[u8; 32], columns: Vec<Vec<String>>) -> Columns {
fn deserialize_columns(
ndb: &Ndb,
timeline_cache: &mut TimelineCache,
deck_user: &Pubkey,
columns: Vec<Vec<String>>,
) -> Columns {
let mut cols = Columns::new();
for column in columns {
let mut cur_routes = Vec::new();
@@ -301,11 +325,9 @@ fn deserialize_columns(ndb: &Ndb, deck_user: &[u8; 32], columns: Vec<Vec<String>
let tokens: Vec<&str> = route.split(":").collect();
let mut parser = TokenParser::new(&tokens);
match CleanIntermediaryRoute::parse_from_tokens(&mut parser) {
match CleanIntermediaryRoute::parse(&mut parser, deck_user) {
Ok(route_intermediary) => {
if let Some(ir) =
route_intermediary.into_intermediary_route(ndb, Some(deck_user))
{
if let Some(ir) = route_intermediary.into_intermediary_route(ndb) {
cur_routes.push(ir);
}
}
@@ -316,7 +338,7 @@ fn deserialize_columns(ndb: &Ndb, deck_user: &[u8; 32], columns: Vec<Vec<String>
}
if !cur_routes.is_empty() {
cols.insert_intermediary_routes(cur_routes);
cols.insert_intermediary_routes(timeline_cache, cur_routes);
}
}
@@ -329,48 +351,38 @@ enum CleanIntermediaryRoute {
}
impl CleanIntermediaryRoute {
fn into_intermediary_route(
self,
ndb: &Ndb,
user: Option<&[u8; 32]>,
) -> Option<IntermediaryRoute> {
fn into_intermediary_route(self, ndb: &Ndb) -> Option<IntermediaryRoute> {
match self {
CleanIntermediaryRoute::ToTimeline(timeline_kind) => Some(IntermediaryRoute::Timeline(
timeline_kind.into_timeline(ndb, user)?,
)),
CleanIntermediaryRoute::ToTimeline(timeline_kind) => {
let txn = Transaction::new(ndb).unwrap();
Some(IntermediaryRoute::Timeline(
timeline_kind.into_timeline(&txn, ndb)?,
))
}
CleanIntermediaryRoute::ToRoute(route) => Some(IntermediaryRoute::Route(route)),
}
}
}
impl TokenSerializable for CleanIntermediaryRoute {
fn serialize_tokens(&self, writer: &mut TokenWriter) {
match self {
CleanIntermediaryRoute::ToTimeline(tlk) => {
tlk.serialize_tokens(writer);
}
CleanIntermediaryRoute::ToRoute(route) => {
route.serialize_tokens(writer);
}
fn parse<'a>(
parser: &mut TokenParser<'a>,
deck_author: &Pubkey,
) -> Result<Self, ParseError<'a>> {
let timeline = parser.try_parse(|p| {
Ok(CleanIntermediaryRoute::ToTimeline(TimelineKind::parse(
p,
deck_author,
)?))
});
if timeline.is_ok() {
return timeline;
}
}
fn parse_from_tokens<'a>(parser: &mut TokenParser<'a>) -> Result<Self, ParseError<'a>> {
TokenParser::alt(
parser,
&[
|p| {
Ok(CleanIntermediaryRoute::ToTimeline(
TimelineKind::parse_from_tokens(p)?,
))
},
|p| {
Ok(CleanIntermediaryRoute::ToRoute(Route::parse_from_tokens(
p,
)?))
},
],
)
parser.try_parse(|p| {
Ok(CleanIntermediaryRoute::ToRoute(Route::parse(
p,
deck_author,
)?))
})
}
}

View File

@@ -1,697 +0,0 @@
use enostr::{NoteId, Pubkey};
use nostrdb::Ndb;
use serde::{Deserialize, Deserializer};
use tracing::error;
use crate::{
accounts::AccountsRoute,
column::{Columns, IntermediaryRoute},
route::Route,
timeline::{kind::ListKind, PubkeySource, Timeline, TimelineId, TimelineKind, TimelineRoute},
ui::add_column::AddColumnRoute,
Result,
};
use notedeck::{DataPath, DataPathType, Directory};
pub static COLUMNS_FILE: &str = "columns.json";
fn columns_json(path: &DataPath) -> Option<String> {
let data_path = path.path(DataPathType::Setting);
Directory::new(data_path)
.get_file(COLUMNS_FILE.to_string())
.ok()
}
#[derive(Deserialize, Debug, PartialEq)]
enum MigrationTimelineRoute {
Timeline(u32),
Thread(String),
Profile(String),
Reply(String),
Quote(String),
}
impl MigrationTimelineRoute {
fn timeline_route(self) -> Option<TimelineRoute> {
match self {
MigrationTimelineRoute::Timeline(id) => {
Some(TimelineRoute::Timeline(TimelineId::new(id)))
}
MigrationTimelineRoute::Thread(note_id_hex) => {
Some(TimelineRoute::Thread(NoteId::from_hex(&note_id_hex).ok()?))
}
MigrationTimelineRoute::Profile(pubkey_hex) => {
Some(TimelineRoute::Profile(Pubkey::from_hex(&pubkey_hex).ok()?))
}
MigrationTimelineRoute::Reply(note_id_hex) => {
Some(TimelineRoute::Reply(NoteId::from_hex(&note_id_hex).ok()?))
}
MigrationTimelineRoute::Quote(note_id_hex) => {
Some(TimelineRoute::Quote(NoteId::from_hex(&note_id_hex).ok()?))
}
}
}
}
#[derive(Deserialize, Debug, PartialEq)]
enum MigrationRoute {
Timeline(MigrationTimelineRoute),
Accounts(MigrationAccountsRoute),
Relays,
ComposeNote,
AddColumn(MigrationAddColumnRoute),
Support,
}
impl MigrationRoute {
fn route(self) -> Option<Route> {
match self {
MigrationRoute::Timeline(migration_timeline_route) => {
Some(Route::Timeline(migration_timeline_route.timeline_route()?))
}
MigrationRoute::Accounts(migration_accounts_route) => {
Some(Route::Accounts(migration_accounts_route.accounts_route()))
}
MigrationRoute::Relays => Some(Route::Relays),
MigrationRoute::ComposeNote => Some(Route::ComposeNote),
MigrationRoute::AddColumn(migration_add_column_route) => Some(Route::AddColumn(
migration_add_column_route.add_column_route(),
)),
MigrationRoute::Support => Some(Route::Support),
}
}
}
#[derive(Deserialize, Debug, PartialEq)]
enum MigrationAccountsRoute {
Accounts,
AddAccount,
}
impl MigrationAccountsRoute {
fn accounts_route(self) -> AccountsRoute {
match self {
MigrationAccountsRoute::Accounts => AccountsRoute::Accounts,
MigrationAccountsRoute::AddAccount => AccountsRoute::AddAccount,
}
}
}
#[derive(Deserialize, Debug, PartialEq)]
enum MigrationAddColumnRoute {
Base,
UndecidedNotification,
ExternalNotification,
Hashtag,
}
impl MigrationAddColumnRoute {
fn add_column_route(self) -> AddColumnRoute {
match self {
MigrationAddColumnRoute::Base => AddColumnRoute::Base,
MigrationAddColumnRoute::UndecidedNotification => AddColumnRoute::UndecidedNotification,
MigrationAddColumnRoute::ExternalNotification => AddColumnRoute::ExternalNotification,
MigrationAddColumnRoute::Hashtag => AddColumnRoute::Hashtag,
}
}
}
#[derive(Debug, PartialEq)]
struct MigrationColumn {
routes: Vec<MigrationRoute>,
}
impl<'de> Deserialize<'de> for MigrationColumn {
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let routes = Vec::<MigrationRoute>::deserialize(deserializer)?;
Ok(MigrationColumn { routes })
}
}
#[derive(Deserialize, Debug)]
struct MigrationColumns {
columns: Vec<MigrationColumn>,
timelines: Vec<MigrationTimeline>,
}
#[derive(Deserialize, Debug, Clone, PartialEq)]
struct MigrationTimeline {
id: u32,
kind: MigrationTimelineKind,
}
impl MigrationTimeline {
fn into_timeline(self, ndb: &Ndb, deck_user_pubkey: Option<&[u8; 32]>) -> Option<Timeline> {
self.kind
.into_timeline_kind()?
.into_timeline(ndb, deck_user_pubkey)
}
}
#[derive(Deserialize, Clone, Debug, PartialEq)]
enum MigrationListKind {
Contact(MigrationPubkeySource),
}
impl MigrationListKind {
fn list_kind(self) -> Option<ListKind> {
match self {
MigrationListKind::Contact(migration_pubkey_source) => {
Some(ListKind::Contact(migration_pubkey_source.pubkey_source()?))
}
}
}
}
#[derive(Deserialize, Clone, Debug, PartialEq)]
enum MigrationPubkeySource {
Explicit(String),
DeckAuthor,
}
impl MigrationPubkeySource {
fn pubkey_source(self) -> Option<PubkeySource> {
match self {
MigrationPubkeySource::Explicit(hex) => {
Some(PubkeySource::Explicit(Pubkey::from_hex(hex.as_str()).ok()?))
}
MigrationPubkeySource::DeckAuthor => Some(PubkeySource::DeckAuthor),
}
}
}
#[derive(Deserialize, Clone, Debug, PartialEq)]
enum MigrationTimelineKind {
List(MigrationListKind),
Notifications(MigrationPubkeySource),
Profile(MigrationPubkeySource),
Universe,
Generic,
Hashtag(String),
}
impl MigrationTimelineKind {
fn into_timeline_kind(self) -> Option<TimelineKind> {
match self {
MigrationTimelineKind::List(migration_list_kind) => {
Some(TimelineKind::List(migration_list_kind.list_kind()?))
}
MigrationTimelineKind::Notifications(migration_pubkey_source) => Some(
TimelineKind::Notifications(migration_pubkey_source.pubkey_source()?),
),
MigrationTimelineKind::Profile(migration_pubkey_source) => Some(TimelineKind::Profile(
migration_pubkey_source.pubkey_source()?,
)),
MigrationTimelineKind::Universe => Some(TimelineKind::Universe),
MigrationTimelineKind::Generic => Some(TimelineKind::Generic),
MigrationTimelineKind::Hashtag(hashtag) => Some(TimelineKind::Hashtag(hashtag)),
}
}
}
impl MigrationColumns {
fn into_columns(self, ndb: &Ndb, deck_pubkey: Option<&[u8; 32]>) -> Columns {
let mut columns = Columns::default();
for column in self.columns {
let mut cur_routes = Vec::new();
for route in column.routes {
match route {
MigrationRoute::Timeline(MigrationTimelineRoute::Timeline(timeline_id)) => {
if let Some(migration_tl) =
self.timelines.iter().find(|tl| tl.id == timeline_id)
{
let tl = migration_tl.clone().into_timeline(ndb, deck_pubkey);
if let Some(tl) = tl {
cur_routes.push(IntermediaryRoute::Timeline(tl));
} else {
error!("Problem deserializing timeline {:?}", migration_tl);
}
}
}
MigrationRoute::Timeline(MigrationTimelineRoute::Thread(_thread)) => {}
_ => {
if let Some(route) = route.route() {
cur_routes.push(IntermediaryRoute::Route(route));
}
}
}
}
if !cur_routes.is_empty() {
columns.insert_intermediary_routes(cur_routes);
}
}
columns
}
}
fn string_to_columns(
serialized_columns: String,
ndb: &Ndb,
user: Option<&[u8; 32]>,
) -> Option<Columns> {
Some(
deserialize_columns_string(serialized_columns)
.ok()?
.into_columns(ndb, user),
)
}
pub fn deserialize_columns(path: &DataPath, ndb: &Ndb, user: Option<&[u8; 32]>) -> Option<Columns> {
string_to_columns(columns_json(path)?, ndb, user)
}
fn deserialize_columns_string(serialized_columns: String) -> Result<MigrationColumns> {
Ok(
serde_json::from_str::<MigrationColumns>(&serialized_columns)
.map_err(notedeck::Error::Json)?,
)
}
#[cfg(test)]
mod tests {
use crate::storage::migration::{
MigrationColumn, MigrationListKind, MigrationPubkeySource, MigrationRoute,
MigrationTimeline, MigrationTimelineKind, MigrationTimelineRoute,
};
impl MigrationColumn {
fn from_route(route: MigrationRoute) -> Self {
Self {
routes: vec![route],
}
}
fn from_routes(routes: Vec<MigrationRoute>) -> Self {
Self { routes }
}
}
impl MigrationTimeline {
fn new(id: u32, kind: MigrationTimelineKind) -> Self {
Self { id, kind }
}
}
use super::*;
#[test]
fn multi_column() {
let route = r#"{"columns":[[{"Timeline":{"Timeline":2}}],[{"Timeline":{"Timeline":0}}],[{"Timeline":{"Timeline":1}}]],"timelines":[{"id":0,"kind":{"List":{"Contact":{"Explicit":"aa733081e4f0f79dd43023d8983265593f2b41a988671cfcef3f489b91ad93fe"}}}},{"id":1,"kind":{"Hashtag":"introductions"}},{"id":2,"kind":"Universe"}]}"#; // Multi-column
let deserialized_columns = deserialize_columns_string(route.to_string());
assert!(deserialized_columns.is_ok());
let migration_cols = deserialized_columns.unwrap();
assert_eq!(migration_cols.columns.len(), 3);
assert_eq!(
*migration_cols.columns.first().unwrap(),
MigrationColumn::from_route(MigrationRoute::Timeline(
MigrationTimelineRoute::Timeline(2)
))
);
assert_eq!(
*migration_cols.columns.get(1).unwrap(),
MigrationColumn::from_route(MigrationRoute::Timeline(
MigrationTimelineRoute::Timeline(0)
))
);
assert_eq!(
*migration_cols.columns.get(2).unwrap(),
MigrationColumn::from_route(MigrationRoute::Timeline(
MigrationTimelineRoute::Timeline(1)
))
);
assert_eq!(migration_cols.timelines.len(), 3);
assert_eq!(
*migration_cols.timelines.first().unwrap(),
MigrationTimeline::new(
0,
MigrationTimelineKind::List(MigrationListKind::Contact(
MigrationPubkeySource::Explicit(
"aa733081e4f0f79dd43023d8983265593f2b41a988671cfcef3f489b91ad93fe"
.to_owned()
)
))
)
);
assert_eq!(
*migration_cols.timelines.get(1).unwrap(),
MigrationTimeline::new(
1,
MigrationTimelineKind::Hashtag("introductions".to_owned())
)
);
assert_eq!(
*migration_cols.timelines.get(2).unwrap(),
MigrationTimeline::new(2, MigrationTimelineKind::Universe)
)
}
#[test]
fn base() {
let route = r#"{"columns":[[{"AddColumn":"Base"}]],"timelines":[]}"#;
let deserialized_columns = deserialize_columns_string(route.to_string());
assert!(deserialized_columns.is_ok());
let migration_cols = deserialized_columns.unwrap();
assert_eq!(migration_cols.columns.len(), 1);
assert_eq!(
*migration_cols.columns.first().unwrap(),
MigrationColumn::from_route(MigrationRoute::AddColumn(MigrationAddColumnRoute::Base))
);
assert!(migration_cols.timelines.is_empty());
}
#[test]
fn universe() {
let route = r#"{"columns":[[{"Timeline":{"Timeline":0}}]],"timelines":[{"id":0,"kind":"Universe"}]}"#;
let deserialized_columns = deserialize_columns_string(route.to_string());
assert!(deserialized_columns.is_ok());
let migration_cols = deserialized_columns.unwrap();
assert_eq!(migration_cols.columns.len(), 1);
assert_eq!(
*migration_cols.columns.first().unwrap(),
MigrationColumn::from_route(MigrationRoute::Timeline(
MigrationTimelineRoute::Timeline(0)
))
);
assert_eq!(migration_cols.timelines.len(), 1);
assert_eq!(
*migration_cols.timelines.first().unwrap(),
MigrationTimeline::new(0, MigrationTimelineKind::Universe)
)
}
#[test]
fn home() {
let route = r#"{"columns":[[{"Timeline":{"Timeline":2}}]],"timelines":[{"id":2,"kind":{"List":{"Contact":{"Explicit":"aa733081e4f0f79dd43023d8983265593f2b41a988671cfcef3f489b91ad93fe"}}}}]}"#;
let deserialized_columns = deserialize_columns_string(route.to_string());
assert!(deserialized_columns.is_ok());
let migration_cols = deserialized_columns.unwrap();
assert_eq!(migration_cols.columns.len(), 1);
assert_eq!(
*migration_cols.columns.first().unwrap(),
MigrationColumn::from_route(MigrationRoute::Timeline(
MigrationTimelineRoute::Timeline(2)
))
);
assert_eq!(migration_cols.timelines.len(), 1);
assert_eq!(
*migration_cols.timelines.first().unwrap(),
MigrationTimeline::new(
2,
MigrationTimelineKind::List(MigrationListKind::Contact(
MigrationPubkeySource::Explicit(
"aa733081e4f0f79dd43023d8983265593f2b41a988671cfcef3f489b91ad93fe"
.to_owned()
)
))
)
)
}
#[test]
fn thread() {
let route = r#"{"columns":[[{"Timeline":{"Timeline":7}},{"Timeline":{"Thread":"fb9b0c62bc91bbe28ca428fc85e310ae38795b94fb910e0f4e12962ced971f25"}}]],"timelines":[{"id":7,"kind":{"List":{"Contact":{"Explicit":"4a0510f26880d40e432f4865cb5714d9d3c200ca6ebb16b418ae6c555f574967"}}}}]}"#;
let deserialized_columns = deserialize_columns_string(route.to_string());
assert!(deserialized_columns.is_ok());
let migration_cols = deserialized_columns.unwrap();
assert_eq!(migration_cols.columns.len(), 1);
assert_eq!(
*migration_cols.columns.first().unwrap(),
MigrationColumn::from_routes(vec![
MigrationRoute::Timeline(MigrationTimelineRoute::Timeline(7),),
MigrationRoute::Timeline(MigrationTimelineRoute::Thread(
"fb9b0c62bc91bbe28ca428fc85e310ae38795b94fb910e0f4e12962ced971f25".to_owned()
)),
])
);
assert_eq!(migration_cols.timelines.len(), 1);
assert_eq!(
*migration_cols.timelines.first().unwrap(),
MigrationTimeline::new(
7,
MigrationTimelineKind::List(MigrationListKind::Contact(
MigrationPubkeySource::Explicit(
"4a0510f26880d40e432f4865cb5714d9d3c200ca6ebb16b418ae6c555f574967"
.to_owned()
)
))
)
)
}
#[test]
fn profile() {
let route = r#"{"columns":[[{"Timeline":{"Timeline":7}},{"Timeline":{"Profile":"32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245"}}]],"timelines":[{"id":7,"kind":{"List":{"Contact":{"Explicit":"4a0510f26880d40e432f4865cb5714d9d3c200ca6ebb16b418ae6c555f574967"}}}}]}"#;
let deserialized_columns = deserialize_columns_string(route.to_string());
assert!(deserialized_columns.is_ok());
let migration_cols = deserialized_columns.unwrap();
assert_eq!(migration_cols.columns.len(), 1);
assert_eq!(
*migration_cols.columns.first().unwrap(),
MigrationColumn::from_routes(vec![
MigrationRoute::Timeline(MigrationTimelineRoute::Timeline(7),),
MigrationRoute::Timeline(MigrationTimelineRoute::Profile(
"32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245".to_owned()
)),
])
);
assert_eq!(migration_cols.timelines.len(), 1);
assert_eq!(
*migration_cols.timelines.first().unwrap(),
MigrationTimeline::new(
7,
MigrationTimelineKind::List(MigrationListKind::Contact(
MigrationPubkeySource::Explicit(
"4a0510f26880d40e432f4865cb5714d9d3c200ca6ebb16b418ae6c555f574967"
.to_owned()
)
))
)
)
}
#[test]
fn your_notifs() {
let route = r#"{"columns":[[{"Timeline":{"Timeline":5}}]],"timelines":[{"id":5,"kind":{"Notifications":"DeckAuthor"}}]}"#;
let deserialized_columns = deserialize_columns_string(route.to_string());
assert!(deserialized_columns.is_ok());
let migration_cols = deserialized_columns.unwrap();
assert_eq!(migration_cols.columns.len(), 1);
assert_eq!(
*migration_cols.columns.first().unwrap(),
MigrationColumn::from_route(MigrationRoute::Timeline(
MigrationTimelineRoute::Timeline(5)
))
);
assert_eq!(migration_cols.timelines.len(), 1);
assert_eq!(
*migration_cols.timelines.first().unwrap(),
MigrationTimeline::new(
5,
MigrationTimelineKind::Notifications(MigrationPubkeySource::DeckAuthor)
)
)
}
#[test]
fn undecided_notifs() {
let route = r#"{"columns":[[{"AddColumn":"Base"},{"AddColumn":"UndecidedNotification"}]],"timelines":[]}"#;
let deserialized_columns = deserialize_columns_string(route.to_string());
assert!(deserialized_columns.is_ok());
let migration_cols = deserialized_columns.unwrap();
assert_eq!(migration_cols.columns.len(), 1);
assert_eq!(
*migration_cols.columns.first().unwrap(),
MigrationColumn::from_routes(vec![
MigrationRoute::AddColumn(MigrationAddColumnRoute::Base),
MigrationRoute::AddColumn(MigrationAddColumnRoute::UndecidedNotification),
])
);
assert!(migration_cols.timelines.is_empty());
}
#[test]
fn extern_notifs() {
let route = r#"{"columns":[[{"Timeline":{"Timeline":4}}]],"timelines":[{"id":4,"kind":{"Notifications":{"Explicit":"32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245"}}}]}"#;
let deserialized_columns = deserialize_columns_string(route.to_string());
assert!(deserialized_columns.is_ok());
let migration_cols = deserialized_columns.unwrap();
assert_eq!(migration_cols.columns.len(), 1);
assert_eq!(
*migration_cols.columns.first().unwrap(),
MigrationColumn::from_route(MigrationRoute::Timeline(
MigrationTimelineRoute::Timeline(4)
))
);
assert_eq!(migration_cols.timelines.len(), 1);
assert_eq!(
*migration_cols.timelines.first().unwrap(),
MigrationTimeline::new(
4,
MigrationTimelineKind::Notifications(MigrationPubkeySource::Explicit(
"32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245".to_owned()
))
)
)
}
#[test]
fn hashtag() {
let route = r#"{"columns":[[{"Timeline":{"Timeline":6}}]],"timelines":[{"id":6,"kind":{"Hashtag":"notedeck"}}]}"#;
let deserialized_columns = deserialize_columns_string(route.to_string());
assert!(deserialized_columns.is_ok());
let migration_cols = deserialized_columns.unwrap();
assert_eq!(migration_cols.columns.len(), 1);
assert_eq!(
*migration_cols.columns.first().unwrap(),
MigrationColumn::from_route(MigrationRoute::Timeline(
MigrationTimelineRoute::Timeline(6)
))
);
assert_eq!(migration_cols.timelines.len(), 1);
assert_eq!(
*migration_cols.timelines.first().unwrap(),
MigrationTimeline::new(6, MigrationTimelineKind::Hashtag("notedeck".to_owned()))
)
}
#[test]
fn support() {
let route = r#"{"columns":[[{"AddColumn":"Base"},"Support"]],"timelines":[]}"#;
let deserialized_columns = deserialize_columns_string(route.to_string());
assert!(deserialized_columns.is_ok());
let migration_cols = deserialized_columns.unwrap();
assert_eq!(migration_cols.columns.len(), 1);
assert_eq!(
*migration_cols.columns.first().unwrap(),
MigrationColumn::from_routes(vec![
MigrationRoute::AddColumn(MigrationAddColumnRoute::Base),
MigrationRoute::Support
])
);
assert!(migration_cols.timelines.is_empty());
}
#[test]
fn post() {
let route = r#"{"columns":[[{"AddColumn":"Base"},"ComposeNote"]],"timelines":[]}"#;
let deserialized_columns = deserialize_columns_string(route.to_string());
assert!(deserialized_columns.is_ok());
let migration_cols = deserialized_columns.unwrap();
assert_eq!(migration_cols.columns.len(), 1);
assert_eq!(
*migration_cols.columns.first().unwrap(),
MigrationColumn::from_routes(vec![
MigrationRoute::AddColumn(MigrationAddColumnRoute::Base),
MigrationRoute::ComposeNote
])
);
assert!(migration_cols.timelines.is_empty());
}
#[test]
fn relay() {
let route = r#"{"columns":[[{"AddColumn":"Base"},"Relays"]],"timelines":[]}"#;
let deserialized_columns = deserialize_columns_string(route.to_string());
assert!(deserialized_columns.is_ok());
let migration_cols = deserialized_columns.unwrap();
assert_eq!(migration_cols.columns.len(), 1);
assert_eq!(
*migration_cols.columns.first().unwrap(),
MigrationColumn::from_routes(vec![
MigrationRoute::AddColumn(MigrationAddColumnRoute::Base),
MigrationRoute::Relays
])
);
assert!(migration_cols.timelines.is_empty());
}
#[test]
fn accounts() {
let route =
r#"{"columns":[[{"AddColumn":"Base"},{"Accounts":"Accounts"}]],"timelines":[]}"#;
let deserialized_columns = deserialize_columns_string(route.to_string());
assert!(deserialized_columns.is_ok());
let migration_cols = deserialized_columns.unwrap();
assert_eq!(migration_cols.columns.len(), 1);
assert_eq!(
*migration_cols.columns.first().unwrap(),
MigrationColumn::from_routes(vec![
MigrationRoute::AddColumn(MigrationAddColumnRoute::Base),
MigrationRoute::Accounts(MigrationAccountsRoute::Accounts),
])
);
assert!(migration_cols.timelines.is_empty());
}
#[test]
fn login() {
let route = r#"{"columns":[[{"AddColumn":"Base"},{"Accounts":"Accounts"},{"Accounts":"AddAccount"}]],"timelines":[]}"#;
let deserialized_columns = deserialize_columns_string(route.to_string());
assert!(deserialized_columns.is_ok());
let migration_cols = deserialized_columns.unwrap();
assert_eq!(migration_cols.columns.len(), 1);
assert_eq!(
*migration_cols.columns.first().unwrap(),
MigrationColumn::from_routes(vec![
MigrationRoute::AddColumn(MigrationAddColumnRoute::Base),
MigrationRoute::Accounts(MigrationAccountsRoute::Accounts),
MigrationRoute::Accounts(MigrationAccountsRoute::AddAccount),
])
);
assert!(migration_cols.timelines.is_empty());
}
}

View File

@@ -1,5 +1,3 @@
mod decks;
mod migration;
pub use decks::{load_decks_cache, save_decks_cache, DECKS_CACHE_FILE};
pub use migration::{deserialize_columns, COLUMNS_FILE};