split notedeck into crates
This splits notedeck into crates, separating the browser chrome and individual apps: * notedeck: binary, browser chrome * notedeck_columns: our columns app * enostr: same as before We still need to do more work to cleanly separate the chrome APIs from the app APIs. Soon I will create notedeck-notebook to see what makes sense to be shared between the apps. Some obvious ones that come to mind: 1. ImageCache We will likely want to move this to the notedeck crate, as most apps will want some kind of image cache. In web browsers, web pages do not need to worry about this, so we will likely have to do something similar. 2. Ndb Since NdbRef is thread-safe and Ndb is an Arc<NdbRef>, it can be safely copied to each app. This will simplify things. In the future we might want to create an abstraction over this? Maybe each app shouldn't have access to the same database... we assume the data in DBs is all public anyway, but if we have unwrapped giftwraps that could be a problem. 3. RelayPool / Subscription Manager The browser should probably maintain these. Then apps can use Ken's high-level subscription manager API and not have to worry about connection pool details. 4. Accounts Accounts and key management should be handled by the chrome. Apps should only have a simple signer interface. That's all for now, just something to think about! Signed-off-by: William Casarin <jb55@jb55.com>
This commit is contained in:
803
crates/notedeck_columns/src/storage/decks.rs
Normal file
803
crates/notedeck_columns/src/storage/decks.rs
Normal file
@@ -0,0 +1,803 @@
|
||||
use std::{collections::HashMap, fmt, str::FromStr};
|
||||
|
||||
use enostr::{NoteId, Pubkey};
|
||||
use nostrdb::Ndb;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tracing::{error, info};
|
||||
|
||||
use crate::{
|
||||
accounts::AccountsRoute,
|
||||
column::{Columns, IntermediaryRoute},
|
||||
decks::{Deck, Decks, DecksCache},
|
||||
route::Route,
|
||||
timeline::{kind::ListKind, PubkeySource, TimelineKind, TimelineRoute},
|
||||
ui::add_column::AddColumnRoute,
|
||||
Error,
|
||||
};
|
||||
|
||||
use super::{write_file, DataPath, DataPathType, Directory};
|
||||
|
||||
pub static DECKS_CACHE_FILE: &str = "decks_cache.json";
|
||||
|
||||
pub fn load_decks_cache(path: &DataPath, ndb: &Ndb) -> Option<DecksCache> {
|
||||
let data_path = path.path(DataPathType::Setting);
|
||||
|
||||
let decks_cache_str = match Directory::new(data_path).get_file(DECKS_CACHE_FILE.to_owned()) {
|
||||
Ok(s) => s,
|
||||
Err(e) => {
|
||||
error!(
|
||||
"Could not read decks cache from file {}: {}",
|
||||
DECKS_CACHE_FILE, e
|
||||
);
|
||||
return None;
|
||||
}
|
||||
};
|
||||
|
||||
let serializable_decks_cache =
|
||||
serde_json::from_str::<SerializableDecksCache>(&decks_cache_str).ok()?;
|
||||
|
||||
serializable_decks_cache.decks_cache(ndb).ok()
|
||||
}
|
||||
|
||||
/// Serialize `decks_cache` to JSON and persist it as [`DECKS_CACHE_FILE`]
/// in the settings directory. Failures are logged and otherwise swallowed.
pub fn save_decks_cache(path: &DataPath, decks_cache: &DecksCache) {
    let serialized_decks_cache =
        match serde_json::to_string(&SerializableDecksCache::to_serializable(decks_cache)) {
            Ok(s) => s,
            Err(e) => {
                error!("Could not serialize decks cache: {}", e);
                return;
            }
        };

    let data_path = path.path(DataPathType::Setting);

    if let Err(e) = write_file(
        &data_path,
        DECKS_CACHE_FILE.to_string(),
        &serialized_decks_cache,
    ) {
        error!(
            "Could not write decks cache to file {}: {}",
            DECKS_CACHE_FILE, e
        );
    } else {
        info!("Successfully wrote decks cache to {}", DECKS_CACHE_FILE);
    }
}
|
||||
|
||||
/// JSON-serializable mirror of [`DecksCache`], keyed by account pubkey.
#[derive(Serialize, Deserialize)]
struct SerializableDecksCache {
    // Pubkeys appear as hex strings in the JSON via the custom (de)serializers.
    #[serde(serialize_with = "serialize_map", deserialize_with = "deserialize_map")]
    decks_cache: HashMap<Pubkey, SerializableDecks>,
}
|
||||
|
||||
impl SerializableDecksCache {
    /// Snapshot an in-memory [`DecksCache`] into serializable form.
    fn to_serializable(decks_cache: &DecksCache) -> Self {
        SerializableDecksCache {
            decks_cache: decks_cache
                .get_mapping()
                .iter()
                .map(|(k, v)| (*k, SerializableDecks::from_decks(v)))
                .collect(),
        }
    }

    /// Rebuild the in-memory [`DecksCache`], consuming `self`.
    ///
    /// Fails if any account's decks fail to deserialize.
    pub fn decks_cache(self, ndb: &Ndb) -> Result<DecksCache, Error> {
        let account_to_decks = self
            .decks_cache
            .into_iter()
            .map(|(pubkey, serializable_decks)| {
                // The owning account's pubkey doubles as the deck key.
                let deck_key = pubkey.bytes();
                serializable_decks
                    .decks(ndb, deck_key)
                    .map(|decks| (pubkey, decks))
            })
            .collect::<Result<HashMap<Pubkey, Decks>, Error>>()?;

        Ok(DecksCache::new(account_to_decks))
    }
}
|
||||
|
||||
fn serialize_map<S>(
|
||||
map: &HashMap<Pubkey, SerializableDecks>,
|
||||
serializer: S,
|
||||
) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: serde::Serializer,
|
||||
{
|
||||
let stringified_map: HashMap<String, &SerializableDecks> =
|
||||
map.iter().map(|(k, v)| (k.hex(), v)).collect();
|
||||
stringified_map.serialize(serializer)
|
||||
}
|
||||
|
||||
fn deserialize_map<'de, D>(deserializer: D) -> Result<HashMap<Pubkey, SerializableDecks>, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
{
|
||||
let stringified_map: HashMap<String, SerializableDecks> = HashMap::deserialize(deserializer)?;
|
||||
|
||||
stringified_map
|
||||
.into_iter()
|
||||
.map(|(k, v)| {
|
||||
let key = Pubkey::from_hex(&k).map_err(serde::de::Error::custom)?;
|
||||
Ok((key, v))
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// JSON-serializable mirror of [`Decks`]: the active deck index plus each
/// deck in serialized form.
#[derive(Serialize, Deserialize)]
struct SerializableDecks {
    active_deck: usize,
    decks: Vec<SerializableDeck>,
}
|
||||
|
||||
impl SerializableDecks {
    /// Snapshot `decks` into serializable form.
    pub fn from_decks(decks: &Decks) -> Self {
        Self {
            active_deck: decks.active_index(),
            decks: decks
                .decks()
                .iter()
                .map(SerializableDeck::from_deck)
                .collect(),
        }
    }

    /// Rebuild [`Decks`] for the account identified by `deck_key`, failing
    /// if any individual deck cannot be deserialized.
    fn decks(self, ndb: &Ndb, deck_key: &[u8; 32]) -> Result<Decks, Error> {
        Ok(Decks::from_decks(
            self.active_deck,
            self.decks
                .into_iter()
                .map(|d| d.deck(ndb, deck_key))
                .collect::<Result<_, _>>()?,
        ))
    }
}
|
||||
|
||||
/// JSON-serializable mirror of [`Deck`].
#[derive(Serialize, Deserialize)]
struct SerializableDeck {
    // "<keyword>:<value>" entries, e.g. "icon:🇩" and "name:My deck".
    metadata: Vec<String>,
    // One inner Vec per column; each entry is a serialized route string.
    columns: Vec<Vec<String>>,
}
|
||||
|
||||
/// Keys that may appear in a [`SerializableDeck`]'s metadata entries.
#[derive(PartialEq, Clone)]
enum MetadataKeyword {
    Icon,
    Name,
}
|
||||
|
||||
impl MetadataKeyword {
    // Single source of truth for the keyword <-> serialized-name mapping;
    // used by both the Display and FromStr impls below.
    const MAPPING: &'static [(&'static str, MetadataKeyword)] = &[
        ("icon", MetadataKeyword::Icon),
        ("name", MetadataKeyword::Name),
    ];
}
|
||||
impl fmt::Display for MetadataKeyword {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
if let Some(name) = MetadataKeyword::MAPPING
|
||||
.iter()
|
||||
.find(|(_, keyword)| keyword == self)
|
||||
.map(|(name, _)| *name)
|
||||
{
|
||||
write!(f, "{}", name)
|
||||
} else {
|
||||
write!(f, "UnknownMetadataKeyword")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for MetadataKeyword {
    type Err = Error;

    /// Look the serialized name up in [`MetadataKeyword::MAPPING`].
    fn from_str(serialized: &str) -> Result<Self, Self::Err> {
        MetadataKeyword::MAPPING
            .iter()
            .find(|(name, _)| *name == serialized)
            .map(|(_, keyword)| keyword.clone())
            .ok_or(Error::Generic(
                "Could not convert string to Keyword enum".to_owned(),
            ))
    }
}
|
||||
|
||||
/// One parsed metadata entry: a recognized keyword and its string value.
struct MetadataPayload {
    keyword: MetadataKeyword,
    value: String,
}
|
||||
|
||||
impl MetadataPayload {
    /// Convenience constructor pairing a keyword with its value.
    fn new(keyword: MetadataKeyword, value: String) -> Self {
        Self { keyword, value }
    }
}
|
||||
|
||||
fn serialize_metadata(payloads: Vec<MetadataPayload>) -> Vec<String> {
|
||||
payloads
|
||||
.into_iter()
|
||||
.map(|payload| format!("{}:{}", payload.keyword, payload.value))
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn deserialize_metadata(serialized_metadatas: Vec<String>) -> Option<Vec<MetadataPayload>> {
|
||||
let mut payloads = Vec::new();
|
||||
for serialized_metadata in serialized_metadatas {
|
||||
let cur_split: Vec<&str> = serialized_metadata.split(':').collect();
|
||||
if cur_split.len() != 2 {
|
||||
continue;
|
||||
}
|
||||
|
||||
if let Ok(keyword) = MetadataKeyword::from_str(cur_split.first().unwrap()) {
|
||||
payloads.push(MetadataPayload {
|
||||
keyword,
|
||||
value: cur_split.get(1).unwrap().to_string(),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if payloads.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(payloads)
|
||||
}
|
||||
}
|
||||
|
||||
impl SerializableDeck {
    /// Snapshot `deck` (icon, name, and serialized columns) for persistence.
    pub fn from_deck(deck: &Deck) -> Self {
        let columns = serialize_columns(deck.columns());

        let metadata = serialize_metadata(vec![
            MetadataPayload::new(MetadataKeyword::Icon, deck.icon.to_string()),
            MetadataPayload::new(MetadataKeyword::Name, deck.name.clone()),
        ]);

        SerializableDeck { metadata, columns }
    }

    /// Rebuild a [`Deck`] owned by `deck_user`, consuming `self`.
    ///
    /// Missing icon/name metadata fall back to "🇩" / "Deck". Fails if the
    /// metadata block decodes to nothing, or if the stored icon string does
    /// not parse as a single `char`.
    pub fn deck(self, ndb: &Ndb, deck_user: &[u8; 32]) -> Result<Deck, Error> {
        let columns = deserialize_columns(ndb, deck_user, self.columns);
        let deserialized_metadata = deserialize_metadata(self.metadata)
            .ok_or(Error::Generic("Could not deserialize metadata".to_owned()))?;

        let icon = deserialized_metadata
            .iter()
            .find(|p| p.keyword == MetadataKeyword::Icon)
            .map_or_else(|| "🇩", |f| &f.value);
        let name = deserialized_metadata
            .iter()
            .find(|p| p.keyword == MetadataKeyword::Name)
            .map_or_else(|| "Deck", |f| &f.value)
            .to_string();

        Ok(Deck::new_with_columns(
            icon.parse::<char>()
                .map_err(|_| Error::Generic("could not convert String -> char".to_owned()))?,
            name,
            columns,
        ))
    }
}
|
||||
|
||||
/// Serialize each column's route stack into string form.
///
/// Routes that cannot be serialized (see [`serialize_route`]) are dropped
/// rather than failing the whole column.
fn serialize_columns(columns: &Columns) -> Vec<Vec<String>> {
    let mut cols_serialized: Vec<Vec<String>> = Vec::new();

    for column in columns.columns() {
        let mut column_routes = Vec::new();
        for route in column.router().routes() {
            if let Some(route_str) = serialize_route(route, columns) {
                column_routes.push(route_str);
            }
        }
        cols_serialized.push(column_routes);
    }

    cols_serialized
}
|
||||
|
||||
/// Rebuild [`Columns`] from serialized route strings.
///
/// Each string is parsed into selections, then into an intermediary route.
/// Thread and profile routes are deliberately skipped (not yet supported for
/// deserialization); unparseable routes are logged and dropped. Columns that
/// end up with no routes at all are not inserted.
fn deserialize_columns(ndb: &Ndb, deck_user: &[u8; 32], serialized: Vec<Vec<String>>) -> Columns {
    let mut cols = Columns::new();
    for serialized_routes in serialized {
        let mut cur_routes = Vec::new();
        for serialized_route in serialized_routes {
            let selections = Selection::from_serialized(&serialized_route);
            if let Some(route_intermediary) = selections_to_route(selections.clone()) {
                if let Some(ir) = route_intermediary.intermediary_route(ndb, Some(deck_user)) {
                    match &ir {
                        IntermediaryRoute::Route(Route::Timeline(TimelineRoute::Thread(_)))
                        | IntermediaryRoute::Route(Route::Timeline(TimelineRoute::Profile(_))) => {
                            // Do nothing. Threads & Profiles not yet supported for deserialization
                        }
                        IntermediaryRoute::Timeline(tl)
                            if matches!(tl.kind, TimelineKind::Profile(_)) =>
                        {
                            // Do nothing. Profiles aren't yet supported for deserialization
                        }
                        _ => cur_routes.push(ir),
                    }
                }
            } else {
                error!(
                    "could not turn selections to RouteIntermediary: {:?}",
                    selections
                );
            }
        }

        if !cur_routes.is_empty() {
            cols.insert_intermediary_routes(cur_routes);
        }
    }

    cols
}
|
||||
|
||||
/// One token of a serialized route string: either a recognized [`Keyword`]
/// or a free-form payload (pubkey/note-id hex, hashtag, deck index, ...).
#[derive(Clone, Debug)]
enum Selection {
    Keyword(Keyword),
    Payload(String),
}
|
||||
|
||||
/// Every keyword that can appear in a serialized route string. The
/// serialized names and payload flags live in [`Keyword::MAPPING`].
#[derive(Clone, PartialEq, Debug)]
enum Keyword {
    Notifs,
    Universe,
    Contact,
    Explicit,
    DeckAuthor,
    Profile,
    Hashtag,
    Generic,
    Thread,
    Reply,
    Quote,
    Account,
    Show,
    New,
    Relay,
    Compose,
    Column,
    NotificationSelection,
    ExternalNotifSelection,
    HashtagSelection,
    Support,
    Deck,
    Edit,
}
|
||||
|
||||
impl Keyword {
    // (serialized name, keyword, expects-payload). The bool marks keywords
    // that are followed by a payload segment (hex id, hashtag, index, ...).
    // Single source of truth for Display, FromStr, and has_payload.
    const MAPPING: &'static [(&'static str, Keyword, bool)] = &[
        ("notifs", Keyword::Notifs, false),
        ("universe", Keyword::Universe, false),
        ("contact", Keyword::Contact, false),
        ("explicit", Keyword::Explicit, true),
        ("deck_author", Keyword::DeckAuthor, false),
        ("profile", Keyword::Profile, true),
        ("hashtag", Keyword::Hashtag, true),
        ("generic", Keyword::Generic, false),
        ("thread", Keyword::Thread, true),
        ("reply", Keyword::Reply, true),
        ("quote", Keyword::Quote, true),
        ("account", Keyword::Account, false),
        ("show", Keyword::Show, false),
        ("new", Keyword::New, false),
        ("relay", Keyword::Relay, false),
        ("compose", Keyword::Compose, false),
        ("column", Keyword::Column, false),
        (
            "notification_selection",
            Keyword::NotificationSelection,
            false,
        ),
        (
            "external_notif_selection",
            Keyword::ExternalNotifSelection,
            false,
        ),
        ("hashtag_selection", Keyword::HashtagSelection, false),
        ("support", Keyword::Support, false),
        ("deck", Keyword::Deck, false),
        ("edit", Keyword::Edit, true),
    ];

    /// Whether this keyword is followed by a payload in serialized form.
    /// Unknown keywords (not in MAPPING) default to no payload.
    fn has_payload(&self) -> bool {
        Keyword::MAPPING
            .iter()
            .find(|(_, keyword, _)| keyword == self)
            .map(|(_, _, has_payload)| *has_payload)
            .unwrap_or(false)
    }
}
|
||||
|
||||
impl fmt::Display for Keyword {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
if let Some(name) = Keyword::MAPPING
|
||||
.iter()
|
||||
.find(|(_, keyword, _)| keyword == self)
|
||||
.map(|(name, _, _)| *name)
|
||||
{
|
||||
write!(f, "{}", name)
|
||||
} else {
|
||||
write!(f, "UnknownKeyword")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for Keyword {
    type Err = Error;

    /// Look the serialized name up in [`Keyword::MAPPING`].
    fn from_str(serialized: &str) -> Result<Self, Self::Err> {
        Keyword::MAPPING
            .iter()
            .find(|(name, _, _)| *name == serialized)
            .map(|(_, keyword, _)| keyword.clone())
            .ok_or(Error::Generic(
                "Could not convert string to Keyword enum".to_owned(),
            ))
    }
}
|
||||
|
||||
/// A route parsed from disk that still needs nostrdb access before it can
/// become a real [`IntermediaryRoute`].
enum CleanIntermediaryRoute {
    ToTimeline(TimelineKind),
    ToRoute(Route),
}
|
||||
|
||||
impl CleanIntermediaryRoute {
    /// Resolve into an [`IntermediaryRoute`], returning `None` when the
    /// timeline kind cannot be instantiated from `ndb`/`user`.
    fn intermediary_route(self, ndb: &Ndb, user: Option<&[u8; 32]>) -> Option<IntermediaryRoute> {
        match self {
            CleanIntermediaryRoute::ToTimeline(timeline_kind) => Some(IntermediaryRoute::Timeline(
                timeline_kind.into_timeline(ndb, user)?,
            )),
            CleanIntermediaryRoute::ToRoute(route) => Some(IntermediaryRoute::Route(route)),
        }
    }
}
|
||||
|
||||
// TODO: The public-accessible version will be a subset of this
|
||||
/// Serialize `route` into a `:`-joined keyword/payload string, or `None`
/// for routes that produce no selections.
///
/// `columns` is needed to resolve a `TimelineRoute::Timeline` id back to its
/// timeline kind; an id with no matching timeline yields no selections.
fn serialize_route(route: &Route, columns: &Columns) -> Option<String> {
    let mut selections: Vec<Selection> = Vec::new();
    match route {
        Route::Timeline(timeline_route) => match timeline_route {
            TimelineRoute::Timeline(timeline_id) => {
                if let Some(timeline) = columns.find_timeline(*timeline_id) {
                    match &timeline.kind {
                        TimelineKind::List(list_kind) => match list_kind {
                            ListKind::Contact(pubkey_source) => {
                                selections.push(Selection::Keyword(Keyword::Contact));
                                selections.extend(generate_pubkey_selections(pubkey_source));
                            }
                        },
                        TimelineKind::Notifications(pubkey_source) => {
                            selections.push(Selection::Keyword(Keyword::Notifs));
                            selections.extend(generate_pubkey_selections(pubkey_source));
                        }
                        TimelineKind::Profile(pubkey_source) => {
                            selections.push(Selection::Keyword(Keyword::Profile));
                            selections.extend(generate_pubkey_selections(pubkey_source));
                        }
                        TimelineKind::Universe => {
                            selections.push(Selection::Keyword(Keyword::Universe))
                        }
                        TimelineKind::Generic => {
                            selections.push(Selection::Keyword(Keyword::Generic))
                        }
                        TimelineKind::Hashtag(hashtag) => {
                            selections.push(Selection::Keyword(Keyword::Hashtag));
                            selections.push(Selection::Payload(hashtag.to_string()));
                        }
                    }
                }
            }
            TimelineRoute::Thread(note_id) => {
                selections.push(Selection::Keyword(Keyword::Thread));
                selections.push(Selection::Payload(note_id.hex()));
            }
            TimelineRoute::Profile(pubkey) => {
                selections.push(Selection::Keyword(Keyword::Profile));
                selections.push(Selection::Keyword(Keyword::Explicit));
                selections.push(Selection::Payload(pubkey.hex()));
            }
            TimelineRoute::Reply(note_id) => {
                selections.push(Selection::Keyword(Keyword::Reply));
                selections.push(Selection::Payload(note_id.hex()));
            }
            TimelineRoute::Quote(note_id) => {
                selections.push(Selection::Keyword(Keyword::Quote));
                selections.push(Selection::Payload(note_id.hex()));
            }
        },
        Route::Accounts(accounts_route) => {
            selections.push(Selection::Keyword(Keyword::Account));
            match accounts_route {
                AccountsRoute::Accounts => selections.push(Selection::Keyword(Keyword::Show)),
                AccountsRoute::AddAccount => selections.push(Selection::Keyword(Keyword::New)),
            }
        }
        Route::Relays => selections.push(Selection::Keyword(Keyword::Relay)),
        Route::ComposeNote => selections.push(Selection::Keyword(Keyword::Compose)),
        Route::AddColumn(add_column_route) => {
            selections.push(Selection::Keyword(Keyword::Column));
            match add_column_route {
                // The base add-column view is just the bare "column" keyword.
                AddColumnRoute::Base => (),
                AddColumnRoute::UndecidedNotification => {
                    selections.push(Selection::Keyword(Keyword::NotificationSelection))
                }
                AddColumnRoute::ExternalNotification => {
                    selections.push(Selection::Keyword(Keyword::ExternalNotifSelection))
                }
                AddColumnRoute::Hashtag => {
                    selections.push(Selection::Keyword(Keyword::HashtagSelection))
                }
            }
        }
        Route::Support => selections.push(Selection::Keyword(Keyword::Support)),
        Route::NewDeck => {
            selections.push(Selection::Keyword(Keyword::Deck));
            selections.push(Selection::Keyword(Keyword::New));
        }
        Route::EditDeck(index) => {
            selections.push(Selection::Keyword(Keyword::Deck));
            selections.push(Selection::Keyword(Keyword::Edit));
            selections.push(Selection::Payload(index.to_string()));
        }
    }

    if selections.is_empty() {
        None
    } else {
        Some(
            selections
                .iter()
                .map(|k| k.to_string())
                .collect::<Vec<String>>()
                .join(":"),
        )
    }
}
|
||||
|
||||
fn generate_pubkey_selections(source: &PubkeySource) -> Vec<Selection> {
|
||||
let mut selections = Vec::new();
|
||||
match source {
|
||||
PubkeySource::Explicit(pubkey) => {
|
||||
selections.push(Selection::Keyword(Keyword::Explicit));
|
||||
selections.push(Selection::Payload(pubkey.hex()));
|
||||
}
|
||||
PubkeySource::DeckAuthor => {
|
||||
selections.push(Selection::Keyword(Keyword::DeckAuthor));
|
||||
}
|
||||
}
|
||||
selections
|
||||
}
|
||||
|
||||
impl Selection {
|
||||
fn from_serialized(serialized: &str) -> Vec<Self> {
|
||||
let mut selections = Vec::new();
|
||||
let seperator = ":";
|
||||
|
||||
let mut serialized_copy = serialized.to_string();
|
||||
let mut buffer = serialized_copy.as_mut();
|
||||
|
||||
let mut next_is_payload = false;
|
||||
while let Some(index) = buffer.find(seperator) {
|
||||
if let Ok(keyword) = Keyword::from_str(&buffer[..index]) {
|
||||
selections.push(Selection::Keyword(keyword.clone()));
|
||||
if keyword.has_payload() {
|
||||
next_is_payload = true;
|
||||
}
|
||||
}
|
||||
|
||||
buffer = &mut buffer[index + seperator.len()..];
|
||||
}
|
||||
|
||||
if next_is_payload {
|
||||
selections.push(Selection::Payload(buffer.to_string()));
|
||||
} else if let Ok(keyword) = Keyword::from_str(buffer) {
|
||||
selections.push(Selection::Keyword(keyword.clone()));
|
||||
}
|
||||
|
||||
selections
|
||||
}
|
||||
}
|
||||
|
||||
/// Interpret a parsed selection list as a route, dispatching on the leading
/// keyword. Returns `None` for malformed or truncated selection sequences.
///
/// Payload-only keywords (Explicit, New, Show, ...) are invalid in the
/// leading position and map to `None` in the final arm.
fn selections_to_route(selections: Vec<Selection>) -> Option<CleanIntermediaryRoute> {
    match selections.first()? {
        Selection::Keyword(Keyword::Contact) => match selections.get(1)? {
            Selection::Keyword(Keyword::Explicit) => {
                if let Selection::Payload(hex) = selections.get(2)? {
                    Some(CleanIntermediaryRoute::ToTimeline(
                        TimelineKind::contact_list(PubkeySource::Explicit(
                            Pubkey::from_hex(hex.as_str()).ok()?,
                        )),
                    ))
                } else {
                    None
                }
            }
            Selection::Keyword(Keyword::DeckAuthor) => Some(CleanIntermediaryRoute::ToTimeline(
                TimelineKind::contact_list(PubkeySource::DeckAuthor),
            )),
            _ => None,
        },
        Selection::Keyword(Keyword::Notifs) => match selections.get(1)? {
            Selection::Keyword(Keyword::Explicit) => {
                if let Selection::Payload(hex) = selections.get(2)? {
                    Some(CleanIntermediaryRoute::ToTimeline(
                        TimelineKind::notifications(PubkeySource::Explicit(
                            Pubkey::from_hex(hex.as_str()).ok()?,
                        )),
                    ))
                } else {
                    None
                }
            }
            Selection::Keyword(Keyword::DeckAuthor) => Some(CleanIntermediaryRoute::ToTimeline(
                TimelineKind::notifications(PubkeySource::DeckAuthor),
            )),
            _ => None,
        },
        Selection::Keyword(Keyword::Profile) => match selections.get(1)? {
            Selection::Keyword(Keyword::Explicit) => {
                if let Selection::Payload(hex) = selections.get(2)? {
                    Some(CleanIntermediaryRoute::ToTimeline(TimelineKind::profile(
                        PubkeySource::Explicit(Pubkey::from_hex(hex.as_str()).ok()?),
                    )))
                } else {
                    None
                }
            }
            Selection::Keyword(Keyword::DeckAuthor) => Some(CleanIntermediaryRoute::ToTimeline(
                TimelineKind::profile(PubkeySource::DeckAuthor),
            )),
            _ => None,
        },
        Selection::Keyword(Keyword::Universe) => {
            Some(CleanIntermediaryRoute::ToTimeline(TimelineKind::Universe))
        }
        Selection::Keyword(Keyword::Hashtag) => {
            if let Selection::Payload(hashtag) = selections.get(1)? {
                Some(CleanIntermediaryRoute::ToTimeline(TimelineKind::Hashtag(
                    hashtag.to_string(),
                )))
            } else {
                None
            }
        }
        Selection::Keyword(Keyword::Generic) => {
            Some(CleanIntermediaryRoute::ToTimeline(TimelineKind::Generic))
        }
        Selection::Keyword(Keyword::Thread) => {
            if let Selection::Payload(hex) = selections.get(1)? {
                Some(CleanIntermediaryRoute::ToRoute(Route::thread(
                    NoteId::from_hex(hex.as_str()).ok()?,
                )))
            } else {
                None
            }
        }
        Selection::Keyword(Keyword::Reply) => {
            if let Selection::Payload(hex) = selections.get(1)? {
                Some(CleanIntermediaryRoute::ToRoute(Route::reply(
                    NoteId::from_hex(hex.as_str()).ok()?,
                )))
            } else {
                None
            }
        }
        Selection::Keyword(Keyword::Quote) => {
            if let Selection::Payload(hex) = selections.get(1)? {
                Some(CleanIntermediaryRoute::ToRoute(Route::quote(
                    NoteId::from_hex(hex.as_str()).ok()?,
                )))
            } else {
                None
            }
        }
        Selection::Keyword(Keyword::Account) => match selections.get(1)? {
            Selection::Keyword(Keyword::Show) => Some(CleanIntermediaryRoute::ToRoute(
                Route::Accounts(AccountsRoute::Accounts),
            )),
            Selection::Keyword(Keyword::New) => Some(CleanIntermediaryRoute::ToRoute(
                Route::Accounts(AccountsRoute::AddAccount),
            )),
            _ => None,
        },
        Selection::Keyword(Keyword::Relay) => Some(CleanIntermediaryRoute::ToRoute(Route::Relays)),
        Selection::Keyword(Keyword::Compose) => {
            Some(CleanIntermediaryRoute::ToRoute(Route::ComposeNote))
        }
        Selection::Keyword(Keyword::Column) => match selections.get(1)? {
            Selection::Keyword(Keyword::NotificationSelection) => {
                Some(CleanIntermediaryRoute::ToRoute(Route::AddColumn(
                    AddColumnRoute::UndecidedNotification,
                )))
            }
            Selection::Keyword(Keyword::ExternalNotifSelection) => {
                Some(CleanIntermediaryRoute::ToRoute(Route::AddColumn(
                    AddColumnRoute::ExternalNotification,
                )))
            }
            Selection::Keyword(Keyword::HashtagSelection) => Some(CleanIntermediaryRoute::ToRoute(
                Route::AddColumn(AddColumnRoute::Hashtag),
            )),
            _ => None,
        },
        Selection::Keyword(Keyword::Support) => {
            Some(CleanIntermediaryRoute::ToRoute(Route::Support))
        }
        Selection::Keyword(Keyword::Deck) => match selections.get(1)? {
            Selection::Keyword(Keyword::New) => {
                Some(CleanIntermediaryRoute::ToRoute(Route::NewDeck))
            }
            Selection::Keyword(Keyword::Edit) => {
                if let Selection::Payload(index_str) = selections.get(2)? {
                    let parsed_index = index_str.parse::<usize>().ok()?;
                    Some(CleanIntermediaryRoute::ToRoute(Route::EditDeck(
                        parsed_index,
                    )))
                } else {
                    None
                }
            }
            _ => None,
        },
        Selection::Payload(_)
        | Selection::Keyword(Keyword::Explicit)
        | Selection::Keyword(Keyword::New)
        | Selection::Keyword(Keyword::DeckAuthor)
        | Selection::Keyword(Keyword::Show)
        | Selection::Keyword(Keyword::NotificationSelection)
        | Selection::Keyword(Keyword::ExternalNotifSelection)
        | Selection::Keyword(Keyword::HashtagSelection)
        | Selection::Keyword(Keyword::Edit) => None,
    }
}
|
||||
|
||||
impl fmt::Display for Selection {
    /// Write the serialized form of the selection: the keyword's mapped name
    /// or the payload verbatim.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Selection::Keyword(keyword) => write!(f, "{}", keyword),
            Selection::Payload(payload) => write!(f, "{}", payload),
        }
    }
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use enostr::Pubkey;

    use crate::{route::Route, test_data::test_app, timeline::TimelineRoute};

    use super::deserialize_columns;

    /// Round-trip check: a universe column and an explicit-pubkey notifs
    /// column both deserialize into single-route timeline columns.
    #[test]
    fn test_deserialize_columns() {
        let serialized = vec![
            vec!["universe".to_owned()],
            vec![
                "notifs:explicit:aa733081e4f0f79dd43023d8983265593f2b41a988671cfcef3f489b91ad93fe"
                    .to_owned(),
            ],
        ];

        let user =
            Pubkey::from_hex("aa733081e4f0f79dd43023d8983265593f2b41a988671cfcef3f489b91ad93fe")
                .unwrap();

        let app = test_app();
        let cols = deserialize_columns(&app.ndb, user.bytes(), serialized);

        assert_eq!(cols.columns().len(), 2);
        let router = cols.column(0).router();
        assert_eq!(router.routes().len(), 1);

        if let Route::Timeline(TimelineRoute::Timeline(_)) = router.routes().first().unwrap() {
        } else {
            panic!("The first router route is not a TimelineRoute::Timeline variant");
        }

        let router = cols.column(1).router();
        assert_eq!(router.routes().len(), 1);
        if let Route::Timeline(TimelineRoute::Timeline(_)) = router.routes().first().unwrap() {
        } else {
            panic!("The second router route is not a TimelineRoute::Timeline variant");
        }
    }
}
|
||||
176
crates/notedeck_columns/src/storage/file_key_storage.rs
Normal file
176
crates/notedeck_columns/src/storage/file_key_storage.rs
Normal file
@@ -0,0 +1,176 @@
|
||||
use eframe::Result;
|
||||
use enostr::{Keypair, Pubkey, SerializableKeypair};
|
||||
|
||||
use crate::Error;
|
||||
|
||||
use super::{
|
||||
file_storage::{delete_file, write_file, Directory},
|
||||
key_storage_impl::{KeyStorageError, KeyStorageResponse},
|
||||
};
|
||||
|
||||
static SELECTED_PUBKEY_FILE_NAME: &str = "selected_pubkey";
|
||||
|
||||
/// An OS agnostic file key storage implementation
///
/// Keypairs live as one JSON file per pubkey in `keys_directory`; the
/// currently selected pubkey lives as a single file in
/// `selected_key_directory`.
#[derive(Debug, PartialEq)]
pub struct FileKeyStorage {
    keys_directory: Directory,
    selected_key_directory: Directory,
}
|
||||
|
||||
impl FileKeyStorage {
    /// Create a storage backed by the two given directories.
    pub fn new(keys_directory: Directory, selected_key_directory: Directory) -> Self {
        Self {
            keys_directory,
            selected_key_directory,
        }
    }

    /// Persist `key` as JSON in the keys directory, named by its pubkey hex.
    // NOTE(review): the keypair is serialized with an empty passphrase and
    // difficulty 7 — confirm these are the intended defaults.
    fn add_key_internal(&self, key: &Keypair) -> Result<(), KeyStorageError> {
        write_file(
            &self.keys_directory.file_path,
            key.pubkey.hex(),
            &serde_json::to_string(&SerializableKeypair::from_keypair(key, "", 7))
                .map_err(|e| KeyStorageError::Addition(Error::Generic(e.to_string())))?,
        )
        .map_err(KeyStorageError::Addition)
    }

    /// Read every keypair file; files that fail to parse are skipped.
    fn get_keys_internal(&self) -> Result<Vec<Keypair>, KeyStorageError> {
        let keys = self
            .keys_directory
            .get_files()
            .map_err(KeyStorageError::Retrieval)?
            .values()
            .filter_map(|str_key| serde_json::from_str::<SerializableKeypair>(str_key).ok())
            .map(|serializable_keypair| serializable_keypair.to_keypair(""))
            .collect();
        Ok(keys)
    }

    /// Delete the file named after `key`'s pubkey.
    fn remove_key_internal(&self, key: &Keypair) -> Result<(), KeyStorageError> {
        delete_file(&self.keys_directory.file_path, key.pubkey.hex())
            .map_err(KeyStorageError::Removal)
    }

    /// Read the currently selected pubkey, if one was stored.
    fn get_selected_pubkey(&self) -> Result<Option<Pubkey>, KeyStorageError> {
        let pubkey_str = self
            .selected_key_directory
            .get_file(SELECTED_PUBKEY_FILE_NAME.to_owned())
            .map_err(KeyStorageError::Selection)?;

        serde_json::from_str(&pubkey_str)
            .map_err(|e| KeyStorageError::Selection(Error::Generic(e.to_string())))
    }

    /// Store `pubkey` as the selection, or clear an existing selection when
    /// `None` is passed. Clearing when nothing is stored is a no-op.
    fn select_pubkey(&self, pubkey: Option<Pubkey>) -> Result<(), KeyStorageError> {
        if let Some(pubkey) = pubkey {
            write_file(
                &self.selected_key_directory.file_path,
                SELECTED_PUBKEY_FILE_NAME.to_owned(),
                &serde_json::to_string(&pubkey.hex())
                    .map_err(|e| KeyStorageError::Selection(Error::Generic(e.to_string())))?,
            )
            .map_err(KeyStorageError::Selection)
        } else if self
            .selected_key_directory
            .get_file(SELECTED_PUBKEY_FILE_NAME.to_owned())
            .is_ok()
        {
            // Case where user chose to have no selected pubkey, but one already exists
            delete_file(
                &self.selected_key_directory.file_path,
                SELECTED_PUBKEY_FILE_NAME.to_owned(),
            )
            .map_err(KeyStorageError::Selection)
        } else {
            Ok(())
        }
    }
}
|
||||
|
||||
// Public interface: wraps the internal file operations in
// [`KeyStorageResponse`] so callers receive one uniform response type.
impl FileKeyStorage {
    /// All stored keypairs (unparseable files skipped).
    pub fn get_keys(&self) -> KeyStorageResponse<Vec<enostr::Keypair>> {
        KeyStorageResponse::ReceivedResult(self.get_keys_internal())
    }

    /// Persist a keypair.
    pub fn add_key(&self, key: &enostr::Keypair) -> KeyStorageResponse<()> {
        KeyStorageResponse::ReceivedResult(self.add_key_internal(key))
    }

    /// Remove a stored keypair by its pubkey.
    pub fn remove_key(&self, key: &enostr::Keypair) -> KeyStorageResponse<()> {
        KeyStorageResponse::ReceivedResult(self.remove_key_internal(key))
    }

    /// The currently selected pubkey, if any.
    pub fn get_selected_key(&self) -> KeyStorageResponse<Option<Pubkey>> {
        KeyStorageResponse::ReceivedResult(self.get_selected_pubkey())
    }

    /// Set or clear (with `None`) the selected pubkey.
    pub fn select_key(&self, key: Option<Pubkey>) -> KeyStorageResponse<()> {
        KeyStorageResponse::ReceivedResult(self.select_pubkey(key))
    }
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use std::path::PathBuf;

    use super::*;
    use enostr::Keypair;
    // Fresh temp directory per storage component so tests never share state.
    static CREATE_TMP_DIR: fn() -> Result<PathBuf, Error> =
        || Ok(tempfile::TempDir::new()?.path().to_path_buf());

    impl FileKeyStorage {
        /// Storage rooted in throwaway temp directories for tests.
        fn mock() -> Result<Self, Error> {
            Ok(Self {
                keys_directory: Directory::new(CREATE_TMP_DIR()?),
                selected_key_directory: Directory::new(CREATE_TMP_DIR()?),
            })
        }
    }

    /// Add then remove a key, checking the stored count each step.
    #[test]
    fn test_basic() {
        let kp = enostr::FullKeypair::generate().to_keypair();
        let storage = FileKeyStorage::mock().unwrap();
        let resp = storage.add_key(&kp);

        assert_eq!(resp, KeyStorageResponse::ReceivedResult(Ok(())));
        assert_num_storage(&storage.get_keys(), 1);

        assert_eq!(
            storage.remove_key(&kp),
            KeyStorageResponse::ReceivedResult(Ok(()))
        );
        assert_num_storage(&storage.get_keys(), 0);
    }

    /// Assert that the storage response contains exactly `n` keys.
    fn assert_num_storage(keys_response: &KeyStorageResponse<Vec<Keypair>>, n: usize) {
        match keys_response {
            KeyStorageResponse::ReceivedResult(Ok(keys)) => {
                assert_eq!(keys.len(), n);
            }
            KeyStorageResponse::ReceivedResult(Err(_e)) => {
                panic!("could not get keys");
            }
            KeyStorageResponse::Waiting => {
                panic!("did not receive result");
            }
        }
    }

    /// Selecting a stored pubkey succeeds and can be read back.
    #[test]
    fn test_select_key() {
        let kp = enostr::FullKeypair::generate().to_keypair();

        let storage = FileKeyStorage::mock().unwrap();
        let _ = storage.add_key(&kp);
        assert_num_storage(&storage.get_keys(), 1);

        let resp = storage.select_pubkey(Some(kp.pubkey));
        assert!(resp.is_ok());

        let resp = storage.get_selected_pubkey();

        assert!(resp.is_ok());
    }
}
|
||||
271
crates/notedeck_columns/src/storage/file_storage.rs
Normal file
271
crates/notedeck_columns/src/storage/file_storage.rs
Normal file
@@ -0,0 +1,271 @@
|
||||
use std::{
|
||||
collections::{HashMap, VecDeque},
|
||||
fs::{self, File},
|
||||
io::{self, BufRead},
|
||||
path::{Path, PathBuf},
|
||||
time::SystemTime,
|
||||
};
|
||||
|
||||
use crate::Error;
|
||||
|
||||
/// Root of notedeck's on-disk data layout. All per-category paths
/// (logs, settings, keys, db, cache) are derived from `base`.
#[derive(Debug, Clone)]
pub struct DataPath {
    // Base directory under which all app data lives.
    base: PathBuf,
}

impl DataPath {
    /// Create a `DataPath` rooted at `base`.
    pub fn new(base: impl AsRef<Path>) -> Self {
        let base = base.as_ref().to_path_buf();
        Self { base }
    }

    /// Platform-appropriate default base: the OS local-data dir joined
    /// with "notedeck". `None` when the platform has no such dir.
    pub fn default_base() -> Option<PathBuf> {
        dirs::data_local_dir().map(|pb| pb.join("notedeck"))
    }
}
|
||||
|
||||
/// Categories of app data, each mapped to its own subdirectory of the
/// base data path (see `DataPath::rel_path`).
pub enum DataPathType {
    Log,
    Setting,
    Keys,
    SelectedKey,
    Db,
    Cache,
}
|
||||
|
||||
impl DataPath {
    /// Relative subdirectory for a data category (relative to `base`).
    pub fn rel_path(&self, typ: DataPathType) -> PathBuf {
        match typ {
            DataPathType::Log => PathBuf::from("logs"),
            DataPathType::Setting => PathBuf::from("settings"),
            // Keys and the selected key live under a shared "storage" dir.
            DataPathType::Keys => PathBuf::from("storage").join("accounts"),
            DataPathType::SelectedKey => PathBuf::from("storage").join("selected_account"),
            DataPathType::Db => PathBuf::from("db"),
            DataPathType::Cache => PathBuf::from("cache"),
        }
    }

    /// Absolute path for a data category: `base` + `rel_path(typ)`.
    pub fn path(&self, typ: DataPathType) -> PathBuf {
        self.base.join(self.rel_path(typ))
    }
}
|
||||
|
||||
/// Thin handle to one filesystem directory; helpers below read, list
/// and inspect the plain-text files inside it.
#[derive(Debug, PartialEq)]
pub struct Directory {
    // Path of the directory this handle operates on.
    pub file_path: PathBuf,
}
|
||||
|
||||
impl Directory {
    /// Wrap an existing (or to-be-created) directory path.
    pub fn new(file_path: PathBuf) -> Self {
        Self { file_path }
    }

    /// Get the files in the current directory where the key is the file name and the value is the file contents
    pub fn get_files(&self) -> Result<HashMap<String, String>, Error> {
        let dir = fs::read_dir(self.file_path.clone())?;
        let map = dir
            // Skip unreadable entries and non-files; skip files whose name
            // is not valid UTF-8 or whose contents cannot be read.
            .filter_map(|f| f.ok())
            .filter(|f| f.path().is_file())
            .filter_map(|f| {
                let file_name = f.file_name().into_string().ok()?;
                let contents = fs::read_to_string(f.path()).ok()?;
                Some((file_name, contents))
            })
            .collect();

        Ok(map)
    }

    /// List the names of the regular files in this directory
    /// (entries with non-UTF-8 names are silently skipped).
    pub fn get_file_names(&self) -> Result<Vec<String>, Error> {
        let dir = fs::read_dir(self.file_path.clone())?;
        let names = dir
            .filter_map(|f| f.ok())
            .filter(|f| f.path().is_file())
            .filter_map(|f| f.file_name().into_string().ok())
            .collect();

        Ok(names)
    }

    /// Read the full contents of `file_name` inside this directory.
    /// Errors with `Error::Generic` when the file does not exist or is
    /// not a regular file.
    pub fn get_file(&self, file_name: String) -> Result<String, Error> {
        let filepath = self.file_path.clone().join(file_name.clone());

        if filepath.exists() && filepath.is_file() {
            let filepath_str = filepath
                .to_str()
                .ok_or_else(|| Error::Generic("Could not turn path to string".to_owned()))?;
            Ok(fs::read_to_string(filepath_str)?)
        } else {
            Err(Error::Generic(format!(
                "Requested file was not found: {}",
                file_name
            )))
        }
    }

    /// Read at most the last `n` lines of `file_name`, along with how
    /// many lines were returned and how many lines the file has total.
    /// Streams the file line by line, keeping only a rolling window of
    /// `n` lines, so memory use is bounded by `n` rather than file size.
    pub fn get_file_last_n_lines(&self, file_name: String, n: usize) -> Result<FileResult, Error> {
        let filepath = self.file_path.clone().join(file_name.clone());

        if filepath.exists() && filepath.is_file() {
            let file = File::open(&filepath)?;
            let reader = io::BufReader::new(file);

            // Rolling window over the most recent n lines.
            let mut queue: VecDeque<String> = VecDeque::with_capacity(n);

            let mut total_lines_in_file = 0;
            for line in reader.lines() {
                let line = line?;

                queue.push_back(line);

                // Evict the oldest line once the window exceeds n.
                if queue.len() > n {
                    queue.pop_front();
                }
                total_lines_in_file += 1;
            }

            let output_num_lines = queue.len();
            let output = queue.into_iter().collect::<Vec<String>>().join("\n");
            Ok(FileResult {
                output,
                output_num_lines,
                total_lines_in_file,
            })
        } else {
            Err(Error::Generic(format!(
                "Requested file was not found: {}",
                file_name
            )))
        }
    }

    /// Get the file name which is most recently modified in the directory
    pub fn get_most_recent(&self) -> Result<Option<String>, Error> {
        // Track (mtime, name) of the newest regular file seen so far.
        let mut most_recent: Option<(SystemTime, String)> = None;

        for entry in fs::read_dir(&self.file_path)? {
            let entry = entry?;
            let metadata = entry.metadata()?;
            if metadata.is_file() {
                let modified = metadata.modified()?;
                let file_name = entry.file_name().to_string_lossy().to_string();

                match most_recent {
                    Some((last_modified, _)) if modified > last_modified => {
                        most_recent = Some((modified, file_name));
                    }
                    None => {
                        most_recent = Some((modified, file_name));
                    }
                    _ => {}
                }
            }
        }

        Ok(most_recent.map(|(_, file_name)| file_name))
    }
}
|
||||
|
||||
/// Result of `Directory::get_file_last_n_lines`.
pub struct FileResult {
    // The requested (up to n) trailing lines, joined with '\n'.
    pub output: String,
    // Number of lines actually contained in `output`.
    pub output_num_lines: usize,
    // Total number of lines in the whole file.
    pub total_lines_in_file: usize,
}
|
||||
|
||||
/// Write the file to the directory
|
||||
pub fn write_file(directory: &Path, file_name: String, data: &str) -> Result<(), Error> {
|
||||
if !directory.exists() {
|
||||
fs::create_dir_all(directory)?
|
||||
}
|
||||
|
||||
std::fs::write(directory.join(file_name), data)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn delete_file(directory: &Path, file_name: String) -> Result<(), Error> {
|
||||
let file_to_delete = directory.join(file_name.clone());
|
||||
if file_to_delete.exists() && file_to_delete.is_file() {
|
||||
fs::remove_file(file_to_delete).map_err(Error::Io)
|
||||
} else {
|
||||
Err(Error::Generic(format!(
|
||||
"Requested file to delete was not found: {}",
|
||||
file_name
|
||||
)))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use std::path::PathBuf;

    use crate::{
        storage::file_storage::{delete_file, write_file},
        Error,
    };

    use super::Directory;

    // Fresh temp directory per call; keeps tests isolated.
    static CREATE_TMP_DIR: fn() -> Result<PathBuf, Error> =
        || Ok(tempfile::TempDir::new()?.path().to_path_buf());

    /// Round-trip a single file: write, read it back, then delete it.
    #[test]
    fn test_add_get_delete() {
        if let Ok(path) = CREATE_TMP_DIR() {
            let directory = Directory::new(path);
            let file_name = "file_test_name.txt".to_string();
            let file_contents = "test";
            let write_res = write_file(&directory.file_path, file_name.clone(), file_contents);
            assert!(write_res.is_ok());

            if let Ok(asserted_file_contents) = directory.get_file(file_name.clone()) {
                assert_eq!(asserted_file_contents, file_contents);
            } else {
                panic!("File not found");
            }

            let delete_res = delete_file(&directory.file_path, file_name);
            assert!(delete_res.is_ok());
        } else {
            panic!("could not get interactor")
        }
    }

    /// Write ten files and verify both bulk accessors (contents map and
    /// name list) see all of them, then delete each one.
    #[test]
    fn test_get_multiple() {
        if let Ok(path) = CREATE_TMP_DIR() {
            let directory = Directory::new(path);

            for i in 0..10 {
                let file_name = format!("file{}.txt", i);
                let write_res = write_file(&directory.file_path, file_name, "test");
                assert!(write_res.is_ok());
            }

            if let Ok(files) = directory.get_files() {
                for i in 0..10 {
                    let file_name = format!("file{}.txt", i);
                    assert!(files.contains_key(&file_name));
                    assert_eq!(files.get(&file_name).unwrap(), "test");
                }
            } else {
                panic!("Files not found");
            }

            if let Ok(file_names) = directory.get_file_names() {
                for i in 0..10 {
                    let file_name = format!("file{}.txt", i);
                    assert!(file_names.contains(&file_name));
                }
            } else {
                panic!("File names not found");
            }

            for i in 0..10 {
                let file_name = format!("file{}.txt", i);
                assert!(delete_file(&directory.file_path, file_name).is_ok());
            }
        } else {
            panic!("could not get interactor")
        }
    }
}
|
||||
112
crates/notedeck_columns/src/storage/key_storage_impl.rs
Normal file
112
crates/notedeck_columns/src/storage/key_storage_impl.rs
Normal file
@@ -0,0 +1,112 @@
|
||||
use enostr::{Keypair, Pubkey};
|
||||
|
||||
use super::file_key_storage::FileKeyStorage;
|
||||
use crate::Error;
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
use super::security_framework_key_storage::SecurityFrameworkKeyStorage;
|
||||
|
||||
/// Which key-storage backend is active. `None` is a stub that succeeds
/// with empty results; the macOS keychain backend only exists on macOS.
#[derive(Debug, PartialEq)]
pub enum KeyStorageType {
    None,
    FileSystem(FileKeyStorage),
    #[cfg(target_os = "macos")]
    SecurityFramework(SecurityFrameworkKeyStorage),
}
|
||||
|
||||
/// Outcome of a key-storage operation. `Waiting` allows backends that
/// complete asynchronously to report an in-flight request.
#[allow(dead_code)]
#[derive(Debug)]
pub enum KeyStorageResponse<R> {
    Waiting,
    ReceivedResult(Result<R, KeyStorageError>),
}
|
||||
|
||||
impl<R: PartialEq> PartialEq for KeyStorageResponse<R> {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
match (self, other) {
|
||||
(KeyStorageResponse::Waiting, KeyStorageResponse::Waiting) => true,
|
||||
(
|
||||
KeyStorageResponse::ReceivedResult(Ok(r1)),
|
||||
KeyStorageResponse::ReceivedResult(Ok(r2)),
|
||||
) => r1 == r2,
|
||||
(
|
||||
KeyStorageResponse::ReceivedResult(Err(_)),
|
||||
KeyStorageResponse::ReceivedResult(Err(_)),
|
||||
) => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl KeyStorageType {
|
||||
pub fn get_keys(&self) -> KeyStorageResponse<Vec<Keypair>> {
|
||||
match self {
|
||||
Self::None => KeyStorageResponse::ReceivedResult(Ok(Vec::new())),
|
||||
Self::FileSystem(f) => f.get_keys(),
|
||||
#[cfg(target_os = "macos")]
|
||||
Self::SecurityFramework(f) => f.get_keys(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn add_key(&self, key: &Keypair) -> KeyStorageResponse<()> {
|
||||
let _ = key;
|
||||
match self {
|
||||
Self::None => KeyStorageResponse::ReceivedResult(Ok(())),
|
||||
Self::FileSystem(f) => f.add_key(key),
|
||||
#[cfg(target_os = "macos")]
|
||||
Self::SecurityFramework(f) => f.add_key(key),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn remove_key(&self, key: &Keypair) -> KeyStorageResponse<()> {
|
||||
let _ = key;
|
||||
match self {
|
||||
Self::None => KeyStorageResponse::ReceivedResult(Ok(())),
|
||||
Self::FileSystem(f) => f.remove_key(key),
|
||||
#[cfg(target_os = "macos")]
|
||||
Self::SecurityFramework(f) => f.remove_key(key),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_selected_key(&self) -> KeyStorageResponse<Option<Pubkey>> {
|
||||
match self {
|
||||
Self::None => KeyStorageResponse::ReceivedResult(Ok(None)),
|
||||
Self::FileSystem(f) => f.get_selected_key(),
|
||||
#[cfg(target_os = "macos")]
|
||||
Self::SecurityFramework(_) => unimplemented!(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn select_key(&self, key: Option<Pubkey>) -> KeyStorageResponse<()> {
|
||||
match self {
|
||||
Self::None => KeyStorageResponse::ReceivedResult(Ok(())),
|
||||
Self::FileSystem(f) => f.select_key(key),
|
||||
#[cfg(target_os = "macos")]
|
||||
Self::SecurityFramework(_) => unimplemented!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Key-storage failure, categorized by which operation failed. Every
/// variant carries the underlying crate `Error`.
#[allow(dead_code)]
#[derive(Debug)]
pub enum KeyStorageError {
    Retrieval(Error),
    Addition(Error),
    Selection(Error),
    Removal(Error),
    OSError(Error),
}
|
||||
|
||||
impl std::fmt::Display for KeyStorageError {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::Retrieval(e) => write!(f, "Failed to retrieve keys: {:?}", e),
|
||||
Self::Addition(key) => write!(f, "Failed to add key: {:?}", key),
|
||||
Self::Selection(pubkey) => write!(f, "Failed to select key: {:?}", pubkey),
|
||||
Self::Removal(key) => write!(f, "Failed to remove key: {:?}", key),
|
||||
Self::OSError(e) => write!(f, "OS had an error: {:?}", e),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::error::Error for KeyStorageError {}
|
||||
695
crates/notedeck_columns/src/storage/migration.rs
Normal file
695
crates/notedeck_columns/src/storage/migration.rs
Normal file
@@ -0,0 +1,695 @@
|
||||
use enostr::{NoteId, Pubkey};
|
||||
use nostrdb::Ndb;
|
||||
use serde::{Deserialize, Deserializer};
|
||||
use tracing::error;
|
||||
|
||||
use crate::{
|
||||
accounts::AccountsRoute,
|
||||
column::{Columns, IntermediaryRoute},
|
||||
route::Route,
|
||||
timeline::{kind::ListKind, PubkeySource, Timeline, TimelineId, TimelineKind, TimelineRoute},
|
||||
ui::add_column::AddColumnRoute,
|
||||
Error,
|
||||
};
|
||||
|
||||
use super::{DataPath, DataPathType, Directory};
|
||||
|
||||
// File (inside the settings dir) where the legacy column layout was saved.
pub static COLUMNS_FILE: &str = "columns.json";

/// Read the raw legacy columns JSON from the settings directory.
/// Returns `None` when the file is missing or unreadable.
fn columns_json(path: &DataPath) -> Option<String> {
    let data_path = path.path(DataPathType::Setting);
    Directory::new(data_path)
        .get_file(COLUMNS_FILE.to_string())
        .ok()
}
|
||||
|
||||
/// Serde mirror of the legacy timeline-route JSON shape. Hex-encoded
/// ids are kept as strings here and parsed during conversion.
#[derive(Deserialize, Debug, PartialEq)]
enum MigrationTimelineRoute {
    Timeline(u32),
    Thread(String),
    Profile(String),
    Reply(String),
    Quote(String),
}
|
||||
|
||||
impl MigrationTimelineRoute {
    /// Convert the legacy route into the current `TimelineRoute`.
    /// Returns `None` when a hex id fails to parse.
    fn timeline_route(self) -> Option<TimelineRoute> {
        match self {
            MigrationTimelineRoute::Timeline(id) => {
                Some(TimelineRoute::Timeline(TimelineId::new(id)))
            }
            MigrationTimelineRoute::Thread(note_id_hex) => {
                Some(TimelineRoute::Thread(NoteId::from_hex(&note_id_hex).ok()?))
            }
            MigrationTimelineRoute::Profile(pubkey_hex) => {
                Some(TimelineRoute::Profile(Pubkey::from_hex(&pubkey_hex).ok()?))
            }
            MigrationTimelineRoute::Reply(note_id_hex) => {
                Some(TimelineRoute::Reply(NoteId::from_hex(&note_id_hex).ok()?))
            }
            MigrationTimelineRoute::Quote(note_id_hex) => {
                Some(TimelineRoute::Quote(NoteId::from_hex(&note_id_hex).ok()?))
            }
        }
    }
}
|
||||
|
||||
/// Serde mirror of the legacy top-level route JSON shape.
#[derive(Deserialize, Debug, PartialEq)]
enum MigrationRoute {
    Timeline(MigrationTimelineRoute),
    Accounts(MigrationAccountsRoute),
    Relays,
    ComposeNote,
    AddColumn(MigrationAddColumnRoute),
    Support,
}
|
||||
|
||||
impl MigrationRoute {
    /// Convert the legacy route into the current `Route`. `None` only
    /// when the nested timeline route fails to convert.
    fn route(self) -> Option<Route> {
        match self {
            MigrationRoute::Timeline(migration_timeline_route) => {
                Some(Route::Timeline(migration_timeline_route.timeline_route()?))
            }
            MigrationRoute::Accounts(migration_accounts_route) => {
                Some(Route::Accounts(migration_accounts_route.accounts_route()))
            }
            MigrationRoute::Relays => Some(Route::Relays),
            MigrationRoute::ComposeNote => Some(Route::ComposeNote),
            MigrationRoute::AddColumn(migration_add_column_route) => Some(Route::AddColumn(
                migration_add_column_route.add_column_route(),
            )),
            MigrationRoute::Support => Some(Route::Support),
        }
    }
}
|
||||
|
||||
/// Serde mirror of the legacy accounts sub-route.
#[derive(Deserialize, Debug, PartialEq)]
enum MigrationAccountsRoute {
    Accounts,
    AddAccount,
}

impl MigrationAccountsRoute {
    /// One-to-one mapping onto the current `AccountsRoute`; infallible.
    fn accounts_route(self) -> AccountsRoute {
        match self {
            MigrationAccountsRoute::Accounts => AccountsRoute::Accounts,
            MigrationAccountsRoute::AddAccount => AccountsRoute::AddAccount,
        }
    }
}
|
||||
|
||||
/// Serde mirror of the legacy add-column sub-route.
#[derive(Deserialize, Debug, PartialEq)]
enum MigrationAddColumnRoute {
    Base,
    UndecidedNotification,
    ExternalNotification,
    Hashtag,
}

impl MigrationAddColumnRoute {
    /// One-to-one mapping onto the current `AddColumnRoute`; infallible.
    fn add_column_route(self) -> AddColumnRoute {
        match self {
            MigrationAddColumnRoute::Base => AddColumnRoute::Base,
            MigrationAddColumnRoute::UndecidedNotification => AddColumnRoute::UndecidedNotification,
            MigrationAddColumnRoute::ExternalNotification => AddColumnRoute::ExternalNotification,
            MigrationAddColumnRoute::Hashtag => AddColumnRoute::Hashtag,
        }
    }
}
|
||||
|
||||
/// One legacy column: its route stack, top of stack last.
#[derive(Debug, PartialEq)]
struct MigrationColumn {
    routes: Vec<MigrationRoute>,
}

// Manual impl: in the legacy JSON a column is a bare array of routes,
// not an object with a "routes" field, so derive would not match.
impl<'de> Deserialize<'de> for MigrationColumn {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let routes = Vec::<MigrationRoute>::deserialize(deserializer)?;

        Ok(MigrationColumn { routes })
    }
}
|
||||
|
||||
/// Top-level legacy layout: columns reference timelines by numeric id
/// into the parallel `timelines` list.
#[derive(Deserialize, Debug)]
struct MigrationColumns {
    columns: Vec<MigrationColumn>,
    timelines: Vec<MigrationTimeline>,
}

/// Legacy timeline entry, keyed by the id the columns refer to.
#[derive(Deserialize, Debug, Clone, PartialEq)]
struct MigrationTimeline {
    id: u32,
    kind: MigrationTimelineKind,
}
|
||||
|
||||
impl MigrationTimeline {
    /// Convert to a live `Timeline`. `None` when the kind fails to
    /// convert or the kind cannot build a timeline from `ndb`.
    fn into_timeline(self, ndb: &Ndb, deck_user_pubkey: Option<&[u8; 32]>) -> Option<Timeline> {
        self.kind
            .into_timeline_kind()?
            .into_timeline(ndb, deck_user_pubkey)
    }
}
|
||||
|
||||
/// Serde mirror of the legacy list-kind (only contact lists existed).
#[derive(Deserialize, Clone, Debug, PartialEq)]
enum MigrationListKind {
    Contact(MigrationPubkeySource),
}

impl MigrationListKind {
    /// Convert to the current `ListKind`. `None` when the pubkey source
    /// fails to parse.
    fn list_kind(self) -> Option<ListKind> {
        match self {
            MigrationListKind::Contact(migration_pubkey_source) => {
                Some(ListKind::Contact(migration_pubkey_source.pubkey_source()?))
            }
        }
    }
}
|
||||
|
||||
/// Serde mirror of the legacy pubkey source: either an explicit hex
/// pubkey or "whoever owns the deck".
#[derive(Deserialize, Clone, Debug, PartialEq)]
enum MigrationPubkeySource {
    Explicit(String),
    DeckAuthor,
}

impl MigrationPubkeySource {
    /// Convert to the current `PubkeySource`. `None` when the explicit
    /// hex string is not a valid pubkey.
    fn pubkey_source(self) -> Option<PubkeySource> {
        match self {
            MigrationPubkeySource::Explicit(hex) => {
                Some(PubkeySource::Explicit(Pubkey::from_hex(hex.as_str()).ok()?))
            }
            MigrationPubkeySource::DeckAuthor => Some(PubkeySource::DeckAuthor),
        }
    }
}
|
||||
|
||||
/// Serde mirror of the legacy timeline-kind JSON shape.
#[derive(Deserialize, Clone, Debug, PartialEq)]
enum MigrationTimelineKind {
    List(MigrationListKind),
    Notifications(MigrationPubkeySource),
    Profile(MigrationPubkeySource),
    Universe,
    Generic,
    Hashtag(String),
}

impl MigrationTimelineKind {
    /// Convert to the current `TimelineKind`. `None` propagates from any
    /// nested pubkey/list conversion failure; plain variants always map.
    fn into_timeline_kind(self) -> Option<TimelineKind> {
        match self {
            MigrationTimelineKind::List(migration_list_kind) => {
                Some(TimelineKind::List(migration_list_kind.list_kind()?))
            }
            MigrationTimelineKind::Notifications(migration_pubkey_source) => Some(
                TimelineKind::Notifications(migration_pubkey_source.pubkey_source()?),
            ),
            MigrationTimelineKind::Profile(migration_pubkey_source) => Some(TimelineKind::Profile(
                migration_pubkey_source.pubkey_source()?,
            )),
            MigrationTimelineKind::Universe => Some(TimelineKind::Universe),
            MigrationTimelineKind::Generic => Some(TimelineKind::Generic),
            MigrationTimelineKind::Hashtag(hashtag) => Some(TimelineKind::Hashtag(hashtag)),
        }
    }
}
|
||||
|
||||
impl MigrationColumns {
    /// Materialize the legacy layout into live `Columns`.
    ///
    /// For every legacy column, each route is resolved in order:
    /// - `Timeline(id)` routes are looked up in `self.timelines` and
    ///   turned into live timelines (failures are logged and skipped;
    ///   unknown ids are silently dropped).
    /// - `Thread` routes are intentionally dropped — threads are not
    ///   restorable across the migration.
    /// - Everything else converts via `MigrationRoute::route`, skipping
    ///   routes that fail to convert.
    ///
    /// Columns that end up with no routes at all are not inserted.
    fn into_columns(self, ndb: &Ndb, deck_pubkey: Option<&[u8; 32]>) -> Columns {
        let mut columns = Columns::default();

        for column in self.columns {
            let mut cur_routes = Vec::new();
            for route in column.routes {
                match route {
                    MigrationRoute::Timeline(MigrationTimelineRoute::Timeline(timeline_id)) => {
                        if let Some(migration_tl) =
                            self.timelines.iter().find(|tl| tl.id == timeline_id)
                        {
                            let tl = migration_tl.clone().into_timeline(ndb, deck_pubkey);
                            if let Some(tl) = tl {
                                cur_routes.push(IntermediaryRoute::Timeline(tl));
                            } else {
                                error!("Problem deserializing timeline {:?}", migration_tl);
                            }
                        }
                    }
                    // Threads are deliberately not migrated.
                    MigrationRoute::Timeline(MigrationTimelineRoute::Thread(_thread)) => {}
                    _ => {
                        if let Some(route) = route.route() {
                            cur_routes.push(IntermediaryRoute::Route(route));
                        }
                    }
                }
            }
            if !cur_routes.is_empty() {
                columns.insert_intermediary_routes(cur_routes);
            }
        }
        columns
    }
}
|
||||
|
||||
fn string_to_columns(
|
||||
serialized_columns: String,
|
||||
ndb: &Ndb,
|
||||
user: Option<&[u8; 32]>,
|
||||
) -> Option<Columns> {
|
||||
Some(
|
||||
deserialize_columns_string(serialized_columns)
|
||||
.ok()?
|
||||
.into_columns(ndb, user),
|
||||
)
|
||||
}
|
||||
|
||||
/// Load the legacy columns file from `path` and convert it to live
/// `Columns`. `None` when the file is missing or fails to parse.
pub fn deserialize_columns(path: &DataPath, ndb: &Ndb, user: Option<&[u8; 32]>) -> Option<Columns> {
    string_to_columns(columns_json(path)?, ndb, user)
}
|
||||
|
||||
fn deserialize_columns_string(serialized_columns: String) -> Result<MigrationColumns, Error> {
|
||||
serde_json::from_str::<MigrationColumns>(&serialized_columns)
|
||||
.map_err(|e| Error::Generic(e.to_string()))
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::storage::migration::{
|
||||
MigrationColumn, MigrationListKind, MigrationPubkeySource, MigrationRoute,
|
||||
MigrationTimeline, MigrationTimelineKind, MigrationTimelineRoute,
|
||||
};
|
||||
|
||||
impl MigrationColumn {
|
||||
fn from_route(route: MigrationRoute) -> Self {
|
||||
Self {
|
||||
routes: vec![route],
|
||||
}
|
||||
}
|
||||
|
||||
fn from_routes(routes: Vec<MigrationRoute>) -> Self {
|
||||
Self { routes }
|
||||
}
|
||||
}
|
||||
|
||||
impl MigrationTimeline {
|
||||
fn new(id: u32, kind: MigrationTimelineKind) -> Self {
|
||||
Self { id, kind }
|
||||
}
|
||||
}
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn multi_column() {
|
||||
let route = r#"{"columns":[[{"Timeline":{"Timeline":2}}],[{"Timeline":{"Timeline":0}}],[{"Timeline":{"Timeline":1}}]],"timelines":[{"id":0,"kind":{"List":{"Contact":{"Explicit":"aa733081e4f0f79dd43023d8983265593f2b41a988671cfcef3f489b91ad93fe"}}}},{"id":1,"kind":{"Hashtag":"introductions"}},{"id":2,"kind":"Universe"}]}"#; // Multi-column
|
||||
|
||||
let deserialized_columns = deserialize_columns_string(route.to_string());
|
||||
assert!(deserialized_columns.is_ok());
|
||||
|
||||
let migration_cols = deserialized_columns.unwrap();
|
||||
|
||||
assert_eq!(migration_cols.columns.len(), 3);
|
||||
assert_eq!(
|
||||
*migration_cols.columns.first().unwrap(),
|
||||
MigrationColumn::from_route(MigrationRoute::Timeline(
|
||||
MigrationTimelineRoute::Timeline(2)
|
||||
))
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
*migration_cols.columns.get(1).unwrap(),
|
||||
MigrationColumn::from_route(MigrationRoute::Timeline(
|
||||
MigrationTimelineRoute::Timeline(0)
|
||||
))
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
*migration_cols.columns.get(2).unwrap(),
|
||||
MigrationColumn::from_route(MigrationRoute::Timeline(
|
||||
MigrationTimelineRoute::Timeline(1)
|
||||
))
|
||||
);
|
||||
|
||||
assert_eq!(migration_cols.timelines.len(), 3);
|
||||
assert_eq!(
|
||||
*migration_cols.timelines.first().unwrap(),
|
||||
MigrationTimeline::new(
|
||||
0,
|
||||
MigrationTimelineKind::List(MigrationListKind::Contact(
|
||||
MigrationPubkeySource::Explicit(
|
||||
"aa733081e4f0f79dd43023d8983265593f2b41a988671cfcef3f489b91ad93fe"
|
||||
.to_owned()
|
||||
)
|
||||
))
|
||||
)
|
||||
);
|
||||
assert_eq!(
|
||||
*migration_cols.timelines.get(1).unwrap(),
|
||||
MigrationTimeline::new(
|
||||
1,
|
||||
MigrationTimelineKind::Hashtag("introductions".to_owned())
|
||||
)
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
*migration_cols.timelines.get(2).unwrap(),
|
||||
MigrationTimeline::new(2, MigrationTimelineKind::Universe)
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn base() {
|
||||
let route = r#"{"columns":[[{"AddColumn":"Base"}]],"timelines":[]}"#;
|
||||
|
||||
let deserialized_columns = deserialize_columns_string(route.to_string());
|
||||
assert!(deserialized_columns.is_ok());
|
||||
|
||||
let migration_cols = deserialized_columns.unwrap();
|
||||
assert_eq!(migration_cols.columns.len(), 1);
|
||||
assert_eq!(
|
||||
*migration_cols.columns.first().unwrap(),
|
||||
MigrationColumn::from_route(MigrationRoute::AddColumn(MigrationAddColumnRoute::Base))
|
||||
);
|
||||
|
||||
assert!(migration_cols.timelines.is_empty());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn universe() {
|
||||
let route = r#"{"columns":[[{"Timeline":{"Timeline":0}}]],"timelines":[{"id":0,"kind":"Universe"}]}"#;
|
||||
let deserialized_columns = deserialize_columns_string(route.to_string());
|
||||
assert!(deserialized_columns.is_ok());
|
||||
|
||||
let migration_cols = deserialized_columns.unwrap();
|
||||
assert_eq!(migration_cols.columns.len(), 1);
|
||||
assert_eq!(
|
||||
*migration_cols.columns.first().unwrap(),
|
||||
MigrationColumn::from_route(MigrationRoute::Timeline(
|
||||
MigrationTimelineRoute::Timeline(0)
|
||||
))
|
||||
);
|
||||
|
||||
assert_eq!(migration_cols.timelines.len(), 1);
|
||||
assert_eq!(
|
||||
*migration_cols.timelines.first().unwrap(),
|
||||
MigrationTimeline::new(0, MigrationTimelineKind::Universe)
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn home() {
|
||||
let route = r#"{"columns":[[{"Timeline":{"Timeline":2}}]],"timelines":[{"id":2,"kind":{"List":{"Contact":{"Explicit":"aa733081e4f0f79dd43023d8983265593f2b41a988671cfcef3f489b91ad93fe"}}}}]}"#;
|
||||
|
||||
let deserialized_columns = deserialize_columns_string(route.to_string());
|
||||
assert!(deserialized_columns.is_ok());
|
||||
|
||||
let migration_cols = deserialized_columns.unwrap();
|
||||
assert_eq!(migration_cols.columns.len(), 1);
|
||||
assert_eq!(
|
||||
*migration_cols.columns.first().unwrap(),
|
||||
MigrationColumn::from_route(MigrationRoute::Timeline(
|
||||
MigrationTimelineRoute::Timeline(2)
|
||||
))
|
||||
);
|
||||
|
||||
assert_eq!(migration_cols.timelines.len(), 1);
|
||||
assert_eq!(
|
||||
*migration_cols.timelines.first().unwrap(),
|
||||
MigrationTimeline::new(
|
||||
2,
|
||||
MigrationTimelineKind::List(MigrationListKind::Contact(
|
||||
MigrationPubkeySource::Explicit(
|
||||
"aa733081e4f0f79dd43023d8983265593f2b41a988671cfcef3f489b91ad93fe"
|
||||
.to_owned()
|
||||
)
|
||||
))
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn thread() {
|
||||
let route = r#"{"columns":[[{"Timeline":{"Timeline":7}},{"Timeline":{"Thread":"fb9b0c62bc91bbe28ca428fc85e310ae38795b94fb910e0f4e12962ced971f25"}}]],"timelines":[{"id":7,"kind":{"List":{"Contact":{"Explicit":"4a0510f26880d40e432f4865cb5714d9d3c200ca6ebb16b418ae6c555f574967"}}}}]}"#;
|
||||
|
||||
let deserialized_columns = deserialize_columns_string(route.to_string());
|
||||
assert!(deserialized_columns.is_ok());
|
||||
|
||||
let migration_cols = deserialized_columns.unwrap();
|
||||
assert_eq!(migration_cols.columns.len(), 1);
|
||||
assert_eq!(
|
||||
*migration_cols.columns.first().unwrap(),
|
||||
MigrationColumn::from_routes(vec![
|
||||
MigrationRoute::Timeline(MigrationTimelineRoute::Timeline(7),),
|
||||
MigrationRoute::Timeline(MigrationTimelineRoute::Thread(
|
||||
"fb9b0c62bc91bbe28ca428fc85e310ae38795b94fb910e0f4e12962ced971f25".to_owned()
|
||||
)),
|
||||
])
|
||||
);
|
||||
|
||||
assert_eq!(migration_cols.timelines.len(), 1);
|
||||
assert_eq!(
|
||||
*migration_cols.timelines.first().unwrap(),
|
||||
MigrationTimeline::new(
|
||||
7,
|
||||
MigrationTimelineKind::List(MigrationListKind::Contact(
|
||||
MigrationPubkeySource::Explicit(
|
||||
"4a0510f26880d40e432f4865cb5714d9d3c200ca6ebb16b418ae6c555f574967"
|
||||
.to_owned()
|
||||
)
|
||||
))
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn profile() {
|
||||
let route = r#"{"columns":[[{"Timeline":{"Timeline":7}},{"Timeline":{"Profile":"32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245"}}]],"timelines":[{"id":7,"kind":{"List":{"Contact":{"Explicit":"4a0510f26880d40e432f4865cb5714d9d3c200ca6ebb16b418ae6c555f574967"}}}}]}"#;
|
||||
|
||||
let deserialized_columns = deserialize_columns_string(route.to_string());
|
||||
assert!(deserialized_columns.is_ok());
|
||||
|
||||
let migration_cols = deserialized_columns.unwrap();
|
||||
assert_eq!(migration_cols.columns.len(), 1);
|
||||
assert_eq!(
|
||||
*migration_cols.columns.first().unwrap(),
|
||||
MigrationColumn::from_routes(vec![
|
||||
MigrationRoute::Timeline(MigrationTimelineRoute::Timeline(7),),
|
||||
MigrationRoute::Timeline(MigrationTimelineRoute::Profile(
|
||||
"32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245".to_owned()
|
||||
)),
|
||||
])
|
||||
);
|
||||
|
||||
assert_eq!(migration_cols.timelines.len(), 1);
|
||||
assert_eq!(
|
||||
*migration_cols.timelines.first().unwrap(),
|
||||
MigrationTimeline::new(
|
||||
7,
|
||||
MigrationTimelineKind::List(MigrationListKind::Contact(
|
||||
MigrationPubkeySource::Explicit(
|
||||
"4a0510f26880d40e432f4865cb5714d9d3c200ca6ebb16b418ae6c555f574967"
|
||||
.to_owned()
|
||||
)
|
||||
))
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn your_notifs() {
|
||||
let route = r#"{"columns":[[{"Timeline":{"Timeline":5}}]],"timelines":[{"id":5,"kind":{"Notifications":"DeckAuthor"}}]}"#;
|
||||
|
||||
let deserialized_columns = deserialize_columns_string(route.to_string());
|
||||
assert!(deserialized_columns.is_ok());
|
||||
|
||||
let migration_cols = deserialized_columns.unwrap();
|
||||
assert_eq!(migration_cols.columns.len(), 1);
|
||||
assert_eq!(
|
||||
*migration_cols.columns.first().unwrap(),
|
||||
MigrationColumn::from_route(MigrationRoute::Timeline(
|
||||
MigrationTimelineRoute::Timeline(5)
|
||||
))
|
||||
);
|
||||
|
||||
assert_eq!(migration_cols.timelines.len(), 1);
|
||||
assert_eq!(
|
||||
*migration_cols.timelines.first().unwrap(),
|
||||
MigrationTimeline::new(
|
||||
5,
|
||||
MigrationTimelineKind::Notifications(MigrationPubkeySource::DeckAuthor)
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
fn undecided_notifs() {
    // Notifications column still waiting for an account choice: routes only,
    // no timeline entries.
    let route = r#"{"columns":[[{"AddColumn":"Base"},{"AddColumn":"UndecidedNotification"}]],"timelines":[]}"#;

    let parsed = deserialize_columns_string(route.to_string());
    assert!(parsed.is_ok());
    let cols = parsed.unwrap();

    assert_eq!(cols.columns.len(), 1);
    let expected = MigrationColumn::from_routes(vec![
        MigrationRoute::AddColumn(MigrationAddColumnRoute::Base),
        MigrationRoute::AddColumn(MigrationAddColumnRoute::UndecidedNotification),
    ]);
    assert_eq!(*cols.columns.first().unwrap(), expected);

    assert!(cols.timelines.is_empty());
}
|
||||
|
||||
#[test]
fn extern_notifs() {
    // Notifications timeline for an explicitly-chosen pubkey (hex string).
    let route = r#"{"columns":[[{"Timeline":{"Timeline":4}}]],"timelines":[{"id":4,"kind":{"Notifications":{"Explicit":"32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245"}}}]}"#;

    let parsed = deserialize_columns_string(route.to_string());
    assert!(parsed.is_ok());
    let cols = parsed.unwrap();

    assert_eq!(cols.columns.len(), 1);
    let expected_col = MigrationColumn::from_route(MigrationRoute::Timeline(
        MigrationTimelineRoute::Timeline(4),
    ));
    assert_eq!(*cols.columns.first().unwrap(), expected_col);

    assert_eq!(cols.timelines.len(), 1);
    let expected_tl = MigrationTimeline::new(
        4,
        MigrationTimelineKind::Notifications(MigrationPubkeySource::Explicit(
            "32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245".to_owned(),
        )),
    );
    assert_eq!(*cols.timelines.first().unwrap(), expected_tl);
}
|
||||
|
||||
#[test]
fn hashtag() {
    // A hashtag timeline column round-trips through deserialization.
    let route = r#"{"columns":[[{"Timeline":{"Timeline":6}}]],"timelines":[{"id":6,"kind":{"Hashtag":"notedeck"}}]}"#;

    let parsed = deserialize_columns_string(route.to_string());
    assert!(parsed.is_ok());
    let cols = parsed.unwrap();

    assert_eq!(cols.columns.len(), 1);
    let expected_col = MigrationColumn::from_route(MigrationRoute::Timeline(
        MigrationTimelineRoute::Timeline(6),
    ));
    assert_eq!(*cols.columns.first().unwrap(), expected_col);

    assert_eq!(cols.timelines.len(), 1);
    let expected_tl =
        MigrationTimeline::new(6, MigrationTimelineKind::Hashtag("notedeck".to_owned()));
    assert_eq!(*cols.timelines.first().unwrap(), expected_tl);
}
|
||||
|
||||
#[test]
fn support() {
    // The Support screen is a plain route with no timelines attached.
    let route = r#"{"columns":[[{"AddColumn":"Base"},"Support"]],"timelines":[]}"#;

    let parsed = deserialize_columns_string(route.to_string());
    assert!(parsed.is_ok());
    let cols = parsed.unwrap();

    assert_eq!(cols.columns.len(), 1);
    let expected = MigrationColumn::from_routes(vec![
        MigrationRoute::AddColumn(MigrationAddColumnRoute::Base),
        MigrationRoute::Support,
    ]);
    assert_eq!(*cols.columns.first().unwrap(), expected);

    assert!(cols.timelines.is_empty());
}
|
||||
|
||||
#[test]
fn post() {
    // The compose-note screen is a plain route with no timelines attached.
    let route = r#"{"columns":[[{"AddColumn":"Base"},"ComposeNote"]],"timelines":[]}"#;

    let parsed = deserialize_columns_string(route.to_string());
    assert!(parsed.is_ok());
    let cols = parsed.unwrap();

    assert_eq!(cols.columns.len(), 1);
    let expected = MigrationColumn::from_routes(vec![
        MigrationRoute::AddColumn(MigrationAddColumnRoute::Base),
        MigrationRoute::ComposeNote,
    ]);
    assert_eq!(*cols.columns.first().unwrap(), expected);

    assert!(cols.timelines.is_empty());
}
|
||||
|
||||
#[test]
fn relay() {
    // The relays screen is a plain route with no timelines attached.
    let route = r#"{"columns":[[{"AddColumn":"Base"},"Relays"]],"timelines":[]}"#;

    let parsed = deserialize_columns_string(route.to_string());
    assert!(parsed.is_ok());
    let cols = parsed.unwrap();

    assert_eq!(cols.columns.len(), 1);
    let expected = MigrationColumn::from_routes(vec![
        MigrationRoute::AddColumn(MigrationAddColumnRoute::Base),
        MigrationRoute::Relays,
    ]);
    assert_eq!(*cols.columns.first().unwrap(), expected);

    assert!(cols.timelines.is_empty());
}
|
||||
|
||||
#[test]
|
||||
fn accounts() {
|
||||
let route =
|
||||
r#"{"columns":[[{"AddColumn":"Base"},{"Accounts":"Accounts"}]],"timelines":[]}"#;
|
||||
|
||||
let deserialized_columns = deserialize_columns_string(route.to_string());
|
||||
assert!(deserialized_columns.is_ok());
|
||||
|
||||
let migration_cols = deserialized_columns.unwrap();
|
||||
assert_eq!(migration_cols.columns.len(), 1);
|
||||
assert_eq!(
|
||||
*migration_cols.columns.first().unwrap(),
|
||||
MigrationColumn::from_routes(vec![
|
||||
MigrationRoute::AddColumn(MigrationAddColumnRoute::Base),
|
||||
MigrationRoute::Accounts(MigrationAccountsRoute::Accounts),
|
||||
])
|
||||
);
|
||||
|
||||
assert!(migration_cols.timelines.is_empty());
|
||||
}
|
||||
|
||||
#[test]
fn login() {
    // Three-deep route stack: add-column -> accounts -> add-account (login).
    let route = r#"{"columns":[[{"AddColumn":"Base"},{"Accounts":"Accounts"},{"Accounts":"AddAccount"}]],"timelines":[]}"#;

    let parsed = deserialize_columns_string(route.to_string());
    assert!(parsed.is_ok());
    let cols = parsed.unwrap();

    assert_eq!(cols.columns.len(), 1);
    let expected = MigrationColumn::from_routes(vec![
        MigrationRoute::AddColumn(MigrationAddColumnRoute::Base),
        MigrationRoute::Accounts(MigrationAccountsRoute::Accounts),
        MigrationRoute::Accounts(MigrationAccountsRoute::AddAccount),
    ]);
    assert_eq!(*cols.columns.first().unwrap(), expected);

    assert!(cols.timelines.is_empty());
}
|
||||
}
|
||||
15
crates/notedeck_columns/src/storage/mod.rs
Normal file
15
crates/notedeck_columns/src/storage/mod.rs
Normal file
@@ -0,0 +1,15 @@
|
||||
// Storage backends for notedeck_columns: on-disk deck/column persistence,
// legacy-columns migration, and key storage implementations.

mod decks;
mod file_key_storage;
mod file_storage;
mod migration;

pub use decks::{load_decks_cache, save_decks_cache, DECKS_CACHE_FILE};
pub use file_key_storage::FileKeyStorage;
pub use file_storage::{delete_file, write_file, DataPath, DataPathType, Directory};
pub use migration::{deserialize_columns, COLUMNS_FILE};

// Keychain-backed key storage only exists on macOS (Security framework).
#[cfg(target_os = "macos")]
mod security_framework_key_storage;

pub mod key_storage_impl;
pub use key_storage_impl::{KeyStorageResponse, KeyStorageType};
|
||||
@@ -0,0 +1,198 @@
|
||||
use std::borrow::Cow;
|
||||
|
||||
use enostr::{Keypair, Pubkey, SecretKey};
|
||||
use security_framework::{
|
||||
item::{ItemClass, ItemSearchOptions, Limit, SearchResult},
|
||||
passwords::{delete_generic_password, set_generic_password},
|
||||
};
|
||||
use tracing::error;
|
||||
|
||||
use crate::Error;
|
||||
|
||||
use super::{key_storage_impl::KeyStorageError, KeyStorageResponse};
|
||||
|
||||
/// Key storage backed by the macOS Security framework (keychain).
///
/// Each keypair is stored as a generic keychain password under
/// `service_name`, with the pubkey's hex string as the account name and the
/// raw secret-key bytes as the password data.
#[derive(Debug, PartialEq)]
pub struct SecurityFrameworkKeyStorage {
    // Keychain "service" under which all entries live; Cow so a static
    // service name (e.g. in tests) needs no allocation.
    pub service_name: Cow<'static, str>,
}
|
||||
|
||||
impl SecurityFrameworkKeyStorage {
|
||||
pub fn new(service_name: String) -> Self {
|
||||
SecurityFrameworkKeyStorage {
|
||||
service_name: Cow::Owned(service_name),
|
||||
}
|
||||
}
|
||||
|
||||
fn add_key_internal(&self, key: &Keypair) -> Result<(), KeyStorageError> {
|
||||
match set_generic_password(
|
||||
&self.service_name,
|
||||
key.pubkey.hex().as_str(),
|
||||
key.secret_key
|
||||
.as_ref()
|
||||
.map_or_else(|| &[] as &[u8], |sc| sc.as_secret_bytes()),
|
||||
) {
|
||||
Ok(_) => Ok(()),
|
||||
Err(e) => Err(KeyStorageError::Addition(Error::Generic(e.to_string()))),
|
||||
}
|
||||
}
|
||||
|
||||
fn get_pubkey_strings(&self) -> Vec<String> {
|
||||
let search_results = ItemSearchOptions::new()
|
||||
.class(ItemClass::generic_password())
|
||||
.service(&self.service_name)
|
||||
.load_attributes(true)
|
||||
.limit(Limit::All)
|
||||
.search();
|
||||
|
||||
let mut accounts = Vec::new();
|
||||
|
||||
if let Ok(search_results) = search_results {
|
||||
for result in search_results {
|
||||
if let Some(map) = result.simplify_dict() {
|
||||
if let Some(val) = map.get("acct") {
|
||||
accounts.push(val.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
accounts
|
||||
}
|
||||
|
||||
fn get_pubkeys(&self) -> Vec<Pubkey> {
|
||||
self.get_pubkey_strings()
|
||||
.iter_mut()
|
||||
.filter_map(|pubkey_str| Pubkey::from_hex(pubkey_str.as_str()).ok())
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn get_privkey_bytes_for(&self, account: &str) -> Option<Vec<u8>> {
|
||||
let search_result = ItemSearchOptions::new()
|
||||
.class(ItemClass::generic_password())
|
||||
.service(&self.service_name)
|
||||
.load_data(true)
|
||||
.account(account)
|
||||
.search();
|
||||
|
||||
if let Ok(results) = search_result {
|
||||
if let Some(SearchResult::Data(vec)) = results.first() {
|
||||
return Some(vec.clone());
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
fn get_secret_key_for_pubkey(&self, pubkey: &Pubkey) -> Option<SecretKey> {
|
||||
if let Some(bytes) = self.get_privkey_bytes_for(pubkey.hex().as_str()) {
|
||||
SecretKey::from_slice(bytes.as_slice()).ok()
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn get_all_keypairs(&self) -> Vec<Keypair> {
|
||||
self.get_pubkeys()
|
||||
.iter()
|
||||
.map(|pubkey| {
|
||||
let maybe_secret = self.get_secret_key_for_pubkey(pubkey);
|
||||
Keypair::new(*pubkey, maybe_secret)
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn delete_key(&self, pubkey: &Pubkey) -> Result<(), KeyStorageError> {
|
||||
match delete_generic_password(&self.service_name, pubkey.hex().as_str()) {
|
||||
Ok(_) => Ok(()),
|
||||
Err(e) => {
|
||||
error!("delete key error {}", e);
|
||||
Err(KeyStorageError::Removal(Error::Generic(e.to_string())))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl SecurityFrameworkKeyStorage {
|
||||
pub fn add_key(&self, key: &Keypair) -> KeyStorageResponse<()> {
|
||||
KeyStorageResponse::ReceivedResult(self.add_key_internal(key))
|
||||
}
|
||||
|
||||
pub fn get_keys(&self) -> KeyStorageResponse<Vec<Keypair>> {
|
||||
KeyStorageResponse::ReceivedResult(Ok(self.get_all_keypairs()))
|
||||
}
|
||||
|
||||
pub fn remove_key(&self, key: &Keypair) -> KeyStorageResponse<()> {
|
||||
KeyStorageResponse::ReceivedResult(self.delete_key(&key.pubkey))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use enostr::FullKeypair;

    static TEST_SERVICE_NAME: &str = "NOTEDECKTEST";
    static STORAGE: SecurityFrameworkKeyStorage = SecurityFrameworkKeyStorage {
        service_name: Cow::Borrowed(TEST_SERVICE_NAME),
    };

    // individual tests are ignored so test runner doesn't run them all concurrently
    // TODO: a way to run them all serially should be devised

    #[test]
    #[ignore]
    fn add_and_remove_test_pubkey_only() {
        let baseline = STORAGE.get_pubkeys().len();

        let keypair = FullKeypair::generate().to_keypair();
        assert!(STORAGE.add_key_internal(&keypair).is_ok());

        assert_eq!(STORAGE.get_pubkeys().len() - baseline, 1);

        assert!(STORAGE.delete_key(&keypair.pubkey).is_ok());

        assert_eq!(STORAGE.get_pubkeys().len() - baseline, 0);
    }

    // Round-trip n fresh keypairs through the keychain and verify full
    // recovery, then clean up and verify storage is empty again.
    fn add_and_remove_full_n(n: usize) {
        // there must be zero keys in storage for the test to work as intended
        assert_eq!(STORAGE.get_all_keypairs().len(), 0);

        let expected_keypairs: Vec<Keypair> = (0..n)
            .map(|_| FullKeypair::generate().to_keypair())
            .collect();

        for keypair in &expected_keypairs {
            assert!(STORAGE.add_key_internal(keypair).is_ok());
        }

        assert_eq!(expected_keypairs, STORAGE.get_all_keypairs());

        for keypair in &expected_keypairs {
            assert!(STORAGE.delete_key(&keypair.pubkey).is_ok());
        }

        assert_eq!(STORAGE.get_all_keypairs().len(), 0);
    }

    #[test]
    #[ignore]
    fn add_and_remove_full() {
        add_and_remove_full_n(1);
    }

    #[test]
    #[ignore]
    fn add_and_remove_full_10() {
        add_and_remove_full_n(10);
    }
}
|
||||
Reference in New Issue
Block a user