@@ -3,6 +3,8 @@ use std::{collections::HashMap, fmt, str::FromStr};
use enostr::{NoteId, Pubkey};
use nostrdb::Ndb;
use serde::{Deserialize, Serialize};
use strum::IntoEnumIterator;
use strum_macros::EnumIter;
use tracing::{error, info};

use crate::{
@@ -10,8 +12,8 @@ use crate::{
column::{Columns, IntermediaryRoute},
decks::{Deck, Decks, DecksCache},
route::Route,
timeline::{kind::ListKind, PubkeySource, TimelineKind, TimelineRoute},
ui::add_column::AddColumnRoute,
timeline::{kind::ListKind, AlgoTimeline, PubkeySource, TimelineKind, TimelineRoute},
ui::add_column::{AddAlgoRoute, AddColumnRoute},
Error,
};

@@ -299,7 +301,7 @@ fn deserialize_columns(ndb: &Ndb, deck_user: &[u8; 32], serialized: Vec<Vec<Stri
let mut cur_routes = Vec::new();
for serialized_route in serialized_routes {
let selections = Selection::from_serialized(&serialized_route);
if let Some(route_intermediary) = selections_to_route(selections.clone()) {
if let Some(route_intermediary) = selections_to_route(&selections) {
if let Some(ir) = route_intermediary.intermediary_route(ndb, Some(deck_user)) {
match &ir {
IntermediaryRoute::Route(Route::Timeline(TimelineRoute::Thread(_)))
@@ -325,19 +327,75 @@ fn deserialize_columns(ndb: &Ndb, deck_user: &[u8; 32], serialized: Vec<Vec<Stri
cols
}

/// Different token types for our deck serializer/deserializer
///
/// We have more than one token type so that we can avoid match catch-alls
/// in different parts of our parser.
#[derive(Clone, Debug)]
enum Selection {
Keyword(Keyword),
Algo(AlgoKeyword),
List(ListKeyword),
PubkeySource(PubkeySourceKeyword),
Payload(String),
}
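
// Illustrative sketch (not part of this diff): each variant above maps to one
// ":"-separated token in a serialized column string. For example,
// "last_per_pubkey" parses to Selection::Algo(AlgoKeyword::LastPerPubkey),
// "contact" to Selection::List(ListKeyword::Contact), "deck_author" to
// Selection::PubkeySource(PubkeySourceKeyword::DeckAuthor), and an
// unrecognized token such as a hex pubkey to Selection::Payload(..).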

#[derive(Clone, PartialEq, Debug)]
impl FromStr for Selection {
type Err = Error;

fn from_str(serialized: &str) -> Result<Self, Self::Err> {
Ok(parse_selection(serialized))
}
}

#[derive(Clone, PartialEq, Eq, Debug, EnumIter)]
enum AlgoKeyword {
LastPerPubkey,
}

impl AlgoKeyword {
#[inline]
pub fn name(&self) -> &'static str {
match self {
AlgoKeyword::LastPerPubkey => "last_per_pubkey",
}
}
}

#[derive(Clone, PartialEq, Eq, Debug, EnumIter)]
enum ListKeyword {
Contact,
}

impl ListKeyword {
#[inline]
pub fn name(&self) -> &'static str {
match self {
ListKeyword::Contact => "contact",
}
}
}

#[derive(Clone, PartialEq, Eq, Debug, EnumIter)]
enum PubkeySourceKeyword {
Explicit,
DeckAuthor,
}

impl PubkeySourceKeyword {
#[inline]
pub fn name(&self) -> &'static str {
match self {
PubkeySourceKeyword::Explicit => "explicit",
PubkeySourceKeyword::DeckAuthor => "deck_author",
}
}
}

#[derive(Clone, PartialEq, Eq, Debug, EnumIter)]
enum Keyword {
Notifs,
Universe,
Contact,
Explicit,
DeckAuthor,
Profile,
Hashtag,
Generic,
@@ -350,6 +408,7 @@ enum Keyword {
Relay,
Compose,
Column,
AlgoSelection,
NotificationSelection,
ExternalNotifSelection,
HashtagSelection,
@@ -361,60 +420,104 @@ enum Keyword {
}

impl Keyword {
const MAPPING: &'static [(&'static str, Keyword, bool)] = &[
("notifs", Keyword::Notifs, false),
("universe", Keyword::Universe, false),
("contact", Keyword::Contact, false),
("explicit", Keyword::Explicit, true),
("deck_author", Keyword::DeckAuthor, false),
("profile", Keyword::Profile, false),
("hashtag", Keyword::Hashtag, true),
("generic", Keyword::Generic, false),
("thread", Keyword::Thread, true),
("reply", Keyword::Reply, true),
("quote", Keyword::Quote, true),
("account", Keyword::Account, false),
("show", Keyword::Show, false),
("new", Keyword::New, false),
("relay", Keyword::Relay, false),
("compose", Keyword::Compose, false),
("column", Keyword::Column, false),
(
"notification_selection",
Keyword::NotificationSelection,
false,
),
(
"external_notif_selection",
Keyword::ExternalNotifSelection,
false,
),
("hashtag_selection", Keyword::HashtagSelection, false),
("support", Keyword::Support, false),
("deck", Keyword::Deck, false),
("edit", Keyword::Edit, true),
];

fn has_payload(&self) -> bool {
Keyword::MAPPING
.iter()
.find(|(_, keyword, _)| keyword == self)
.map(|(_, _, has_payload)| *has_payload)
.unwrap_or(false)
fn name(&self) -> &'static str {
match self {
Keyword::Notifs => "notifs",
Keyword::Universe => "universe",
Keyword::Profile => "profile",
Keyword::Hashtag => "hashtag",
Keyword::Generic => "generic",
Keyword::Thread => "thread",
Keyword::Reply => "reply",
Keyword::Quote => "quote",
Keyword::Account => "account",
Keyword::Show => "show",
Keyword::New => "new",
Keyword::Relay => "relay",
Keyword::Compose => "compose",
Keyword::Column => "column",
Keyword::AlgoSelection => "algo_selection",
Keyword::NotificationSelection => "notification_selection",
Keyword::ExternalNotifSelection => "external_notif_selection",
Keyword::IndividualSelection => "individual_selection",
Keyword::ExternalIndividualSelection => "external_individual_selection",
Keyword::HashtagSelection => "hashtag_selection",
Keyword::Support => "support",
Keyword::Deck => "deck",
Keyword::Edit => "edit",
}
}
}

impl fmt::Display for Keyword {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
if let Some(name) = Keyword::MAPPING
.iter()
.find(|(_, keyword, _)| keyword == self)
.map(|(name, _, _)| *name)
{
write!(f, "{}", name)
} else {
write!(f, "UnknownKeyword")
write!(f, "{}", self.name())
}
}

impl fmt::Display for AlgoKeyword {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.name())
}
}

impl fmt::Display for ListKeyword {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.name())
}
}

impl FromStr for PubkeySourceKeyword {
type Err = Error;

fn from_str(serialized: &str) -> Result<Self, Self::Err> {
for keyword in Self::iter() {
if serialized == keyword.name() {
return Ok(keyword);
}
}

Err(Error::Generic(
"Could not convert string to Keyword enum".to_owned(),
))
}
}

impl FromStr for ListKeyword {
type Err = Error;

fn from_str(serialized: &str) -> Result<Self, Self::Err> {
for keyword in Self::iter() {
if serialized == keyword.name() {
return Ok(keyword);
}
}

Err(Error::Generic(
"Could not convert string to Keyword enum".to_owned(),
))
}
}

impl fmt::Display for PubkeySourceKeyword {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.name())
}
}

impl FromStr for AlgoKeyword {
type Err = Error;

fn from_str(serialized: &str) -> Result<Self, Self::Err> {
for keyword in Self::iter() {
if serialized == keyword.name() {
return Ok(keyword);
}
}

Err(Error::Generic(
"Could not convert string to Keyword enum".to_owned(),
))
}
}

@@ -422,13 +525,15 @@ impl FromStr for Keyword {
type Err = Error;

fn from_str(serialized: &str) -> Result<Self, Self::Err> {
Keyword::MAPPING
.iter()
.find(|(name, _, _)| *name == serialized)
.map(|(_, keyword, _)| keyword.clone())
.ok_or(Error::Generic(
"Could not convert string to Keyword enum".to_owned(),
))
for keyword in Self::iter() {
if serialized == keyword.name() {
return Ok(keyword);
}
}

Err(Error::Generic(
"Could not convert string to Keyword enum".to_owned(),
))
}
}
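
// Illustrative sketch (not part of this diff): in the updated code, each
// keyword enum keeps Display and FromStr in sync through name(): Display
// writes name(), and FromStr scans Self::iter() for a matching name(). So,
// for example, AlgoKeyword::from_str("last_per_pubkey") yields
// Ok(AlgoKeyword::LastPerPubkey) and AlgoKeyword::LastPerPubkey.to_string()
// yields "last_per_pubkey".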

@@ -458,10 +563,19 @@ fn serialize_route(route: &Route, columns: &Columns) -> Option<String> {
match &timeline.kind {
TimelineKind::List(list_kind) => match list_kind {
ListKind::Contact(pubkey_source) => {
selections.push(Selection::Keyword(Keyword::Contact));
selections.push(Selection::List(ListKeyword::Contact));
selections.extend(generate_pubkey_selections(pubkey_source));
}
},
TimelineKind::Algo(AlgoTimeline::LastPerPubkey(list_kind)) => {
match list_kind {
ListKind::Contact(pk_src) => {
selections.push(Selection::Algo(AlgoKeyword::LastPerPubkey));
selections.push(Selection::List(ListKeyword::Contact));
selections.extend(generate_pubkey_selections(pk_src));
}
}
}
TimelineKind::Notifications(pubkey_source) => {
selections.push(Selection::Keyword(Keyword::Notifs));
selections.extend(generate_pubkey_selections(pubkey_source));
@@ -493,7 +607,7 @@ fn serialize_route(route: &Route, columns: &Columns) -> Option<String> {
}
TimelineRoute::Profile(pubkey) => {
selections.push(Selection::Keyword(Keyword::Profile));
selections.push(Selection::Keyword(Keyword::Explicit));
selections.push(Selection::PubkeySource(PubkeySourceKeyword::Explicit));
selections.push(Selection::Payload(pubkey.hex()));
}
TimelineRoute::Reply(note_id) => {
@@ -518,6 +632,16 @@ fn serialize_route(route: &Route, columns: &Columns) -> Option<String> {
selections.push(Selection::Keyword(Keyword::Column));
match add_column_route {
AddColumnRoute::Base => (),
AddColumnRoute::Algo(algo_route) => match algo_route {
AddAlgoRoute::Base => {
selections.push(Selection::Keyword(Keyword::AlgoSelection))
}

AddAlgoRoute::LastPerPubkey => {
selections.push(Selection::Keyword(Keyword::AlgoSelection));
selections.push(Selection::Algo(AlgoKeyword::LastPerPubkey));
}
},
AddColumnRoute::UndecidedNotification => {
selections.push(Selection::Keyword(Keyword::NotificationSelection))
}
@@ -569,109 +693,149 @@ fn generate_pubkey_selections(source: &PubkeySource) -> Vec<Selection> {
let mut selections = Vec::new();
match source {
PubkeySource::Explicit(pubkey) => {
selections.push(Selection::Keyword(Keyword::Explicit));
selections.push(Selection::PubkeySource(PubkeySourceKeyword::Explicit));
selections.push(Selection::Payload(pubkey.hex()));
}
PubkeySource::DeckAuthor => {
selections.push(Selection::Keyword(Keyword::DeckAuthor));
selections.push(Selection::PubkeySource(PubkeySourceKeyword::DeckAuthor));
}
}
selections
}
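
// Illustrative sketch (not part of this diff): in the updated helper, an
// explicit source contributes the tokens
// [PubkeySource(Explicit), Payload(<hex pubkey>)], which serialize as
// "explicit:<hex>", while PubkeySource::DeckAuthor contributes the single
// token "deck_author".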

/// Parses a selection
fn parse_selection(token: &str) -> Selection {
AlgoKeyword::from_str(token)
.map(Selection::Algo)
.or_else(|_| ListKeyword::from_str(token).map(Selection::List))
.or_else(|_| PubkeySourceKeyword::from_str(token).map(Selection::PubkeySource))
.or_else(|_| Keyword::from_str(token).map(Selection::Keyword))
.unwrap_or_else(|_| Selection::Payload(token.to_owned()))
}
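
// Illustrative sketch (not part of this diff): the keyword classes are tried
// in order (algo, then list, then pubkey source, then general keywords), and a
// token that matches none of them falls through to Selection::Payload. For
// example, parse_selection("contact") gives Selection::List(ListKeyword::Contact),
// while parse_selection("<64-char hex>") gives Selection::Payload("<64-char hex>").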

impl Selection {
fn from_serialized(serialized: &str) -> Vec<Self> {
fn from_serialized(buffer: &str) -> Vec<Self> {
let mut selections = Vec::new();
let seperator = ":";
let sep_len = seperator.len();
let mut pos = 0;

let mut serialized_copy = serialized.to_string();
let mut buffer = serialized_copy.as_mut();

let mut next_is_payload = false;
while let Some(index) = buffer.find(seperator) {
if let Ok(keyword) = Keyword::from_str(&buffer[..index]) {
selections.push(Selection::Keyword(keyword.clone()));
if keyword.has_payload() {
next_is_payload = true;
}
}

buffer = &mut buffer[index + seperator.len()..];
while let Some(offset) = buffer[pos..].find(seperator) {
selections.push(parse_selection(&buffer[pos..pos + offset]));
pos = pos + offset + sep_len;
}

if next_is_payload {
selections.push(Selection::Payload(buffer.to_string()));
} else if let Ok(keyword) = Keyword::from_str(buffer) {
selections.push(Selection::Keyword(keyword.clone()));
}
selections.push(parse_selection(&buffer[pos..]));

selections
}
}
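
// Illustrative sketch (not part of this diff): the updated tokenizer splits on
// ":" and feeds every piece through parse_selection, so roughly:
//
//     let tokens = Selection::from_serialized("contact:explicit:<64-char hex>");
//     // -> [List(Contact), PubkeySource(Explicit), Payload("<hex>")]
//     let route = selections_to_route(&tokens);
//     // -> Some(CleanIntermediaryRoute::ToTimeline(TimelineKind::contact_list(..)))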

fn selections_to_route(selections: Vec<Selection>) -> Option<CleanIntermediaryRoute> {
/// Parse an explicit:abdef... or deck_author from a Selection token stream.
///
/// Also handle the case where there is nothing. We assume this means deck_author.
fn parse_pubkey_src_selection(tokens: &[Selection]) -> Option<PubkeySource> {
match tokens.first() {
// we handle bare payloads and assume they are explicit pubkey sources
Some(Selection::Payload(hex)) => {
let pk = Pubkey::from_hex(hex.as_str()).ok()?;
Some(PubkeySource::Explicit(pk))
}

Some(Selection::PubkeySource(PubkeySourceKeyword::Explicit)) => {
if let Selection::Payload(hex) = tokens.get(1)? {
let pk = Pubkey::from_hex(hex.as_str()).ok()?;
Some(PubkeySource::Explicit(pk))
} else {
None
}
}

None | Some(Selection::PubkeySource(PubkeySourceKeyword::DeckAuthor)) => {
Some(PubkeySource::DeckAuthor)
}

Some(Selection::Keyword(_kw)) => None,
Some(Selection::Algo(_kw)) => None,
Some(Selection::List(_kw)) => None,
}
}
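
// Illustrative sketch (not part of this diff): the accepted shapes here are
// "explicit:<hex>", a bare "<hex>" payload (assumed explicit), and either
// "deck_author" or no token at all (both read as PubkeySource::DeckAuthor).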

/// Parse ListKinds from Selections
fn parse_list_kind_selections(tokens: &[Selection]) -> Option<ListKind> {
// only list selections are valid in this position
let list_kw = if let Selection::List(list_kw) = tokens.first()? {
list_kw
} else {
return None;
};

let pubkey_src = parse_pubkey_src_selection(&tokens[1..])?;

Some(match list_kw {
ListKeyword::Contact => ListKind::contact_list(pubkey_src),
})
}
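
// Illustrative sketch (not part of this diff): a "contact:deck_author" suffix
// parses to ListKind::contact_list(PubkeySource::DeckAuthor); this is the shape
// the last_per_pubkey algo timeline expects after its leading algo token.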

fn selections_to_route(selections: &[Selection]) -> Option<CleanIntermediaryRoute> {
match selections.first()? {
Selection::Keyword(Keyword::Contact) => match selections.get(1)? {
Selection::Keyword(Keyword::Explicit) => {
if let Selection::Payload(hex) = selections.get(2)? {
Some(CleanIntermediaryRoute::ToTimeline(
TimelineKind::contact_list(PubkeySource::Explicit(
Pubkey::from_hex(hex.as_str()).ok()?,
)),
))
} else {
None
Selection::Keyword(Keyword::AlgoSelection) => {
let r = match selections.get(1) {
None => AddColumnRoute::Algo(AddAlgoRoute::Base),
Some(Selection::Algo(algo_kw)) => match algo_kw {
AlgoKeyword::LastPerPubkey => AddColumnRoute::Algo(AddAlgoRoute::LastPerPubkey),
},
// other keywords are invalid here
Some(_) => {
return None;
}
}
Selection::Keyword(Keyword::DeckAuthor) => Some(CleanIntermediaryRoute::ToTimeline(
TimelineKind::contact_list(PubkeySource::DeckAuthor),
)),
_ => None,
},
Selection::Keyword(Keyword::Notifs) => match selections.get(1)? {
Selection::Keyword(Keyword::Explicit) => {
if let Selection::Payload(hex) = selections.get(2)? {
Some(CleanIntermediaryRoute::ToTimeline(
TimelineKind::notifications(PubkeySource::Explicit(
Pubkey::from_hex(hex.as_str()).ok()?,
)),
))
} else {
None
};

Some(CleanIntermediaryRoute::ToRoute(Route::AddColumn(r)))
}

// Algorithm timelines
Selection::Algo(algo_kw) => {
let timeline_kind = match algo_kw {
AlgoKeyword::LastPerPubkey => {
let list_kind = parse_list_kind_selections(&selections[1..])?;
TimelineKind::last_per_pubkey(list_kind)
}
}
Selection::Keyword(Keyword::DeckAuthor) => Some(CleanIntermediaryRoute::ToTimeline(
TimelineKind::notifications(PubkeySource::DeckAuthor),
)),
_ => None,
},
Selection::Keyword(Keyword::Profile) => match selections.get(1)? {
Selection::Keyword(Keyword::Explicit) => {
if let Selection::Payload(hex) = selections.get(2)? {
Some(CleanIntermediaryRoute::ToTimeline(TimelineKind::profile(
PubkeySource::Explicit(Pubkey::from_hex(hex.as_str()).ok()?),
)))
} else {
None
}
}
Selection::Keyword(Keyword::DeckAuthor) => Some(CleanIntermediaryRoute::ToTimeline(
TimelineKind::profile(PubkeySource::DeckAuthor),
)),
Selection::Keyword(Keyword::Edit) => {
if let Selection::Payload(hex) = selections.get(2)? {
Some(CleanIntermediaryRoute::ToRoute(Route::EditProfile(
Pubkey::from_hex(hex.as_str()).ok()?,
)))
} else {
None
}
}
_ => None,
},
};

Some(CleanIntermediaryRoute::ToTimeline(timeline_kind))
}

// We never have PubkeySource keywords at the top level
Selection::PubkeySource(_pk_src) => None,

Selection::List(ListKeyword::Contact) => {
// only pubkey/src is allowed in this position
let pubkey_src = parse_pubkey_src_selection(&selections[1..])?;
Some(CleanIntermediaryRoute::ToTimeline(
TimelineKind::contact_list(pubkey_src),
))
}

Selection::Keyword(Keyword::Notifs) => {
let pubkey_src = parse_pubkey_src_selection(&selections[1..])?;
Some(CleanIntermediaryRoute::ToTimeline(
TimelineKind::notifications(pubkey_src),
))
}

Selection::Keyword(Keyword::Profile) => {
// we only expect PubkeySource in this position
let pubkey_src = parse_pubkey_src_selection(&selections[1..])?;
Some(CleanIntermediaryRoute::ToTimeline(TimelineKind::profile(
pubkey_src,
)))
}

Selection::Keyword(Keyword::Universe) => {
Some(CleanIntermediaryRoute::ToTimeline(TimelineKind::Universe))
}

Selection::Keyword(Keyword::Hashtag) => {
if let Selection::Payload(hashtag) = selections.get(1)? {
Some(CleanIntermediaryRoute::ToTimeline(TimelineKind::Hashtag(
@@ -681,9 +845,11 @@ fn selections_to_route(selections: Vec<Selection>) -> Option<CleanIntermediaryRo
None
}
}

Selection::Keyword(Keyword::Generic) => {
Some(CleanIntermediaryRoute::ToTimeline(TimelineKind::Generic))
}

Selection::Keyword(Keyword::Thread) => {
if let Selection::Payload(hex) = selections.get(1)? {
Some(CleanIntermediaryRoute::ToRoute(Route::thread(
@@ -693,6 +859,7 @@ fn selections_to_route(selections: Vec<Selection>) -> Option<CleanIntermediaryRo
None
}
}

Selection::Keyword(Keyword::Reply) => {
if let Selection::Payload(hex) = selections.get(1)? {
Some(CleanIntermediaryRoute::ToRoute(Route::reply(
@@ -770,9 +937,7 @@ fn selections_to_route(selections: Vec<Selection>) -> Option<CleanIntermediaryRo
_ => None,
},
Selection::Payload(_)
| Selection::Keyword(Keyword::Explicit)
| Selection::Keyword(Keyword::New)
| Selection::Keyword(Keyword::DeckAuthor)
| Selection::Keyword(Keyword::Show)
| Selection::Keyword(Keyword::NotificationSelection)
| Selection::Keyword(Keyword::ExternalNotifSelection)
@@ -788,6 +953,9 @@ impl fmt::Display for Selection {
match self {
Selection::Keyword(keyword) => write!(f, "{}", keyword),
Selection::Payload(payload) => write!(f, "{}", payload),
Selection::Algo(algo_kw) => write!(f, "{}", algo_kw),
Selection::List(list_kw) => write!(f, "{}", list_kw),
Selection::PubkeySource(pk_src_kw) => write!(f, "{}", pk_src_kw),
}
}
}