diff --git a/crates/notedeck_columns/src/storage/mod.rs b/crates/notedeck_columns/src/storage/mod.rs
index edd5df3d..d870d91d 100644
--- a/crates/notedeck_columns/src/storage/mod.rs
+++ b/crates/notedeck_columns/src/storage/mod.rs
@@ -5,4 +5,4 @@ mod token_parser;
 
 pub use decks::{load_decks_cache, save_decks_cache, DECKS_CACHE_FILE};
 pub use migration::{deserialize_columns, COLUMNS_FILE};
-pub use token_parser::{ParseError, TokenParser, TokenSerializable};
+pub use token_parser::{ParseError, TokenParser, TokenSerializable, TokenWriter};
diff --git a/crates/notedeck_columns/src/storage/token_parser.rs b/crates/notedeck_columns/src/storage/token_parser.rs
index 45b43595..f7bd9641 100644
--- a/crates/notedeck_columns/src/storage/token_parser.rs
+++ b/crates/notedeck_columns/src/storage/token_parser.rs
@@ -25,6 +25,42 @@ pub enum ParseError<'a> {
     EOF,
 }
 
+pub struct TokenWriter {
+    delim: &'static str,
+    tokens_written: usize,
+    buf: Vec<u8>,
+}
+
+impl Default for TokenWriter {
+    fn default() -> Self {
+        Self::new(":")
+    }
+}
+
+impl TokenWriter {
+    pub fn new(delim: &'static str) -> Self {
+        let buf = vec![];
+        let tokens_written = 0;
+        Self {
+            buf,
+            tokens_written,
+            delim,
+        }
+    }
+
+    pub fn write_token(&mut self, token: &str) {
+        if self.tokens_written > 0 {
+            self.buf.extend_from_slice(self.delim.as_bytes())
+        }
+        self.buf.extend_from_slice(token.as_bytes());
+        self.tokens_written += 1;
+    }
+
+    pub fn buffer(&self) -> &[u8] {
+        &self.buf
+    }
+}
+
 #[derive(Clone)]
 pub struct TokenParser<'a> {
     tokens: &'a [&'a str],
@@ -146,7 +182,7 @@ pub trait TokenSerializable: Sized {
     /// Return a list of serialization plans for a type. We do this for
     /// type safety and assume constructing these types are lightweight
     fn parse<'a>(parser: &mut TokenParser<'a>) -> Result<Self, ParseError<'a>>;
-    fn serialize(&self, write_token: fn(&str) -> Result<(), std::io::Error>) -> Result<(), std::io::Error>;
+    fn serialize(&self, writer: &mut TokenWriter);
 }
 
 #[cfg(test)]
diff --git a/crates/notedeck_columns/src/ui/add_column.rs b/crates/notedeck_columns/src/ui/add_column.rs
index 0ee4ffba..958d5aac 100644
--- a/crates/notedeck_columns/src/ui/add_column.rs
+++ b/crates/notedeck_columns/src/ui/add_column.rs
@@ -11,7 +11,7 @@ use nostrdb::{Ndb, Transaction};
 use crate::{
     login_manager::AcquireKeyState,
     route::Route,
-    storage::{ParseError, TokenParser, TokenSerializable},
+    storage::{ParseError, TokenParser, TokenSerializable, TokenWriter},
     timeline::{kind::ListKind, PubkeySource, Timeline, TimelineKind},
     ui::anim::ICON_EXPANSION_MULTIPLE,
     Damus,
@@ -111,14 +111,10 @@ impl AddColumnRoute {
 }
 
 impl TokenSerializable for AddColumnRoute {
-    fn serialize(
-        &self,
-        write_token: fn(&str) -> Result<(), std::io::Error>,
-    ) -> Result<(), std::io::Error> {
+    fn serialize(&self, writer: &mut TokenWriter) {
         for token in self.tokens() {
-            write_token(token)?;
+            writer.write_token(token);
         }
-        Ok(())
     }
 
     fn parse<'a>(parser: &mut TokenParser<'a>) -> Result<Self, ParseError<'a>> {
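
Note: a minimal sketch of how the writer-based API introduced here might be driven for a serialize/parse round trip. The roundtrip helper, the crate-internal use path, and the TokenParser::new constructor over a token slice are assumptions for illustration; only TokenWriter, write_token, buffer, and the new serialize(&self, &mut TokenWriter) signature come from this patch.

use crate::storage::{TokenParser, TokenSerializable, TokenWriter};

// Hypothetical round trip: write a value out as ":"-delimited tokens,
// split the buffer back into tokens, and parse it again. Assumes the
// tokens themselves never contain the delimiter.
fn roundtrip<T: TokenSerializable>(value: &T) -> Option<T> {
    // Collect tokens with the default ":" delimiter.
    let mut writer = TokenWriter::default();
    value.serialize(&mut writer);

    // buffer() exposes the raw bytes written so far.
    let serialized = std::str::from_utf8(writer.buffer()).ok()?;
    let tokens: Vec<&str> = serialized.split(':').collect();

    // Assumed constructor: TokenParser over a borrowed token slice.
    let mut parser = TokenParser::new(&tokens);
    T::parse(&mut parser).ok()
}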