commit 4522920939cd43352cd414404be45eb7145d304d
parent 31b3316d9c4e8230251ef5e6c2b0162731b08e32
Author: kernelkind <kernelkind@gmail.com>
Date: Sun, 6 Apr 2025 17:01:28 -0400
introduce `TokenHandler`
Used for saving anything `TokenSerializable` to disk.
Signed-off-by: kernelkind <kernelkind@gmail.com>
Diffstat:
3 files changed, 90 insertions(+), 0 deletions(-)
diff --git a/crates/notedeck/src/persist/mod.rs b/crates/notedeck/src/persist/mod.rs
@@ -1,7 +1,9 @@
mod app_size;
mod theme_handler;
+mod token_handler;
mod zoom;
pub use app_size::AppSizeHandler;
pub use theme_handler::ThemeHandler;
+pub use token_handler::TokenHandler;
pub use zoom::ZoomHandler;
diff --git a/crates/notedeck/src/persist/token_handler.rs b/crates/notedeck/src/persist/token_handler.rs
@@ -0,0 +1,54 @@
+use tokenator::{ParseError, ParseErrorOwned, TokenParser, TokenSerializable, TokenWriter};
+
+use crate::{storage, DataPath, DataPathType, Directory};
+
+pub struct TokenHandler { // persists a `TokenSerializable` value to a single file on disk
+ directory: Directory, // resolved data directory the file lives in
+ file_name: &'static str, // name of the file within `directory`
+}
+
+impl TokenHandler {
+ pub fn new(path: &DataPath, path_type: DataPathType, file_name: &'static str) -> Self { // handler rooted at the `path_type` subdirectory of `path`
+ let directory = Directory::new(path.path(path_type));
+
+ Self {
+ directory,
+ file_name,
+ }
+ }
+
+ pub fn save( // serialize `tokenator` into delimiter-joined tokens and write them to `file_name`
+ &self,
+ tokenator: &impl TokenSerializable,
+ delim: &'static str, // token separator; must match the `delim` later passed to `load`
+ ) -> crate::Result<()> {
+ let mut writer = TokenWriter::new(delim);
+
+ tokenator.serialize_tokens(&mut writer);
+ let to_write = writer.str(); // full delimiter-joined token string
+
+ storage::write_file(
+ &self.directory.file_path,
+ self.file_name.to_owned(),
+ to_write,
+ )
+ }
+
+ pub fn load<T: TokenSerializable>( // read `file_name`, split on `delim`, and parse a `T` from the tokens
+ &self,
+ delim: &'static str,
+ ) -> crate::Result<Result<T, ParseErrorOwned>> { // outer Err: file read failed; inner Err: token parse failed
+ match self.directory.get_file(self.file_name.to_owned()) {
+ Ok(s) => {
+ let data = s.split(delim).collect::<Vec<&str>>();
+ let mut parser = TokenParser::new(&data);
+ Ok(TokenSerializable::parse_from_tokens(&mut parser).map_err(ParseError::into)) // convert borrowed ParseError into owned form
+ }
+ Err(e) => Err(e), // propagate the file-read error as-is
+ }
+ }
+
+ pub fn clear(&self) -> crate::Result<()> { // erase saved state by overwriting the file with an empty string
+ storage::write_file(&self.directory.file_path, self.file_name.to_owned(), "")
+ }
+}
diff --git a/crates/tokenator/src/lib.rs b/crates/tokenator/src/lib.rs
@@ -5,6 +5,12 @@ pub struct UnexpectedToken<'fnd, 'exp> {
}
#[derive(Debug, Clone)]
+pub struct UnexpectedTokenOwned { // owned (lifetime-free) counterpart of `UnexpectedToken`
+ pub expected: String, // token the parser expected to see
+ pub found: String, // token actually encountered
+}
+
+#[derive(Debug, Clone)]
pub enum ParseError<'a> {
/// Not done parsing yet
Incomplete,
@@ -24,6 +30,34 @@ pub enum ParseError<'a> {
EOF,
}
+#[derive(Debug, Clone)]
+pub enum ParseErrorOwned { // owned mirror of `ParseError`, usable where a borrowed lifetime can't escape
+ Incomplete, // not done parsing yet
+ AltAllFailed, // every alternative in an alt-parse failed
+ DecodeFailed,
+ HexDecodeFailed,
+ UnexpectedToken(UnexpectedTokenOwned), // carries owned copies of the expected/found tokens
+ EOF,
+}
+
+impl From<ParseError<'_>> for ParseErrorOwned { // variant-for-variant conversion; allocates only for `UnexpectedToken`
+ fn from(value: ParseError) -> Self {
+ match value {
+ ParseError::Incomplete => Self::Incomplete,
+ ParseError::AltAllFailed => Self::AltAllFailed,
+ ParseError::DecodeFailed => Self::DecodeFailed,
+ ParseError::HexDecodeFailed => Self::HexDecodeFailed,
+ ParseError::UnexpectedToken(unexpected_token) => {
+ Self::UnexpectedToken(UnexpectedTokenOwned { // copy the borrowed &str fields into owned Strings
+ expected: unexpected_token.expected.to_owned(),
+ found: unexpected_token.found.to_owned(),
+ })
+ }
+ ParseError::EOF => Self::EOF,
+ }
+ }
+}
+
pub struct TokenWriter {
delim: &'static str,
tokens_written: usize,