notedeck

One damus client to rule them all
git clone git://jb55.com/notedeck
Log | Files | Refs | README | LICENSE

lib.rs (7148B)


/// A token mismatch: what the parser expected vs. what it actually found.
///
/// Borrows both strings: `'fnd` is the lifetime of the token found in the
/// input, `'exp` the lifetime of the expected token (typically `'static`,
/// as used by [`ParseError::UnexpectedToken`]).
#[derive(Debug, Clone)]
pub struct UnexpectedToken<'fnd, 'exp> {
    pub expected: &'exp str,
    pub found: &'fnd str,
}
      6 
/// Owned counterpart of [`UnexpectedToken`], for errors that must outlive
/// the borrowed token data (see [`ParseErrorOwned`]).
#[derive(Debug, Clone)]
pub struct UnexpectedTokenOwned {
    pub expected: String,
    pub found: String,
}
     12 
/// Errors produced while parsing a token stream with [`TokenParser`].
#[derive(Debug, Clone)]
pub enum ParseError<'a> {
    /// Not done parsing yet
    Incomplete,

    /// All parsing options failed
    AltAllFailed,

    /// There was some issue decoding the data
    DecodeFailed,

    /// A hex token failed to decode (wrong characters or wrong length)
    HexDecodeFailed,

    /// We encountered an unexpected token
    UnexpectedToken(UnexpectedToken<'a, 'static>),

    /// No more tokens
    EOF,
}
     32 
/// Owned (`'static`) version of [`ParseError`]; variants mirror it
/// one-to-one. Convert via `From<ParseError>`.
#[derive(Debug, Clone)]
pub enum ParseErrorOwned {
    Incomplete,
    AltAllFailed,
    DecodeFailed,
    HexDecodeFailed,
    UnexpectedToken(UnexpectedTokenOwned),
    EOF,
}
     42 
     43 impl From<ParseError<'_>> for ParseErrorOwned {
     44     fn from(value: ParseError) -> Self {
     45         match value {
     46             ParseError::Incomplete => Self::Incomplete,
     47             ParseError::AltAllFailed => Self::AltAllFailed,
     48             ParseError::DecodeFailed => Self::DecodeFailed,
     49             ParseError::HexDecodeFailed => Self::HexDecodeFailed,
     50             ParseError::UnexpectedToken(unexpected_token) => {
     51                 Self::UnexpectedToken(UnexpectedTokenOwned {
     52                     expected: unexpected_token.expected.to_owned(),
     53                     found: unexpected_token.found.to_owned(),
     54                 })
     55             }
     56             ParseError::EOF => Self::EOF,
     57         }
     58     }
     59 }
     60 
/// Incrementally builds a delimiter-separated token string.
pub struct TokenWriter {
    // separator inserted between tokens (never before the first one)
    delim: &'static str,
    // how many tokens have been written so far
    tokens_written: usize,
    // accumulated output; only UTF-8 (&str) data is ever appended
    buf: Vec<u8>,
}
     66 
     67 impl Default for TokenWriter {
     68     fn default() -> Self {
     69         Self::new(":")
     70     }
     71 }
     72 
     73 impl TokenWriter {
     74     pub fn new(delim: &'static str) -> Self {
     75         let buf = vec![];
     76         let tokens_written = 0;
     77         Self {
     78             buf,
     79             tokens_written,
     80             delim,
     81         }
     82     }
     83 
     84     pub fn write_token(&mut self, token: &str) {
     85         if self.tokens_written > 0 {
     86             self.buf.extend_from_slice(self.delim.as_bytes())
     87         }
     88         self.buf.extend_from_slice(token.as_bytes());
     89         self.tokens_written += 1;
     90     }
     91 
     92     pub fn str(&self) -> &str {
     93         // SAFETY: only &strs are ever serialized, so its guaranteed to be
     94         // correct here
     95         unsafe { std::str::from_utf8_unchecked(self.buffer()) }
     96     }
     97 
     98     pub fn buffer(&self) -> &[u8] {
     99         &self.buf
    100     }
    101 }
    102 
/// A backtracking cursor over a slice of string tokens.
///
/// Cloning is cheap (slice reference + index), which is what the
/// backtracking combinators rely on.
#[derive(Clone)]
pub struct TokenParser<'a> {
    tokens: &'a [&'a str],
    // position of the next token to consume
    index: usize,
}
    108 
    109 impl<'a> TokenParser<'a> {
    110     /// alt tries each parser in `routes` until one succeeds.
    111     /// If all fail, returns `ParseError::AltAllFailed`.
    112     #[allow(clippy::type_complexity)]
    113     pub fn alt<R>(
    114         parser: &mut TokenParser<'a>,
    115         routes: &[fn(&mut TokenParser<'a>) -> Result<R, ParseError<'a>>],
    116     ) -> Result<R, ParseError<'a>> {
    117         let start = parser.index;
    118         for route in routes {
    119             match route(parser) {
    120                 Ok(r) => return Ok(r), // if success, stop trying more routes
    121                 Err(_) => {
    122                     // revert index & try next route
    123                     parser.index = start;
    124                 }
    125             }
    126         }
    127         // if we tried them all and none succeeded
    128         Err(ParseError::AltAllFailed)
    129     }
    130 
    131     pub fn new(tokens: &'a [&'a str]) -> Self {
    132         let index = 0;
    133         Self { tokens, index }
    134     }
    135 
    136     pub fn peek_parse_token(&mut self, expected: &'static str) -> Result<&'a str, ParseError<'a>> {
    137         let found = self.peek_token()?;
    138         if found == expected {
    139             Ok(found)
    140         } else {
    141             Err(ParseError::UnexpectedToken(UnexpectedToken {
    142                 expected,
    143                 found,
    144             }))
    145         }
    146     }
    147 
    148     /// Parse a list of alternative tokens, returning success if any match.
    149     pub fn parse_any_token(
    150         &mut self,
    151         expected: &[&'static str],
    152     ) -> Result<&'a str, ParseError<'a>> {
    153         for token in expected {
    154             let result = self.try_parse(|p| p.parse_token(token));
    155             if result.is_ok() {
    156                 return result;
    157             }
    158         }
    159 
    160         Err(ParseError::AltAllFailed)
    161     }
    162 
    163     pub fn parse_token(&mut self, expected: &'static str) -> Result<&'a str, ParseError<'a>> {
    164         let found = self.pull_token()?;
    165         if found == expected {
    166             Ok(found)
    167         } else {
    168             Err(ParseError::UnexpectedToken(UnexpectedToken {
    169                 expected,
    170                 found,
    171             }))
    172         }
    173     }
    174 
    175     /// Ensure that we have parsed all tokens. If not the parser backtracks
    176     /// and the parse does not succeed, returning [`ParseError::Incomplete`].
    177     pub fn parse_all<R>(
    178         &mut self,
    179         parse_fn: impl FnOnce(&mut Self) -> Result<R, ParseError<'a>>,
    180     ) -> Result<R, ParseError<'a>> {
    181         let start = self.index;
    182         let result = parse_fn(self);
    183 
    184         // If the parser closure fails, revert the index
    185         if result.is_err() {
    186             self.index = start;
    187             result
    188         } else if !self.is_eof() {
    189             Err(ParseError::Incomplete)
    190         } else {
    191             result
    192         }
    193     }
    194 
    195     /// Attempt to parse something, backtrack if we fail.
    196     pub fn try_parse<R>(
    197         &mut self,
    198         parse_fn: impl FnOnce(&mut Self) -> Result<R, ParseError<'a>>,
    199     ) -> Result<R, ParseError<'a>> {
    200         let start = self.index;
    201         let result = parse_fn(self);
    202 
    203         // If the parser closure fails, revert the index
    204         if result.is_err() {
    205             self.index = start;
    206             result
    207         } else {
    208             result
    209         }
    210     }
    211 
    212     pub fn pull_token(&mut self) -> Result<&'a str, ParseError<'a>> {
    213         let token = self
    214             .tokens
    215             .get(self.index)
    216             .copied()
    217             .ok_or(ParseError::EOF)?;
    218         self.index += 1;
    219         Ok(token)
    220     }
    221 
    222     pub fn unpop_token(&mut self) {
    223         if (self.index as isize) - 1 < 0 {
    224             return;
    225         }
    226 
    227         self.index -= 1;
    228     }
    229 
    230     pub fn peek_token(&self) -> Result<&'a str, ParseError<'a>> {
    231         self.tokens()
    232             .first()
    233             .ok_or(ParseError::DecodeFailed)
    234             .copied()
    235     }
    236 
    237     #[inline]
    238     pub fn tokens(&self) -> &'a [&'a str] {
    239         let min_index = self.index.min(self.tokens.len());
    240         &self.tokens[min_index..]
    241     }
    242 
    243     #[inline]
    244     pub fn is_eof(&self) -> bool {
    245         self.tokens().is_empty()
    246     }
    247 }
    248 
/// Types that can round-trip through the token format.
pub trait TokenSerializable: Sized {
    /// Return a list of serialization plans for a type. We do this for
    /// type safety and assume constructing these types are lightweight
    fn parse_from_tokens<'a>(parser: &mut TokenParser<'a>) -> Result<Self, ParseError<'a>>;
    /// Write this value's token representation into `writer`.
    fn serialize_tokens(&self, writer: &mut TokenWriter);
}
    255 
    256 /// Parse a 32 byte hex string
    257 pub fn parse_hex_id<'a>(parser: &mut TokenParser<'a>) -> Result<[u8; 32], ParseError<'a>> {
    258     use hex;
    259 
    260     let hexid = parser.pull_token()?;
    261     hex::decode(hexid)
    262         .map_err(|_| ParseError::HexDecodeFailed)?
    263         .as_slice()
    264         .try_into()
    265         .map_err(|_| ParseError::HexDecodeFailed)
    266 }