notedeck

One damus client to rule them all
git clone git://jb55.com/notedeck
Log | Files | Refs | README | LICENSE

lib.rs (6158B)


/// Details for a mismatch between the token the parser required and the
/// token actually present in the input. `'fnd` borrows from the parsed
/// input; `'exp` borrows from the (typically `'static`) expectation.
#[derive(Debug, Clone)]
pub struct UnexpectedToken<'fnd, 'exp> {
    /// The token the parser required at this position.
    pub expected: &'exp str,
    /// The token that was actually found in the input.
    pub found: &'fnd str,
}
      6 
/// Errors produced while parsing a token stream.
#[derive(Debug, Clone)]
pub enum ParseError<'a> {
    /// Not done parsing yet
    Incomplete,

    /// All parsing options failed
    AltAllFailed,

    /// There was some issue decoding the data
    DecodeFailed,

    /// A hex token failed to decode (non-hex digit or wrong length)
    HexDecodeFailed,

    /// We encountered an unexpected token
    UnexpectedToken(UnexpectedToken<'a, 'static>),

    /// No more tokens
    EOF,
}
     26 
/// Incrementally builds a delimiter-separated string of tokens.
pub struct TokenWriter {
    // Separator emitted between consecutive tokens (never before the first).
    delim: &'static str,
    // Count of tokens written so far; used to decide when to emit `delim`.
    tokens_written: usize,
    // Accumulated output bytes. Only `&str` data is ever appended, so the
    // buffer always holds valid UTF-8 (relied upon by `str()`).
    buf: Vec<u8>,
}
     32 
     33 impl Default for TokenWriter {
     34     fn default() -> Self {
     35         Self::new(":")
     36     }
     37 }
     38 
     39 impl TokenWriter {
     40     pub fn new(delim: &'static str) -> Self {
     41         let buf = vec![];
     42         let tokens_written = 0;
     43         Self {
     44             buf,
     45             tokens_written,
     46             delim,
     47         }
     48     }
     49 
     50     pub fn write_token(&mut self, token: &str) {
     51         if self.tokens_written > 0 {
     52             self.buf.extend_from_slice(self.delim.as_bytes())
     53         }
     54         self.buf.extend_from_slice(token.as_bytes());
     55         self.tokens_written += 1;
     56     }
     57 
     58     pub fn str(&self) -> &str {
     59         // SAFETY: only &strs are ever serialized, so its guaranteed to be
     60         // correct here
     61         unsafe { std::str::from_utf8_unchecked(self.buffer()) }
     62     }
     63 
     64     pub fn buffer(&self) -> &[u8] {
     65         &self.buf
     66     }
     67 }
     68 
/// A backtracking cursor over a slice of string tokens.
#[derive(Clone)]
pub struct TokenParser<'a> {
    // The full token stream being parsed.
    tokens: &'a [&'a str],
    // Position of the next token to consume; saved and restored by the
    // backtracking combinators (`alt`, `try_parse`, `parse_all`).
    index: usize,
}
     74 
     75 impl<'a> TokenParser<'a> {
     76     /// alt tries each parser in `routes` until one succeeds.
     77     /// If all fail, returns `ParseError::AltAllFailed`.
     78     #[allow(clippy::type_complexity)]
     79     pub fn alt<R>(
     80         parser: &mut TokenParser<'a>,
     81         routes: &[fn(&mut TokenParser<'a>) -> Result<R, ParseError<'a>>],
     82     ) -> Result<R, ParseError<'a>> {
     83         let start = parser.index;
     84         for route in routes {
     85             match route(parser) {
     86                 Ok(r) => return Ok(r), // if success, stop trying more routes
     87                 Err(_) => {
     88                     // revert index & try next route
     89                     parser.index = start;
     90                 }
     91             }
     92         }
     93         // if we tried them all and none succeeded
     94         Err(ParseError::AltAllFailed)
     95     }
     96 
     97     pub fn new(tokens: &'a [&'a str]) -> Self {
     98         let index = 0;
     99         Self { tokens, index }
    100     }
    101 
    102     pub fn peek_parse_token(&mut self, expected: &'static str) -> Result<&'a str, ParseError<'a>> {
    103         let found = self.peek_token()?;
    104         if found == expected {
    105             Ok(found)
    106         } else {
    107             Err(ParseError::UnexpectedToken(UnexpectedToken {
    108                 expected,
    109                 found,
    110             }))
    111         }
    112     }
    113 
    114     /// Parse a list of alternative tokens, returning success if any match.
    115     pub fn parse_any_token(
    116         &mut self,
    117         expected: &[&'static str],
    118     ) -> Result<&'a str, ParseError<'a>> {
    119         for token in expected {
    120             let result = self.try_parse(|p| p.parse_token(token));
    121             if result.is_ok() {
    122                 return result;
    123             }
    124         }
    125 
    126         Err(ParseError::AltAllFailed)
    127     }
    128 
    129     pub fn parse_token(&mut self, expected: &'static str) -> Result<&'a str, ParseError<'a>> {
    130         let found = self.pull_token()?;
    131         if found == expected {
    132             Ok(found)
    133         } else {
    134             Err(ParseError::UnexpectedToken(UnexpectedToken {
    135                 expected,
    136                 found,
    137             }))
    138         }
    139     }
    140 
    141     /// Ensure that we have parsed all tokens. If not the parser backtracks
    142     /// and the parse does not succeed, returning [`ParseError::Incomplete`].
    143     pub fn parse_all<R>(
    144         &mut self,
    145         parse_fn: impl FnOnce(&mut Self) -> Result<R, ParseError<'a>>,
    146     ) -> Result<R, ParseError<'a>> {
    147         let start = self.index;
    148         let result = parse_fn(self);
    149 
    150         // If the parser closure fails, revert the index
    151         if result.is_err() {
    152             self.index = start;
    153             result
    154         } else if !self.is_eof() {
    155             Err(ParseError::Incomplete)
    156         } else {
    157             result
    158         }
    159     }
    160 
    161     /// Attempt to parse something, backtrack if we fail.
    162     pub fn try_parse<R>(
    163         &mut self,
    164         parse_fn: impl FnOnce(&mut Self) -> Result<R, ParseError<'a>>,
    165     ) -> Result<R, ParseError<'a>> {
    166         let start = self.index;
    167         let result = parse_fn(self);
    168 
    169         // If the parser closure fails, revert the index
    170         if result.is_err() {
    171             self.index = start;
    172             result
    173         } else {
    174             result
    175         }
    176     }
    177 
    178     pub fn pull_token(&mut self) -> Result<&'a str, ParseError<'a>> {
    179         let token = self
    180             .tokens
    181             .get(self.index)
    182             .copied()
    183             .ok_or(ParseError::EOF)?;
    184         self.index += 1;
    185         Ok(token)
    186     }
    187 
    188     pub fn unpop_token(&mut self) {
    189         if (self.index as isize) - 1 < 0 {
    190             return;
    191         }
    192 
    193         self.index -= 1;
    194     }
    195 
    196     pub fn peek_token(&self) -> Result<&'a str, ParseError<'a>> {
    197         self.tokens()
    198             .first()
    199             .ok_or(ParseError::DecodeFailed)
    200             .copied()
    201     }
    202 
    203     #[inline]
    204     pub fn tokens(&self) -> &'a [&'a str] {
    205         let min_index = self.index.min(self.tokens.len());
    206         &self.tokens[min_index..]
    207     }
    208 
    209     #[inline]
    210     pub fn is_eof(&self) -> bool {
    211         self.tokens().is_empty()
    212     }
    213 }
    214 
/// Types that can round-trip through the token representation.
pub trait TokenSerializable: Sized {
    /// Return a list of serialization plans for a type. We do this for
    /// type safety and assume constructing these types are lightweight
    fn parse_from_tokens<'a>(parser: &mut TokenParser<'a>) -> Result<Self, ParseError<'a>>;
    /// Write this value's token representation into `writer`.
    fn serialize_tokens(&self, writer: &mut TokenWriter);
}
    221 
    222 /// Parse a 32 byte hex string
    223 pub fn parse_hex_id<'a>(parser: &mut TokenParser<'a>) -> Result<[u8; 32], ParseError<'a>> {
    224     use hex;
    225 
    226     let hexid = parser.pull_token()?;
    227     hex::decode(hexid)
    228         .map_err(|_| ParseError::HexDecodeFailed)?
    229         .as_slice()
    230         .try_into()
    231         .map_err(|_| ParseError::HexDecodeFailed)
    232 }