use std::fmt;

/// The type of the parser's input.
///
/// The parser iterates over tuples consisting of the token's starting
/// position, the token itself, and the token's ending position.
pub(crate) type LexerItem<Tok, Loc, Error>
    = ::std::result::Result<(Loc, Tok, Loc), Error>;

/// The components of an OpenPGP Message.
///
/// Note: This enum cannot be exhaustively matched to allow future
/// extensions.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum Token {
    /// A Literal data packet.
    Literal,
    /// A Compressed Data packet.
    CompressedData,

    /// An SK-ESK packet.
    SKESK,
    /// A PK-ESK packet.
    PKESK,
    /// A SEIP packet.
    SEIP,
    /// An MDC packet.
    MDC,
    /// An AED packet.
    AED,

    /// A OnePassSig packet.
    OPS,
    /// A Signature packet.
    SIG,

    /// The end of a container (either a Compressed Data packet or a
    /// SEIP packet).
    Pop,

    /// A container's unparsed content.
    OpaqueContent,

    /// This marks this enum as non-exhaustive.  Do not use this
    /// variant.
    #[doc(hidden)] __Nonexhaustive,
}

impl fmt::Display for Token {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // The variant names are already human-readable; reuse Debug.
        write!(f, "{:?}", self)
    }
}

/// Errors that can occur during lexing.
///
/// Lexing the token stream is currently infallible, so this enum has
/// no variants; it exists only to satisfy the parser's error type.
#[derive(Debug, Clone)]
pub enum LexicalError {
    // There are no lexing errors.
}

impl fmt::Display for LexicalError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{:?}", self)
    }
}

/// Adapts a sequence of `Token`s into the `(start, token, end)`
/// triples the parser consumes.
pub(crate) struct Lexer<'input> {
    iter: Box<dyn Iterator<Item=(usize, &'input Token)> + 'input>,
}

impl<'input> Iterator for Lexer<'input> {
    type Item = LexerItem<Token, usize, LexicalError>;

    fn next(&mut self) -> Option<Self::Item> {
        // Each token occupies a single position, so the index serves
        // as both the start and the end.  `Token` is `Copy`, so a
        // dereference suffices instead of a clone.
        self.iter.next().map(|(pos, tok)| Ok((pos, *tok, pos)))
    }
}

impl<'input> Lexer<'input> {
    /// Uses a raw sequence of tokens as input to the parser.
    pub(crate) fn from_tokens(raw: &'input [Token]) -> Self {
        Lexer {
            iter: Box::new(raw.iter().enumerate()),
        }
    }
}