lexer.rs

pub use crate::core::{FileRef, Loc, Span};

use logos::{Lexer, Logos};

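/// A value of type `T` tagged with the source location it came from.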
#[derive(Debug, Clone, Copy)]
pub struct Located<T> {
    pub item: T,
    pub loc: Loc,
}

impl<T> Located<T> {
    pub fn new(item: T, file: FileRef, span: Span) -> Located<T> {
        Located {
            loc: Loc { file, span },
            item,
        }
    }
}

impl<T: Clone> Located<T> {
    /// Apply `func` to the wrapped value, preserving its location.
    pub fn map<R>(&self, func: impl FnOnce(T) -> R) -> Located<R> {
        Located {
            item: func(self.item.clone()),
            loc: self.loc,
        }
    }
}

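/// Logos callback: parse a matched run of digits as an `i64`.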
fn parse_num<'a>(lex: &mut Lexer<'a, Token<'a>>) -> Option<i64> {
    let slice = lex.slice();
    slice.parse().ok()
}

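/// Logos callback: strip the surrounding quotes from a matched string
/// literal and decode the `\n`, `\t`, and `\r` escape sequences; any other
/// backslash-escaped character stands for itself.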
fn parse_str<'a>(lex: &mut Lexer<'a, Token<'a>>) -> Option<String> {
    let mut buf = String::new();
    let s = lex.slice();
    // Drop the opening and closing quote characters.
    let mut src = s[1..s.len() - 1].chars();
    while let Some(c) = src.next() {
        if c == '\\' {
            match src.next() {
                Some('n') => buf.push('\n'),
                Some('t') => buf.push('\t'),
                Some('r') => buf.push('\r'),
                Some(c) => buf.push(c),
                // A dangling backslash at the end of the literal is malformed.
                None => return None,
            }
        } else {
            buf.push(c);
        }
    }
    Some(buf)
}

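/// The tokens of the language. Whitespace and `(* ... *)` comments are
/// skipped by the lexer rather than emitted as tokens.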
#[derive(Logos, Debug, PartialEq, Clone)]
pub enum Token<'a> {
    // Punctuation and operators.
    #[token("<")]
    LAngle,
    #[token(">")]
    RAngle,
    #[token("(")]
    LPar,
    #[token(")")]
    RPar,
    #[token("{")]
    LCurl,
    #[token("}")]
    RCurl,
    #[token("[")]
    LBrac,
    #[token("]")]
    RBrac,
    #[token("|")]
    Pipe,
    #[token(":")]
    Colon,
    #[token(",")]
    Comma,
    #[token(";")]
    Semi,
    #[token(".")]
    Dot,
    #[token("_")]
    Underscore,
    #[token("..")]
    DotDot,
    #[token("=>")]
    Arrow,
    #[token(":=")]
    Assn,
    #[token("::=")]
    LitAssn,

    // Keywords.
    #[token("puts")]
    Puts,
    #[token("case")]
    Case,
    #[token("let")]
    Let,
    #[token("in")]
    In,
    #[token("fix")]
    Fix,

    // Identifiers and literals.
    #[regex(r"\p{Ll}(\pL|[0-9_/-])*")]
    Var(&'a str),
    #[regex(r"\p{Lu}(\pL|[0-9_/-])*")]
    Atom(&'a str),
    #[regex(r"[0-9]+", parse_num)]
    Num(i64),
    #[regex(r"'([^'\\]|\\.)*'", parse_str)]
    #[regex("\"([^\"\\\\]|\\\\.)*\"", parse_str)]
    Str(String),

    // Whitespace and comments are skipped; anything unmatched is an error.
    #[error]
    #[regex(r"[ \t\n\f]+", logos::skip)]
    #[regex(r"\(\*([^*]|\*[^)])*\*\)", logos::skip)]
    Error,
}

impl<'a> Token<'a> {
    /// A human-readable name for this token, suitable for error messages.
    pub fn token_name(&self) -> String {
        match self {
            Token::Var(v) => format!("variable `{}`", v),
            Token::Atom(a) => format!("atom `{}`", a),
            Token::Num(n) => format!("number `{}`", n),
            Token::Str(s) => format!("string `{}`", s),
            Token::LAngle => "`<`".to_string(),
            Token::RAngle => "`>`".to_string(),
            Token::LPar => "`(`".to_string(),
            Token::RPar => "`)`".to_string(),
            Token::LCurl => "`{`".to_string(),
            Token::RCurl => "`}`".to_string(),
            Token::LBrac => "`[`".to_string(),
            Token::RBrac => "`]`".to_string(),
            Token::Pipe => "`|`".to_string(),
            Token::Colon => "`:`".to_string(),
            Token::Comma => "`,`".to_string(),
            Token::Semi => "`;`".to_string(),
            Token::Dot => "`.`".to_string(),
            Token::Underscore => "`_`".to_string(),
            Token::DotDot => "`..`".to_string(),
            Token::Arrow => "`=>`".to_string(),
            Token::Assn => "`:=`".to_string(),
            Token::LitAssn => "`::=`".to_string(),
            Token::Puts => "`puts`".to_string(),
            Token::Case => "`case`".to_string(),
            Token::Let => "`let`".to_string(),
            Token::In => "`in`".to_string(),
            Token::Fix => "`fix`".to_string(),
            Token::Error => "error".to_string(),
        }
    }
}

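/// An error representing an unrecognized piece of input, along with the
/// span at which it occurred.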
#[derive(Debug)]
pub struct LexerError {
    pub range: Span,
}

impl std::fmt::Display for LexerError {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(f, "LexerError({}..{})", self.range.start, self.range.end)
    }
}

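/// A lexer result in the `(start, token, end)` triple shape conventionally
/// expected by LALRPOP-style external lexers.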
pub type Spanned<Tok, Loc, Error> = Result<(Loc, Tok, Loc), Error>;

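/// Produce an iterator of spanned tokens from `source`, converting the
/// catch-all `Token::Error` variant into a proper `LexerError`.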
pub fn tokens(source: &str) -> impl Iterator<Item = Spanned<Token<'_>, usize, LexerError>> {
    Token::lexer(source)
        .spanned()
        .map(move |(token, range)| match token {
            Token::Error => Err(LexerError {
                range: Span {
                    start: range.start as u32,
                    end: range.end as u32,
                },
            }),
            token => Ok((range.start, token, range.end)),
        })
}

#[cfg(test)]
mod test {
    use logos::Logos;

    use super::Token;

    #[test]
    fn simple_lexer_test() {
        let mut lex = Token::lexer("x := Foo (* ignore *) | \"bar\";");
        assert_eq!(lex.next(), Some(Token::Var("x")));
        assert_eq!(lex.next(), Some(Token::Assn));
        assert_eq!(lex.next(), Some(Token::Atom("Foo")));
        assert_eq!(lex.next(), Some(Token::Pipe));
        assert_eq!(lex.next(), Some(Token::Str("bar".to_owned())));
        assert_eq!(lex.next(), Some(Token::Semi));
        assert_eq!(lex.next(), None);
    }
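
    // A small usage sketch of the `tokens` wrapper: it yields
    // `(start, token, end)` triples and surfaces unrecognized input as
    // `Err(LexerError)` rather than as a plain token. This assumes the
    // keyword tokens take precedence over the `Var` regex on an exact
    // match, which is how logos resolves the overlap here.
    #[test]
    fn spanned_tokens_test() {
        let mut toks = super::tokens("let x");
        assert!(matches!(toks.next(), Some(Ok((0, Token::Let, 3)))));
        assert!(matches!(toks.next(), Some(Ok((4, Token::Var("x"), 5)))));
        assert!(toks.next().is_none());
    }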
}