lexer.rs

use logos::{Lexer, Logos};

pub use crate::core::{Span, FileRef};
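
/// A value of type `T` paired with the source file and span it came from.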
#[derive(Debug, Clone, Copy)]
pub struct Located<T> {
    pub item: T,
    pub span: Span,
    pub file: FileRef,
}

impl<T> Located<T> {
    pub fn new(item: T, file: FileRef, span: Span) -> Located<T> {
        Located { span, file, item }
    }
}

impl<T: Clone> Located<T> {
    /// Apply `func` to the contained value, keeping the original location.
    pub fn map<R>(&self, func: impl FnOnce(T) -> R) -> Located<R> {
        Located {
            item: func(self.item.clone()),
            span: self.span,
            file: self.file,
        }
    }
}
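
/// Callback for `Token::Num`: parse the matched slice as an `i64`.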
fn parse_num<'a>(lex: &mut Lexer<'a, Token<'a>>) -> Option<i64> {
    let slice = lex.slice();
    slice.parse().ok()
}
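
/// Callback for `Token::Str`: strip the surrounding quotes and decode
/// backslash escapes. `\n`, `\t`, and `\r` map to their control
/// characters; any other escaped character stands for itself.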
fn parse_str<'a>(lex: &mut Lexer<'a, Token<'a>>) -> Option<String> {
    let mut buf = String::new();
    let s = lex.slice();
    // Skip the opening and closing quote characters.
    let mut src = s[1..s.len() - 1].chars();
    while let Some(c) = src.next() {
        if c == '\\' {
            match src.next() {
                Some('n') => buf.push('\n'),
                Some('t') => buf.push('\t'),
                Some('r') => buf.push('\r'),
                Some(c) => buf.push(c),
                // A trailing lone backslash cannot occur in a slice matched
                // by the string regexes, but fail gracefully rather than panic.
                None => return None,
            }
        } else {
            buf.push(c);
        }
    }
    Some(buf)
}
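
/// The tokens of the language. Whitespace and `(* ... *)` comments are
/// skipped during lexing rather than produced as tokens.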
#[derive(Logos, Debug, PartialEq, Clone)]
pub enum Token<'a> {
    #[token("<")]
    LAngle,
    #[token(">")]
    RAngle,
    #[token("(")]
    LPar,
    #[token(")")]
    RPar,
    #[token("{")]
    LCurl,
    #[token("}")]
    RCurl,
    #[token("[")]
    LBrac,
    #[token("]")]
    RBrac,
    #[token("|")]
    Pipe,
    #[token(":")]
    Colon,
    #[token(",")]
    Comma,
    #[token(";")]
    Semi,
    #[token(".")]
    Dot,
    #[token("_")]
    Underscore,
    #[token("..")]
    DotDot,
    #[token("=>")]
    Arrow,
    #[token(":=")]
    Assn,
    #[token("::=")]
    LitAssn,
    #[token("puts")]
    Puts,
    #[token("case")]
    Case,
    #[token("let")]
    Let,
    #[token("in")]
    In,
    #[token("fix")]
    Fix,
    #[regex(r"\p{Ll}(\pL|[0-9_/-])*")]
    Var(&'a str),
    #[regex(r"\p{Lu}(\pL|[0-9_/-])*")]
    Atom(&'a str),
    #[regex(r"[0-9]+", parse_num)]
    Num(i64),
    #[regex(r"'([^'\\]|\\.)*'", parse_str)]
    #[regex("\"([^\"\\\\]|\\\\.)*\"", parse_str)]
    Str(String),
    #[error]
    #[regex(r"[ \t\n\f]+", logos::skip)]
    #[regex(r"\(\*([^*]|\*[^)])*\*\)", logos::skip)]
    Error,
}

impl<'a> Token<'a> {
    /// A human-readable name for the token, suitable for error messages.
    pub fn token_name(&self) -> String {
        match self {
            Token::Var(v) => format!("variable `{}`", v),
            Token::Atom(a) => format!("atom `{}`", a),
            Token::Num(n) => format!("number `{}`", n),
            Token::Str(s) => format!("string `{}`", s),
            Token::LAngle => "`<`".to_string(),
            Token::RAngle => "`>`".to_string(),
            Token::LPar => "`(`".to_string(),
            Token::RPar => "`)`".to_string(),
            Token::LCurl => "`{`".to_string(),
            Token::RCurl => "`}`".to_string(),
            Token::LBrac => "`[`".to_string(),
            Token::RBrac => "`]`".to_string(),
            Token::Pipe => "`|`".to_string(),
            Token::Colon => "`:`".to_string(),
            Token::Comma => "`,`".to_string(),
            Token::Semi => "`;`".to_string(),
            Token::Dot => "`.`".to_string(),
            Token::Underscore => "`_`".to_string(),
            Token::DotDot => "`..`".to_string(),
            Token::Arrow => "`=>`".to_string(),
            Token::Assn => "`:=`".to_string(),
            Token::LitAssn => "`::=`".to_string(),
            Token::Puts => "`puts`".to_string(),
            Token::Case => "`case`".to_string(),
            Token::Let => "`let`".to_string(),
            Token::In => "`in`".to_string(),
            Token::Fix => "`fix`".to_string(),
            Token::Error => "error".to_string(),
        }
    }
}

/// An error produced when the lexer hits input it cannot tokenize.
#[derive(Debug)]
pub struct LexerError {
    pub range: Span,
}

impl std::fmt::Display for LexerError {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(f, "LexerError({}..{})", self.range.start, self.range.end)
    }
}
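
/// A lexer item as a `Result` of a `(start, token, end)` triple, the shape
/// expected by parser generators such as LALRPOP.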
pub type Spanned<Tok, Loc, Error> = Result<(Loc, Tok, Loc), Error>;
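
/// Lex `source` into a stream of spanned tokens, turning the catch-all
/// `Token::Error` variant into a proper `LexerError`.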
pub fn tokens(source: &str) -> impl Iterator<Item = Spanned<Token<'_>, usize, LexerError>> {
    Token::lexer(source)
        .spanned()
        .map(move |(token, range)| match token {
            Token::Error => Err(LexerError {
                range: Span {
                    start: range.start as u32,
                    end: range.end as u32,
                },
            }),
            token => Ok((range.start, token, range.end)),
        })
}

#[cfg(test)]
mod test {
    use logos::Logos;

    use super::Token;

    #[test]
    fn simple_lexer_test() {
        let mut lex = Token::lexer("x := Foo (* ignore *) | \"bar\";");
        assert_eq!(lex.next(), Some(Token::Var("x")));
        assert_eq!(lex.next(), Some(Token::Assn));
        assert_eq!(lex.next(), Some(Token::Atom("Foo")));
        assert_eq!(lex.next(), Some(Token::Pipe));
        assert_eq!(lex.next(), Some(Token::Str("bar".to_owned())));
        assert_eq!(lex.next(), Some(Token::Semi));
        assert_eq!(lex.next(), None);
    }
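
    // Not in the original file: a small added check that `parse_str`
    // decodes backslash escapes inside double-quoted string literals.
    #[test]
    fn escape_lexer_test() {
        let mut lex = Token::lexer(r#""a\n\t\"b""#);
        assert_eq!(lex.next(), Some(Token::Str("a\n\t\"b".to_owned())));
        assert_eq!(lex.next(), None);
    }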
}