This library provides a very simple and lightweight parser (recursive descent LL(1)) for combining rules and expressing a grammar.
The library uses Logos as a lexical analyzer and tokenizer.
This library's major incentives were:
Cargo.toml:
```toml
[dependencies]
logos = "*"
```
```rust
use logos::Logos;
enum Token { // Tokens can be literal strings, of any length. #[token("fast")] Fast,
#[token(".")]
Period,
// Or regular expressions.
#[regex("[a-zA-Z]+")]
Text,
// Logos requires one token variant to handle errors,
// it can be named anything you wish.
#[error]
// We can also use this variant to define whitespace,
// or any other matches we wish to skip.
#[regex(r"[ \t\n\f]+", logos::skip)]
Error,
} ```
The library provides a `ParseIt<'a, T>` instance that encompasses the set of tokens and auxiliary methods:
```rust
struct Parser<'a> { inner: ParseIt<'a, Token<'a>>, }
```
The parsing process is a combination of the `ParseIt` instance and the auxiliary methods from the `Step`.
The helpers: `then`, `then_zip` and others from `Step`; `one_or_more`, `zero_or_more` from `ParseIt`.
Finally, the parser returns a `Result<Structure, ParseError<'a>>`:
```rust
// Entry point of the grammar: parses zero or more sentences starting at `pos`.
// The `into()` converts the resulting Step into a Result, surfacing a ParseError on failure.
fn text(&self, pos: usize) -> Result<Vec<Sentence<'a>>, ParseError<'a>> {
self.inner.zero_or_more(pos, |p| self.sentence(p)).into()
}
```rust
use crate::parser::ParseIt;
use crate::token;
use crate::step::Step;
use crate::parser::EmptyToken;
use crate::error::ParseError;
use logos::Logos;
pub enum Token<'a> { #[regex(r"[a-zA-Z-]+")] Word(&'a str),
#[token(",")]
Comma,
#[token(".")]
Dot,
#[token("!")]
Bang,
#[token("?")]
Question,
#[regex(r"[ \t\r\n\u000C\f]+", logos::skip)]
Whitespace,
#[error]
Error,
}
// AST node for a single sentence element: a word or a comma.
// NOTE(review): `#[derive(Debug)]` added — the example prints the parse
// result with `{:?}` (see `println!` in `test()`), which requires Debug.
#[derive(Debug)]
enum Item<'a> {
    Word(&'a str),
    Comma,
}
enum Sentence<'a> {
Sentence(Vec
/// The grammar's parser: a thin wrapper around the `ParseIt` engine
/// instantiated over this grammar's `Token` set.
struct Parser<'a> {
    inner: ParseIt<'a, Token<'a>>,
}
impl<'a> Parser<'a> {
fn new(text: &'a str) -> Parser<'a> {
let delegate: ParseIt
// Parses one sentence: one or more words/commas terminated by '.', '!' or '?'.
// The terminator selects the Sentence variant; the punctuation token itself is
// dropped from the result via take_left.
fn sentence(&self, pos: usize) -> Step<'a, Sentence<'a>> {
// Shared body of all three alternatives: one or more words or commas.
let items = |p| self.inner.one_or_more(p, |p| self.word(p));
// items '.'  -> Sentence::Sentence
let sentence = |p| items(p)
.then_zip(|p| token!(self.inner.token(p) => Token::Dot))
.take_left()
.map(Sentence::Sentence);
// items '!'  -> Sentence::Exclamation
let exclamation = |p| items(p)
.then_zip(|p| token!(self.inner.token(p) => Token::Bang))
.take_left()
.map(Sentence::Exclamation);
// items '?'  -> Sentence::Question
let question = |p| items(p)
.then_zip(|p| token!(self.inner.token(p) => Token::Question))
.take_left()
.map(Sentence::Question);
// Try the '.' form first; or_from(pos) backtracks to the start position
// before trying the '!' and then the '?' alternatives.
sentence(pos)
.or_from(pos)
.or(exclamation)
.or(question)
.into()
}
// Parses a single item at `pos`: a Word token (carrying its text) or a Comma,
// mapped into the corresponding Item AST node via the token! macro.
fn word(&self, pos: usize) -> Step<'a, Item<'a>> {
token!(self.inner.token(pos) =>
Token::Word(v) => Item::Word(v),
Token::Comma => Item::Comma
)
}
// Entry point of the grammar: parses zero or more sentences starting at `pos`.
// The `into()` converts the Step into a Result, surfacing a ParseError on failure.
fn text(&self, pos: usize) -> Result<Vec<Sentence<'a>>, ParseError<'a>> {
self.inner.zero_or_more(pos, |p| self.sentence(p)).into()
}
}
fn test() {
    // A small poem exercising all three sentence terminators: '.', '!' and '?'.
    let parser = Parser::new(r#" I have a strange addiction, It often sets off sparks! I really cannot seem to stop, Using exclamation marks! Anyone heard of the interrobang? The poem is for kids. "#);
    let result = parser.text(0).unwrap();
    println!("{:?}", result);
}
```
- `token` - gives a possibility to pull out a current token
- `one_or_more` - gives a one or more semantic
- `zero_or_more` - gives a zero or more semantic
- `validate_eof` - ensures the parser reaches the end of the input
- `token!` - parses the current token. In general, it is used as follows: `token!(p.token(pos) => T::Bang => "!")`
- `wrap!` - implements a simple pattern in grammar like `left value right`, for instance `[1,2,3]` or `(a,b)`: `wrap!(0 => left; value or default; right)`, `wrap!(0 => left; value ?; right)`
- `seq!` - implements a simple pattern of sequence like `el sep el ...`, for instance `1,2,3`; a `,` at the end signals the separator can be at the end of the seq, like `1,2,3 (,)?`
- `or` - gives an alternative in a horizon of one token
- `or_from` - gives a backtracking option
- `then` - gives a basic combination with a next rule, omitting the current one
- `then_zip` - combines a current result and a next one into a pair
- `then_skip` - parses the next one but drops the result, keeping only the current one
- `then_or_none` - combines a next one in an option with a current one, or returns a none otherwise
- `take_left` - drops a right value from a pair
- `take_right` - drops a left value from a pair
- `merge` - merges a value into a list
- `to_map` - transforms a list of pairs into a map
- `or_val` - replaces a value with a default value if it is not present
- `or_none` - replaces a value with a none if it is not present
- `ok` - transforms a value into an option
- `error` - transforms an error into an option
- `map` - transforms a value
- `combine` - combines a value with another value from a given step
- `validate` - validates a given value and transforms it into an error if the validation failed
- `print` - prints a step
- `print_with` - prints a step with a given prefix
- `print_as` - prints a step with a transformation of the value
- `print_with_as` - prints a step with a transformation of the value, with a given prefix
- `env` - prints a position and env from the source text (with a radius of 2 and '>>' '<<' separating the offending token)

To test a lexer there are methods from `crate::parsit::test::lexer_test::*` for service:
```rust
use logos::Logos;
use crate::parsit::test::lexer_test::*;
pub enum T<'a> { #[regex(r"[a-zA-Z-]+")] Word(&'a str),
#[token(",")]
Comma,
#[token(".")]
Dot,
#[token("!")]
Bang,
#[token("?")]
Question,
#[regex(r"[ \t\r\n]+", logos::skip)]
Whitespace,
#[error]
Error,
}
fn test() {
expect::
To test a parser there are methods from `crate::parsit::test::parser_test::*` for service:
```rust
use logos::Logos;
use crate::parsit::test::parser_test::fail;
use crate::parsit::test::parser_test::parsit;
use crate::parsit::token;
use crate::parsit::parser::ParseIt;
use crate::parsit::step::Step;
pub enum T<'a> { #[regex(r"[a-zA-Z-]+")] Word(&'a str),
#[token(",")]
Comma,
#[token(".")]
Dot,
#[token("!")]
Bang,
#[token("?")]
Question,
#[regex(r"[ \t\r\n]+", logos::skip)]
Whitespace,
#[error]
Error,
}
// Checks the parsed value: a word zipped with an optional '!' (default "").
// NOTE(review): underscores stripped in extraction restored — `test_expect`,
// `then_or_val_zip` (snake_case, consistent with `then_zip` elsewhere).
fn test_expect() {
    let p = parsit("abc!");
    let bang = |pos: usize| token!(p.token(pos) => T::Bang => "!");
    let word = |pos: usize| token!(p.token(pos) => T::Word(v) => *v);
    let step = word(0)
        .then_or_val_zip(bang, "")
        .map(|(a, b)| format!("{}{}", a, b));
    expect(step, "abc!".to_string());
}
// Checks the resulting position after a successful parse step.
// NOTE(review): underscores stripped in extraction restored — `test_pos`,
// `then_or_val_zip`.
fn test_pos() {
    let p = parsit("abc!");
    let bang = |pos: usize| token!(p.token(pos) => T::Bang => "!");
    let word = |pos: usize| token!(p.token(pos) => T::Word(v) => v);
    let step = word(0).then_or_val_zip(bang, "");
    expect_pos(step, 2); // the next position to parse
}
// Expects the step to fail: the word is followed by '?', not '!', so
// then_zip(bang) cannot succeed.
// NOTE(review): underscores stripped in extraction restored — `test_fail`,
// `then_zip` (compare the intact `then_zip` call in the test below).
fn test_fail() {
    let p = parsit("abc?!");
    let bang = |pos: usize| token!(p.token(pos) => T::Bang => "!");
    let word = |pos: usize| token!(p.token(pos) => T::Word(v) => v);
    let step = word(0).then_zip(bang);
    fail(step);
}
// Expects failure at a specific token index (1 — the '?' after the word).
// NOTE(review): underscore stripped in extraction restored — `test_fail_on`.
fn test_fail_on() {
    let p = parsit("abc?!");
    let bang = |pos: usize| token!(p.token(pos) => T::Bang => "!");
    let word = |pos: usize| token!(p.token(pos) => T::Word(v) => v);
    let step = word(0).then_zip(bang);
    fail_on(step, 1);
}
```