A plugin-based lexical reader: each token type (whitespace, numbers, strings, keywords, lists, identifiers) is handled by its own `Reader` plugin, composed into a single pipeline.
```rust
extern crate lexer;
use std::fmt::{self, Write};
use lexer::{Input, Reader, ReaderResult, Readers, ReadersBuilder, State, TokenMeta};
pub enum TokenValue {
Number(isize),
String(String),
Keyword(String),
Identifier(String),
List(Vec
impl fmt::Display for TokenValue { fn fmt(&self, f: &mut fmt::Formatter<'>) -> fmt::Result { match self { &TokenValue::Number(ref n) => write!(f, "{}", n), &TokenValue::String(ref s) => write!(f, "{:?}", s), &TokenValue::Keyword(ref s) => write!(f, ":{}", s), &TokenValue::Identifier(ref s) => write!(f, "{}", s), &TokenValue::List(ref list) => { f.writechar('(')?;
let mut index = 0;
for token in list {
write!(f, "{}", token.value())?;
index += 1;
if index < list.len() {
f.write_str(", ")?;
}
}
f.write_char(')')
}
}
} }
pub type Token = lexer::Token
pub struct WhitespaceReader;
impl Reader
ReaderResult::Empty
} else {
ReaderResult::None
}
}
None => ReaderResult::None,
}
} }
pub struct NumberReader;
impl Reader
string.push(ch);
while let Some(ch) = input.peek(next, 0) {
if ch.is_numeric() || ch == '_' {
input.read(next);
string.push(ch);
} else {
break;
}
}
ReaderResult::Some(Token::new(
TokenMeta::new_state_meta(current, next),
TokenValue::Number(string.parse().unwrap()),
))
} else {
ReaderResult::None
}
}
None => ReaderResult::None,
}
} }
pub struct StringReader;
impl Reader
while let Some(ch) = input.read(next) {
if ch == '"' {
break;
} else {
string.push(ch);
}
}
ReaderResult::Some(Token::new(
TokenMeta::new_state_meta(current, next),
TokenValue::String(string),
))
} else {
ReaderResult::None
}
}
None => ReaderResult::None,
}
} }
pub struct KeywordReader;
impl Reader
while let Some(ch) = input.peek(next, 0) {
if is_closer(ch) || is_whitespace(ch) {
break;
} else {
input.read(next);
string.push(ch);
}
}
ReaderResult::Some(Token::new(
TokenMeta::new_state_meta(current, next),
TokenValue::Keyword(string),
))
} else {
ReaderResult::None
}
}
None => ReaderResult::None,
}
} }
pub struct ListReader;
impl Reader
while let Some(ch) = input.peek(next, 0) {
if ch == ')' {
input.read(next);
break;
} else {
match lexer::read(readers, input, next) {
Some(Ok(token)) => {
list.push(token);
}
Some(Err(error)) => {
return ReaderResult::Err(error);
}
_ => {
break;
}
}
}
}
ReaderResult::Some(Token::new(
TokenMeta::new_state_meta(current, next),
TokenValue::List(list),
))
} else {
ReaderResult::None
}
}
None => ReaderResult::None,
}
} }
pub struct IdentifierReader;
impl Reader
string.push(ch);
while let Some(ch) = input.peek(next, 0) {
if is_closer(ch) || is_whitespace(ch) {
break;
} else {
input.read(next);
string.push(ch);
}
}
ReaderResult::Some(Token::new(
TokenMeta::new_state_meta(current, next),
TokenValue::Identifier(string),
))
}
None => ReaderResult::None,
}
} }
/// Whitespace for this lisp-like syntax: any Unicode whitespace, plus ','
/// (commas are treated as whitespace, Clojure-style).
fn is_whitespace(ch: char) -> bool {
    ch.is_whitespace() || ch == ','
}
/// True for the single list-closing delimiter, ')'.
fn is_closer(ch: char) -> bool {
    matches!(ch, ')')
}
pub fn readers() -> lexer::Readers
fn main() { let readers = readers();
let string = "(def-fn hello () (println :Hello, \"World!\"))";
let tokens = readers.read(string.chars());
let tokens: Vec
assert_eq!(tokens.len(), 1);
if let Some(&TokenValue::List(ref tokens)) = tokens.get(0).map(Token::value) { let first = tokens.first().unwrap(); asserteq!(first.meta().colstart(), 1); asserteq!(first.meta().colend(), 7); asserteq!(first.meta().colcount(), 6); asserteq!(first.meta().linestart(), 1); asserteq!(first.meta().lineend(), 1); asserteq!(first.meta().linecount(), 0); assert_eq!(first.meta().len(), 6); } } ```