jayce is a tokenizer 🌌
```rust
use jayce::{duos, Tokenizer};
use regex::Regex;

const SOURCE: &str = "Excalibur = 5000$; // Your own language!";

lazy_static::lazy_static! (
    static ref DUOS: Vec<(&'static str, Regex)> = duos![
        "whitespace", r"^[^\S\n]+",
        "comment_line", r"^//(.*)",
        "comment_block", r"^/\*(.|\n)*?\*/",
        "newline", r"^\n",
        "price", r"^[0-9]+\$",
        "semicolon", r"^;",
        "operator", r"^=",
        "name", r"^[a-zA-Z_]+"
    ];
);

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Build a tokenizer over the source using the precompiled duos.
    let mut tokenizer = Tokenizer::new(SOURCE, &DUOS);
    while let Some(token) = tokenizer.next()? {
        println!("{:?}", token);
    }
    Ok(())
}
```
```rust,ignore
Token { kind: "name", value: "Excalibur", pos: (1, 1) }
Token { kind: "whitespace", value: " ", pos: (1, 10) }
Token { kind: "operator", value: "=", pos: (1, 11) }
Token { kind: "whitespace", value: " ", pos: (1, 12) }
Token { kind: "price", value: "5000$", pos: (1, 13) }
Token { kind: "semicolon", value: ";", pos: (1, 18) }
Token { kind: "whitespace", value: " ", pos: (1, 19) }
Token { kind: "comment_line", value: "// Your own language!", pos: (1, 20) }
```

`next` possible `Result`

| `Result` | Meaning |
| --- | --- |
| `Ok(Some(token))` | Match is found |
| `Ok(None)` | End of source |
| `Err(error)` | An error occurs |
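
For reference, a minimal sketch that spells out these three outcomes with an explicit `match` instead of the `?` and `while let` shorthand used above. It reuses the `SOURCE` and `DUOS` definitions from the example.

```rust
// Illustrative only: reuses SOURCE and DUOS from the example above.
let mut tokenizer = Tokenizer::new(SOURCE, &DUOS);
loop {
    match tokenizer.next() {
        Ok(Some(token)) => println!("{:?}", token),          // match is found
        Ok(None) => break,                                   // end of source
        Err(error) => panic!("lexing failed: {:?}", error),  // an error occurs
    }
}
```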

`tokenize_all` possible `Result`

| `Result` | Meaning |
| --- | --- |
| `Ok(Vec<Tokens>)` | Tokens are found |
| `Err(error)` | An error occurs |
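
Assuming `tokenize_all` is called on the tokenizer and returns the whole token stream at once, a usage sketch could look like this (again reusing `SOURCE` and `DUOS` from the example above):

```rust
// Sketch only: assumes `tokenize_all` is a method on `Tokenizer`
// returning Result<Vec<Token>, _>; reuses SOURCE and DUOS from above.
let mut tokenizer = Tokenizer::new(SOURCE, &DUOS);
match tokenizer.tokenize_all() {
    Ok(tokens) => {
        // tokens are found
        for token in &tokens {
            println!("{:?}", token);
        }
    }
    Err(error) => eprintln!("lexing failed: {:?}", error), // an error occurs
}
```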

Initialization in 1.83 nanoseconds.

Tokenization of 29 639 tokens in 4.6 milliseconds.

Version 7.0.2 is 442% faster than version 4.0.1, thanks to precompiling everything.

Version 9.0.0 is 30% slower than version 8.1.0, in order to support custom whitespaces and comments.
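
These figures come from the project's own benchmarks. As a rough, unscientific way to get a ballpark number on your machine, you could time a run with `std::time::Instant`; this is not the benchmark harness behind the numbers above, and it assumes the `tokenize_all` call sketched earlier.

```rust
use std::time::Instant;

// Rough timing sketch, not the project's benchmark harness;
// reuses SOURCE and DUOS from the example above.
let start = Instant::now();
let mut tokenizer = Tokenizer::new(SOURCE, &DUOS);
let tokens = tokenizer.tokenize_all().expect("lexing failed");
println!("{} tokens in {:?}", tokens.len(), start.elapsed());
```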