Tokenizer test

attempt/001
Elijah Voigt 1 year ago
parent 957ec64ee0
commit 4bd5773dde

@ -0,0 +1,49 @@
use nom::bytes::complete::tag;
use nom::bytes::complete::take_until1;
use nom::sequence::tuple;
// Entry point is intentionally a stub: the crate currently only
// exercises `tokenize` through the unit test below.
fn main() {
    todo!()
}
/// A single lexical unit produced by `tokenize`.
#[derive(Debug, Clone, PartialEq)]
enum Token {
    /// A bare word: any piece that is neither quoted nor numeric.
    Tag(String),
    /// The contents of a double-quoted piece, with the quotes stripped.
    Str(String),
    /// A piece that parses as an `f32` numeric literal.
    Num(f32),
}
/// Splits `line` on whitespace and classifies each piece as a [`Token`].
///
/// Classification per piece:
/// - `"..."` with at least one character between the quotes → `Token::Str`
///   (quotes stripped);
/// - anything that parses as `f32` → `Token::Num`;
/// - everything else → `Token::Tag`.
///
/// Uses `split_whitespace`, so runs of spaces no longer yield spurious
/// empty `Tag("")` tokens (the old `split(" ")` did). The quoted-string
/// check is pure stdlib (`strip_prefix`/`strip_suffix`) and requires the
/// closing quote to be the final character of the piece, instead of
/// silently ignoring trailing junk as the previous nom parser did.
fn tokenize(line: &str) -> Vec<Token> {
    line.split_whitespace()
        .map(|piece| {
            // Quoted string: leading and trailing `"` with a non-empty body
            // (mirrors the old `take_until1`, which rejected `""`).
            let quoted = piece
                .strip_prefix('"')
                .and_then(|rest| rest.strip_suffix('"'))
                .filter(|body| !body.is_empty());
            if let Some(body) = quoted {
                Token::Str(body.to_owned())
            } else if let Ok(n) = piece.parse::<f32>() {
                Token::Num(n)
            } else {
                Token::Tag(piece.to_owned())
            }
        })
        .collect()
}
/// Checks that `tokenize` classifies bare words, quoted strings, and a
/// numeric literal from one space-separated line.
#[test]
fn test_tokenize() {
    let input = "foo \"bar\" 1.23 baz \"asdf\" etc";
    let expected = vec![
        Token::Tag(String::from("foo")),
        Token::Str(String::from("bar")),
        Token::Num(1.23),
        Token::Tag(String::from("baz")),
        Token::Str(String::from("asdf")),
        Token::Tag(String::from("etc")),
    ];
    assert_eq!(tokenize(input), expected);
}
Loading…
Cancel
Save