Remove the colon token, and update the assign atom to use a colon instead of an equals sign

Signed-off-by: Alek Ratzloff <alekratz@gmail.com>
This commit is contained in:
2022-01-16 13:08:23 -08:00
parent c493ab69bb
commit cc1d55d826
2 changed files with 15 additions and 37 deletions

View File

@@ -7,12 +7,11 @@ lazy_static! {
r#"^( r#"^(
(?P<float>[-+]?[0-9]+\.[0-9]+([eE][+\-][0-9]+)?) (?P<float>[-+]?[0-9]+\.[0-9]+([eE][+\-][0-9]+)?)
| (?P<int>[-+]?[0-9]+) | (?P<int>[-+]?[0-9]+)
| (?P<assign>=[a-zA-Z_?\-*+/.'@$%^&|][0-9a-zA-Z_?\-*+/=.'@$%^&|]*) | (?P<assign>:[a-zA-Z_?\-*+/=.'@$%^&|][0-9a-zA-Z_?\-*+/=.'@$%^&|]*)
| (?P<meta>%[a-zA-Z0-9\-_]+) | (?P<meta>%[a-zA-Z0-9\-_]+)
| (?P<word>[a-zA-Z_?\-*+/=.'@$%^&|][0-9a-zA-Z_?\-*+/=.'@$%^&|]*) | (?P<word>[a-zA-Z_?\-*+/=.'@$%^&|][0-9a-zA-Z_?\-*+/=.'@$%^&|]*)
| (?P<lquote>\[) | (?P<lquote>\[)
| (?P<rquote>\]) | (?P<rquote>\])
| (?P<colon>:)
| (?P<apply>!) | (?P<apply>!)
| (?P<str>"([^"\\]|\\["'\\ntrb])*") | (?P<str>"([^"\\]|\\["'\\ntrb])*")
)"# )"#
@@ -104,8 +103,6 @@ impl<'t> Lexer<'t> {
self.make_token(Token::LQuote) self.make_token(Token::LQuote)
} else if let Some(_) = cap.name("rquote") { } else if let Some(_) = cap.name("rquote") {
self.make_token(Token::RQuote) self.make_token(Token::RQuote)
} else if let Some(_) = cap.name("colon") {
self.make_token(Token::Colon)
} else if let Some(_) = cap.name("apply") { } else if let Some(_) = cap.name("apply") {
self.make_token(Token::Apply) self.make_token(Token::Apply)
} else { } else {
@@ -168,7 +165,7 @@ mod test {
#[test] #[test]
fn test_word() { fn test_word() {
let mut lexer = Lexer::new(r"a b c d foo bar baz"); let mut lexer = Lexer::new(r"a b c d foo bar baz = == === =a ==a ===a");
assert_token!(lexer, Token::Word, "a"); assert_token!(lexer, Token::Word, "a");
assert_token!(lexer, Token::Word, "b"); assert_token!(lexer, Token::Word, "b");
assert_token!(lexer, Token::Word, "c"); assert_token!(lexer, Token::Word, "c");
@@ -178,21 +175,24 @@ mod test {
assert_token!(lexer, Token::Word, "bar"); assert_token!(lexer, Token::Word, "bar");
assert_token!(lexer, Token::Word, "baz"); assert_token!(lexer, Token::Word, "baz");
assert_token!(lexer, Token::Word, "=");
assert_token!(lexer, Token::Word, "==");
assert_token!(lexer, Token::Word, "===");
assert_token!(lexer, Token::Word, "=a");
assert_token!(lexer, Token::Word, "==a");
assert_token!(lexer, Token::Word, "===a");
assert!(lexer.is_eof()); assert!(lexer.is_eof());
} }
#[test] #[test]
fn test_assign() { fn test_assign() {
let mut lexer = Lexer::new(r"= == === =a ==a ===a = a"); let mut lexer = Lexer::new(r":a := :foo :foo-bar :foo-bar-baz");
assert_token!(lexer, Token::Word, "="); assert_token!(lexer, Token::Assign, ":a");
assert_token!(lexer, Token::Word, "=="); assert_token!(lexer, Token::Assign, ":=");
assert_token!(lexer, Token::Word, "==="); assert_token!(lexer, Token::Assign, ":foo");
assert_token!(lexer, Token::Assign, "=a"); assert_token!(lexer, Token::Assign, ":foo-bar");
assert_token!(lexer, Token::Word, "==a"); assert_token!(lexer, Token::Assign, ":foo-bar-baz");
assert_token!(lexer, Token::Word, "===a");
assert_token!(lexer, Token::Word, "=");
assert_token!(lexer, Token::Word, "a");
assert!(lexer.is_eof()); assert!(lexer.is_eof());
} }
@@ -240,22 +240,4 @@ mod test {
assert_token!(lexer, Token::LQuote); assert_token!(lexer, Token::LQuote);
assert!(lexer.is_eof()); assert!(lexer.is_eof());
} }
#[test]
fn test_colon() {
let mut lexer = Lexer::new(": :: ::: :::: some-name:");
assert_token!(lexer, Token::Colon);
assert_token!(lexer, Token::Colon);
assert_token!(lexer, Token::Colon);
assert_token!(lexer, Token::Colon);
assert_token!(lexer, Token::Colon);
assert_token!(lexer, Token::Colon);
assert_token!(lexer, Token::Colon);
assert_token!(lexer, Token::Colon);
assert_token!(lexer, Token::Colon);
assert_token!(lexer, Token::Colon);
assert_token!(lexer, Token::Word);
assert_token!(lexer, Token::Colon);
assert!(lexer.is_eof());
}
} }

View File

@@ -24,9 +24,6 @@ pub enum Token {
/// Quote end. /// Quote end.
RQuote, RQuote,
/// Colon.
Colon,
/// Apply. /// Apply.
Apply, Apply,
@@ -45,7 +42,6 @@ impl Token {
Str => "str", Str => "str",
LQuote => "quote begin", LQuote => "quote begin",
RQuote => "quote end", RQuote => "quote end",
Colon => "colon",
Apply => "apply", Apply => "apply",
Meta => "meta", Meta => "meta",
} }