diff --git a/src/syn/lexer.rs b/src/syn/lexer.rs
index 1093f0b..6d6cc56 100644
--- a/src/syn/lexer.rs
+++ b/src/syn/lexer.rs
@@ -8,9 +8,9 @@ lazy_static! {
         r#"^(
             (?P<float>[-+]?[0-9]+\.[0-9]+([eE][+\-][0-9]+)?)
           | (?P<int>[-+]?[0-9]+)
-          | (?P<assign>:[a-zA-Z_?\-*+/=.'@$%^&|][0-9a-zA-Z_?\-*+/=.'@$%^&|]*)
+          | (?P<assign>:[a-zA-Z_?\-*+/=.'@$%^&|~][0-9a-zA-Z_?\-*+/=.'@$%^&|~]*)
           | (?P<percent>%[a-zA-Z0-9\-_]+)
-          | (?P<word>[a-zA-Z_?\-*+/=.'@$%^&|][0-9a-zA-Z_?\-*+/=.'@$%^&|]*)
+          | (?P<word>[a-zA-Z_?\-*+/=.'@$%^&|~][0-9a-zA-Z_?\-*+/=.'@$%^&|~]*)
           | (?P<lbracket>\[)
           | (?P<rbracket>\])
           | (?P<bang>!)
@@ -168,7 +168,7 @@ mod test {
     #[test]
     fn test_word() {
-        let mut lexer = Lexer::new("test", r"a b c d foo bar baz = == === =a ==a ===a");
+        let mut lexer = Lexer::new("test", r"a b c d foo bar baz = == === =a ==a ===a ~= ~==");
         assert_token!(lexer, Token::Word, "a");
         assert_token!(lexer, Token::Word, "b");
         assert_token!(lexer, Token::Word, "c");
         assert_token!(lexer, Token::Word, "d");
@@ -184,18 +184,21 @@ mod test {
         assert_token!(lexer, Token::Word, "=a");
         assert_token!(lexer, Token::Word, "==a");
         assert_token!(lexer, Token::Word, "===a");
+        assert_token!(lexer, Token::Word, "~=");
+        assert_token!(lexer, Token::Word, "~==");
         assert!(lexer.is_eof());
     }

     #[test]
     fn test_assign() {
-        let mut lexer = Lexer::new("test", r":a := :foo :foo-bar :foo-bar-baz");
+        let mut lexer = Lexer::new("test", r":a := :foo :foo-bar :foo-bar-baz :foo~bar~baz");
         assert_token!(lexer, Token::Assign, ":a");
         assert_token!(lexer, Token::Assign, ":=");
         assert_token!(lexer, Token::Assign, ":foo");
         assert_token!(lexer, Token::Assign, ":foo-bar");
         assert_token!(lexer, Token::Assign, ":foo-bar-baz");
+        assert_token!(lexer, Token::Assign, ":foo~bar~baz");
         assert!(lexer.is_eof());
     }

     #[test]