Remove lazy_static dependency
I just learned about thread local storage, so we really don't need lazy_static for anything anymore - it was only being used for the lexer regex. Signed-off-by: Alek Ratzloff <alekratz@gmail.com>
This commit is contained in:
@@ -1,10 +1,9 @@
|
||||
use crate::syn::{error::*, span::*, token::*};
|
||||
use lazy_static::lazy_static;
|
||||
use regex::{Regex, RegexBuilder};
|
||||
use std::rc::Rc;
|
||||
|
||||
lazy_static! {
|
||||
static ref LEX_PAT: Regex = RegexBuilder::new(
|
||||
thread_local! {
|
||||
static LEX_PAT: Regex = RegexBuilder::new(
|
||||
r#"^(
|
||||
(?P<float>[-+]?[0-9]+\.[0-9]+([eE][+\-][0-9]+)?)
|
||||
| (?P<int>[-+]?[0-9]+)
|
||||
@@ -88,39 +87,41 @@ impl<'t> Lexer<'t> {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
if let Some(cap) = LEX_PAT.captures(&self.text[self.start.byte..]) {
|
||||
self.end = self.end.next_str(cap.get(0).unwrap().as_str());
|
||||
let sp_token = if let Some(_) = cap.name("assign") {
|
||||
self.make_token(Token::Assign)
|
||||
} else if let Some(_) = cap.name("meta") {
|
||||
self.make_token(Token::Meta)
|
||||
} else if let Some(_) = cap.name("word") {
|
||||
self.make_token(Token::Word)
|
||||
} else if let Some(_) = cap.name("float") {
|
||||
self.make_token(Token::Float)
|
||||
} else if let Some(_) = cap.name("int") {
|
||||
self.make_token(Token::Int)
|
||||
} else if let Some(_) = cap.name("str") {
|
||||
self.make_token(Token::Str)
|
||||
} else if let Some(_) = cap.name("lquote") {
|
||||
self.make_token(Token::LQuote)
|
||||
} else if let Some(_) = cap.name("rquote") {
|
||||
self.make_token(Token::RQuote)
|
||||
} else if let Some(_) = cap.name("apply") {
|
||||
self.make_token(Token::Apply)
|
||||
LEX_PAT.with(|lex| {
|
||||
if let Some(cap) = lex.captures(&self.text[self.start.byte..]) {
|
||||
self.end = self.end.next_str(cap.get(0).unwrap().as_str());
|
||||
let sp_token = if let Some(_) = cap.name("assign") {
|
||||
self.make_token(Token::Assign)
|
||||
} else if let Some(_) = cap.name("meta") {
|
||||
self.make_token(Token::Meta)
|
||||
} else if let Some(_) = cap.name("word") {
|
||||
self.make_token(Token::Word)
|
||||
} else if let Some(_) = cap.name("float") {
|
||||
self.make_token(Token::Float)
|
||||
} else if let Some(_) = cap.name("int") {
|
||||
self.make_token(Token::Int)
|
||||
} else if let Some(_) = cap.name("str") {
|
||||
self.make_token(Token::Str)
|
||||
} else if let Some(_) = cap.name("lquote") {
|
||||
self.make_token(Token::LQuote)
|
||||
} else if let Some(_) = cap.name("rquote") {
|
||||
self.make_token(Token::RQuote)
|
||||
} else if let Some(_) = cap.name("apply") {
|
||||
self.make_token(Token::Apply)
|
||||
} else {
|
||||
panic!(
|
||||
"matched lex pattern, but did not catch this capture: {:?}",
|
||||
cap
|
||||
)
|
||||
};
|
||||
Ok(Some(sp_token))
|
||||
} else {
|
||||
panic!(
|
||||
"matched lex pattern, but did not catch this capture: {:?}",
|
||||
cap
|
||||
)
|
||||
};
|
||||
Ok(Some(sp_token))
|
||||
} else {
|
||||
Err(SyntaxError::ExpectedGot {
|
||||
expected: "word, literal, or quote".into(),
|
||||
got: expected_got_char(self.curr().unwrap()),
|
||||
})
|
||||
}
|
||||
Err(SyntaxError::ExpectedGot {
|
||||
expected: "word, literal, or quote".into(),
|
||||
got: expected_got_char(self.curr().unwrap()),
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
Reference in New Issue
Block a user