Remove lazy_static dependency
I just learned about thread-local storage, so we really don't need lazy_static for anything anymore — it was only being used for the lexer regex. Signed-off-by: Alek Ratzloff <alekratz@gmail.com>
This commit is contained in:
1
Cargo.lock
generated
1
Cargo.lock
generated
@@ -203,7 +203,6 @@ name = "sybil"
|
|||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"gc",
|
"gc",
|
||||||
"lazy_static",
|
|
||||||
"regex",
|
"regex",
|
||||||
"structopt",
|
"structopt",
|
||||||
"thiserror",
|
"thiserror",
|
||||||
|
|||||||
@@ -9,5 +9,4 @@ edition = "2021"
|
|||||||
thiserror = "1.0"
|
thiserror = "1.0"
|
||||||
structopt = "0.3"
|
structopt = "0.3"
|
||||||
regex = "1.5"
|
regex = "1.5"
|
||||||
lazy_static = "1.4"
|
|
||||||
gc = { version = "0.4", features = ["derive"] }
|
gc = { version = "0.4", features = ["derive"] }
|
||||||
@@ -1,10 +1,9 @@
|
|||||||
use crate::syn::{error::*, span::*, token::*};
|
use crate::syn::{error::*, span::*, token::*};
|
||||||
use lazy_static::lazy_static;
|
|
||||||
use regex::{Regex, RegexBuilder};
|
use regex::{Regex, RegexBuilder};
|
||||||
use std::rc::Rc;
|
use std::rc::Rc;
|
||||||
|
|
||||||
lazy_static! {
|
thread_local! {
|
||||||
static ref LEX_PAT: Regex = RegexBuilder::new(
|
static LEX_PAT: Regex = RegexBuilder::new(
|
||||||
r#"^(
|
r#"^(
|
||||||
(?P<float>[-+]?[0-9]+\.[0-9]+([eE][+\-][0-9]+)?)
|
(?P<float>[-+]?[0-9]+\.[0-9]+([eE][+\-][0-9]+)?)
|
||||||
| (?P<int>[-+]?[0-9]+)
|
| (?P<int>[-+]?[0-9]+)
|
||||||
@@ -88,39 +87,41 @@ impl<'t> Lexer<'t> {
|
|||||||
return Ok(None);
|
return Ok(None);
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Some(cap) = LEX_PAT.captures(&self.text[self.start.byte..]) {
|
LEX_PAT.with(|lex| {
|
||||||
self.end = self.end.next_str(cap.get(0).unwrap().as_str());
|
if let Some(cap) = lex.captures(&self.text[self.start.byte..]) {
|
||||||
let sp_token = if let Some(_) = cap.name("assign") {
|
self.end = self.end.next_str(cap.get(0).unwrap().as_str());
|
||||||
self.make_token(Token::Assign)
|
let sp_token = if let Some(_) = cap.name("assign") {
|
||||||
} else if let Some(_) = cap.name("meta") {
|
self.make_token(Token::Assign)
|
||||||
self.make_token(Token::Meta)
|
} else if let Some(_) = cap.name("meta") {
|
||||||
} else if let Some(_) = cap.name("word") {
|
self.make_token(Token::Meta)
|
||||||
self.make_token(Token::Word)
|
} else if let Some(_) = cap.name("word") {
|
||||||
} else if let Some(_) = cap.name("float") {
|
self.make_token(Token::Word)
|
||||||
self.make_token(Token::Float)
|
} else if let Some(_) = cap.name("float") {
|
||||||
} else if let Some(_) = cap.name("int") {
|
self.make_token(Token::Float)
|
||||||
self.make_token(Token::Int)
|
} else if let Some(_) = cap.name("int") {
|
||||||
} else if let Some(_) = cap.name("str") {
|
self.make_token(Token::Int)
|
||||||
self.make_token(Token::Str)
|
} else if let Some(_) = cap.name("str") {
|
||||||
} else if let Some(_) = cap.name("lquote") {
|
self.make_token(Token::Str)
|
||||||
self.make_token(Token::LQuote)
|
} else if let Some(_) = cap.name("lquote") {
|
||||||
} else if let Some(_) = cap.name("rquote") {
|
self.make_token(Token::LQuote)
|
||||||
self.make_token(Token::RQuote)
|
} else if let Some(_) = cap.name("rquote") {
|
||||||
} else if let Some(_) = cap.name("apply") {
|
self.make_token(Token::RQuote)
|
||||||
self.make_token(Token::Apply)
|
} else if let Some(_) = cap.name("apply") {
|
||||||
|
self.make_token(Token::Apply)
|
||||||
|
} else {
|
||||||
|
panic!(
|
||||||
|
"matched lex pattern, but did not catch this capture: {:?}",
|
||||||
|
cap
|
||||||
|
)
|
||||||
|
};
|
||||||
|
Ok(Some(sp_token))
|
||||||
} else {
|
} else {
|
||||||
panic!(
|
Err(SyntaxError::ExpectedGot {
|
||||||
"matched lex pattern, but did not catch this capture: {:?}",
|
expected: "word, literal, or quote".into(),
|
||||||
cap
|
got: expected_got_char(self.curr().unwrap()),
|
||||||
)
|
})
|
||||||
};
|
}
|
||||||
Ok(Some(sp_token))
|
})
|
||||||
} else {
|
|
||||||
Err(SyntaxError::ExpectedGot {
|
|
||||||
expected: "word, literal, or quote".into(),
|
|
||||||
got: expected_got_char(self.curr().unwrap()),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
Reference in New Issue
Block a user