refactor: remove input from ParserContext.

This commit is contained in:
2024-12-01 22:00:51 +08:00
parent 0e87775943
commit dbc56a477b
15 changed files with 300 additions and 484 deletions

View File

@@ -75,9 +75,9 @@ fn file_lexical_parser_test() -> anyhow::Result<()> {
let (_, nom_tokens) =
nom_lexical_parser(source_file.content.as_str()).or_else(|e| Err(e.to_owned()))?;
let context = ParserContext::new_with_str(source_file.content.as_str(), ());
let borrowed_context = context.borrow();
let (_, zero_tokens) = zero_lexical_parser(context.clone(), borrowed_context.input_slice())
let context = ParserContext::new(());
let word: Vec<char> = source_file.content.chars().collect();
let (_, zero_tokens) = zero_lexical_parser(context.clone(), word.as_slice())
.or_else(|e| Err(anyhow!("{}", e)))?;
assert_eq!(nom_tokens.len(), zero_tokens.len());

View File

@@ -5,11 +5,11 @@ use zero_parser::parser::ParserContext;
mod tokenizer;
fn validate_tokens(input: &'static str, tokens: Vec<LexicalTokenType>) {
let context = ParserContext::new_with_str(input, ());
let borrowed_context = context.borrow();
let context = ParserContext::new(());
let word: Vec<char> = input.chars().collect();
let (_, actual_tokens) =
zero_lexical_parser(context.clone(), borrowed_context.input_slice()).unwrap();
zero_lexical_parser(context.clone(),word.as_slice()).unwrap();
dbg!(&tokens, &actual_tokens);
assert_eq!(tokens.len(), actual_tokens.len());

View File

@@ -109,7 +109,7 @@ pub fn nom_lexical_parser(mut input: &str) -> IResult<&str, Vec<LexicalToken>> {
}
pub fn zero_lexical_parser(
context: Rc<RefCell<ParserContext<char, ()>>>,
context: Rc<RefCell<ParserContext<()>>>,
mut input: &[char],
) -> ParserResult<char, Vec<NewLexicalToken>> {
let mut array = vec![];

View File

@@ -9,7 +9,7 @@ use zero_parser::text::{char_parser, one_of, string_parser};
use zero_parser::{alternate, parser::satisfy};
pub fn keyword_parser(
context: Rc<RefCell<ParserContext<char, ()>>>,
context: Rc<RefCell<ParserContext<()>>>,
input: &[char],
) -> ParserResult<char, NewLexicalToken> {
tuple((
@@ -38,7 +38,7 @@ pub fn keyword_parser(
}
pub fn delimiter_parser(
context: Rc<RefCell<ParserContext<char, ()>>>,
context: Rc<RefCell<ParserContext<()>>>,
input: &[char],
) -> ParserResult<char, NewLexicalToken> {
alternate!(
@@ -60,7 +60,7 @@ pub fn delimiter_parser(
}
pub fn operator_parser(
context: Rc<RefCell<ParserContext<char, ()>>>,
context: Rc<RefCell<ParserContext<()>>>,
input: &[char],
) -> ParserResult<char, NewLexicalToken> {
alternate!(
@@ -89,7 +89,7 @@ pub fn operator_parser(
}
pub fn identifier_parser(
context: Rc<RefCell<ParserContext<char, ()>>>,
context: Rc<RefCell<ParserContext<()>>>,
input: &[char],
) -> ParserResult<char, NewLexicalToken> {
tuple((
@@ -105,7 +105,7 @@ pub fn identifier_parser(
}
pub fn decimal_integer_parser(
context: Rc<RefCell<ParserContext<char, ()>>>,
context: Rc<RefCell<ParserContext<()>>>,
input: &[char],
) -> ParserResult<char, NewLexicalToken> {
tuple((
@@ -126,7 +126,7 @@ pub fn decimal_integer_parser(
}
pub fn octal_integer_parser(
context: Rc<RefCell<ParserContext<char, ()>>>,
context: Rc<RefCell<ParserContext<()>>>,
input: &[char],
) -> ParserResult<char, NewLexicalToken> {
tuple((
@@ -147,7 +147,7 @@ pub fn octal_integer_parser(
}
pub fn hexadecimal_integer_parser(
context: Rc<RefCell<ParserContext<char, ()>>>,
context: Rc<RefCell<ParserContext<()>>>,
input: &[char],
) -> ParserResult<char, NewLexicalToken> {
tuple((
@@ -168,7 +168,7 @@ pub fn hexadecimal_integer_parser(
}
pub fn integer_parser(
context: Rc<RefCell<ParserContext<char, ()>>>,
context: Rc<RefCell<ParserContext<()>>>,
input: &[char],
) -> ParserResult<char, NewLexicalToken> {
alternate!(
@@ -180,7 +180,7 @@ pub fn integer_parser(
}
pub fn float_parser(
context: Rc<RefCell<ParserContext<char, ()>>>,
context: Rc<RefCell<ParserContext<()>>>,
input: &[char],
) -> ParserResult<char, NewLexicalToken> {
tuple((
@@ -202,7 +202,7 @@ pub fn float_parser(
}
pub fn literal_string_parser(
context: Rc<RefCell<ParserContext<char, ()>>>,
context: Rc<RefCell<ParserContext<()>>>,
input: &[char],
) -> ParserResult<char, NewLexicalToken> {
quote(char_parser('"'), any(), char_parser('"'))
@@ -218,7 +218,7 @@ pub fn literal_string_parser(
}
pub fn comments_parser(
context: Rc<RefCell<ParserContext<char, ()>>>,
context: Rc<RefCell<ParserContext<()>>>,
input: &[char],
) -> ParserResult<char, ()> {
alternate!(
@@ -239,7 +239,7 @@ pub fn comments_parser(
}
pub fn junk_parser(
context: Rc<RefCell<ParserContext<char, ()>>>,
context: Rc<RefCell<ParserContext<()>>>,
input: &[char],
) -> ParserResult<char, ()> {
alternate!(
@@ -250,7 +250,7 @@ pub fn junk_parser(
}
pub fn combine_parser(
context: Rc<RefCell<ParserContext<char, ()>>>,
context: Rc<RefCell<ParserContext<()>>>,
input: &[char],
) -> ParserResult<char, NewLexicalToken> {
alternate!(

View File

@@ -1,3 +1,5 @@
extern crate core;
use crate::tokenizer::zero_parsers::{
combine_parser, comments_parser, decimal_integer_parser, delimiter_parser, float_parser,
hexadecimal_integer_parser, identifier_parser, integer_parser, keyword_parser,
@@ -14,15 +16,14 @@ mod tokenizer;
fn assert_lexical_parser(
except: LexicalToken,
parser: fn(
Rc<RefCell<ParserContext<char, ()>>>,
Rc<RefCell<ParserContext<()>>>,
&[char],
) -> ParserResult<char, NewLexicalToken>,
input: &str,
) {
let context = ParserContext::new_with_str(input, ());
let borrowed_context = context.borrow();
let input = borrowed_context.input_slice();
let (_, token) = parser(context.clone(), input).unwrap();
let context = ParserContext::new(());
let word: Vec<char> = input.chars().collect();
let (_, token) = parser(context.clone(), word.as_slice()).unwrap();
assert_eq!(except, token);
}
@@ -30,11 +31,11 @@ fn assert_lexical_parser(
fn assert_parser<T, F>(except: T, parser: F, input: &str)
where
T: PartialEq + Debug,
F: Fn(Rc<RefCell<ParserContext<char, ()>>>, &[char]) -> ParserResult<char, T>,
F: Fn(Rc<RefCell<ParserContext<()>>>, &[char]) -> ParserResult<char, T>,
{
let context = ParserContext::new_with_str(input, ());
let borrowed_context = context.borrow();
let (_, token) = parser(context.clone(), borrowed_context.input_slice()).unwrap();
let context = ParserContext::new(());
let word: Vec<char> = input.chars().collect();
let (_, token) = parser(context.clone(), word.as_slice()).unwrap();
assert_eq!(except, token);
}