zero-parser/tests/zero_parser_tests.rs

extern crate core;
use crate::tokenizer::zero_parsers::{
    combine_parser, comments_parser, decimal_integer_parser, delimiter_parser, float_parser,
    hexadecimal_integer_parser, identifier_parser, integer_parser, keyword_parser,
    literal_string_parser, octal_integer_parser, operator_parser,
};
use crate::tokenizer::{LexicalToken, LexicalTokenType, NewLexicalToken};
use std::cell::RefCell;
use std::fmt::Debug;
use std::rc::Rc;
use zero_parser::parser::{ParserContext, ParserResult};
mod tokenizer;
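
/// Runs `parser` over `input` and asserts that the token it produces
/// equals `expected`. Any leftover input is ignored.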
fn assert_lexical_parser(
    expected: LexicalToken,
    parser: fn(Rc<RefCell<ParserContext<()>>>, &[char]) -> ParserResult<char, NewLexicalToken>,
    input: &str,
) {
    let context = ParserContext::new(());
    let word: Vec<char> = input.chars().collect();
    let (_, token) = parser(context.clone(), word.as_slice()).unwrap();

    assert_eq!(expected, token);
}
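
/// Generic variant of `assert_lexical_parser` for parsers whose output is not
/// a `NewLexicalToken` (e.g. the `()` returned by `comments_parser`).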
fn assert_parser<T, F>(expected: T, parser: F, input: &str)
where
    T: PartialEq + Debug,
    F: Fn(Rc<RefCell<ParserContext<()>>>, &[char]) -> ParserResult<char, T>,
{
    let context = ParserContext::new(());
    let word: Vec<char> = input.chars().collect();
    let (_, token) = parser(context.clone(), word.as_slice()).unwrap();

    assert_eq!(expected, token);
}
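
// A keyword at the start of the input is recognized and the remaining text
// ("int a = 3;", "0;") is left unconsumed.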
#[test]
fn keyword_parser_test() {
    assert_lexical_parser(
        LexicalToken {
            token_type: LexicalTokenType::Keyword,
            literal_value: "const",
        },
        keyword_parser,
        "const int a = 3;",
    );
    assert_lexical_parser(
        LexicalToken {
            token_type: LexicalTokenType::Keyword,
            literal_value: "return",
        },
        keyword_parser,
        "return 0;",
    );
}
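
// A single delimiter such as `{` is recognized at the head of the input.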
#[test]
fn delimiter_parser_test() {
    assert_lexical_parser(
        LexicalToken {
            token_type: LexicalTokenType::Delimiter,
            literal_value: "{",
        },
        delimiter_parser,
        "{ int i = 3;}",
    );
}
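
// Two-character operators (`!=`, `>=`) are matched in full, while a bare
// `!` or `>` still yields the one-character operator.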
#[test]
fn operator_parser_test() {
    assert_lexical_parser(
        LexicalToken {
            token_type: LexicalTokenType::Operator,
            literal_value: "!=",
        },
        operator_parser,
        "!=",
    );
    assert_lexical_parser(
        LexicalToken {
            token_type: LexicalTokenType::Operator,
            literal_value: "!",
        },
        operator_parser,
        "!",
    );
    assert_lexical_parser(
        LexicalToken {
            token_type: LexicalTokenType::Operator,
            literal_value: ">=",
        },
        operator_parser,
        ">=",
    );
    assert_lexical_parser(
        LexicalToken {
            token_type: LexicalTokenType::Operator,
            literal_value: ">",
        },
        operator_parser,
        "> 123",
    );
}
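
// Identifiers may start with a letter or underscore and may contain digits;
// trailing text after the identifier is not consumed.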
#[test]
fn identifier_parser_test() {
    assert_lexical_parser(
        LexicalToken {
            token_type: LexicalTokenType::Identifier,
            literal_value: "a",
        },
        identifier_parser,
        "a = 3",
    );
    assert_lexical_parser(
        LexicalToken {
            token_type: LexicalTokenType::Identifier,
            literal_value: "_123",
        },
        identifier_parser,
        "_123 = NULL",
    );
    assert_lexical_parser(
        LexicalToken {
            token_type: LexicalTokenType::Identifier,
            literal_value: "test_123",
        },
        identifier_parser,
        "test_123 += 3",
    );
}
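
// Decimal literals carry both their numeric value and the original text.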
#[test]
fn decimal_integer_parser_test() {
    assert_lexical_parser(
        LexicalToken {
            token_type: LexicalTokenType::ConstInteger(100),
            literal_value: "100",
        },
        decimal_integer_parser,
        "100",
    );
    assert_lexical_parser(
        LexicalToken {
            token_type: LexicalTokenType::ConstInteger(56),
            literal_value: "56",
        },
        decimal_integer_parser,
        "56 + 44",
    );
}
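
// A leading `0` marks an octal literal: "077" evaluates to 63, while
// "0" and "00" are both zero.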
#[test]
fn octal_integer_parser_test() {
    assert_lexical_parser(
        LexicalToken {
            token_type: LexicalTokenType::ConstInteger(63),
            literal_value: "077",
        },
        octal_integer_parser,
        "077",
    );
    assert_lexical_parser(
        LexicalToken {
            token_type: LexicalTokenType::ConstInteger(0),
            literal_value: "0",
        },
        octal_integer_parser,
        "0",
    );
    assert_lexical_parser(
        LexicalToken {
            token_type: LexicalTokenType::ConstInteger(0),
            literal_value: "00",
        },
        octal_integer_parser,
        "00",
    );
}
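
// Hexadecimal literals accept both the `0x` and `0X` prefixes; "0xF" is 15.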
#[test]
fn hexadecimal_integer_parser_test() {
    assert_lexical_parser(
        LexicalToken {
            token_type: LexicalTokenType::ConstInteger(0),
            literal_value: "0x0",
        },
        hexadecimal_integer_parser,
        "0x0",
    );
    assert_lexical_parser(
        LexicalToken {
            token_type: LexicalTokenType::ConstInteger(0),
            literal_value: "0X00",
        },
        hexadecimal_integer_parser,
        "0X00",
    );
    assert_lexical_parser(
        LexicalToken {
            token_type: LexicalTokenType::ConstInteger(15),
            literal_value: "0xF",
        },
        hexadecimal_integer_parser,
        "0xF",
    );
}
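
// The general integer parser accepts decimal, octal ("0055" == 45) and
// hexadecimal ("0X123" == 291) forms.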
#[test]
fn integer_parser_test() {
    assert_lexical_parser(
        LexicalToken {
            token_type: LexicalTokenType::ConstInteger(0),
            literal_value: "0",
        },
        integer_parser,
        "0",
    );
    assert_lexical_parser(
        LexicalToken {
            token_type: LexicalTokenType::ConstInteger(45),
            literal_value: "0055",
        },
        integer_parser,
        "0055",
    );
    assert_lexical_parser(
        LexicalToken {
            token_type: LexicalTokenType::ConstInteger(291),
            literal_value: "0X123",
        },
        integer_parser,
        "0X123",
    );
}
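
// Float tokens compare by numeric value while keeping the literal text,
// so "03.14" parses to 3.14.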
#[test]
fn float_parser_test() {
    assert_lexical_parser(
        LexicalToken {
            token_type: LexicalTokenType::ConstFloat(100.0),
            literal_value: "100.0",
        },
        float_parser,
        "100.0",
    );
    assert_lexical_parser(
        LexicalToken {
            token_type: LexicalTokenType::ConstFloat(0.5),
            literal_value: "0.5",
        },
        float_parser,
        "0.5",
    );
    assert_lexical_parser(
        LexicalToken {
            token_type: LexicalTokenType::ConstFloat(123.456),
            literal_value: "123.456",
        },
        float_parser,
        "123.456",
    );
    assert_lexical_parser(
        LexicalToken {
            token_type: LexicalTokenType::ConstFloat(3.14),
            literal_value: "03.14",
        },
        float_parser,
        "03.14",
    );
}
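
// The surrounding double quotes are not part of the string token's value.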
#[test]
fn literal_string_test() {
    assert_lexical_parser(
        LexicalToken {
            token_type: LexicalTokenType::LiteralString,
            literal_value: "abc",
        },
        literal_string_parser,
        "\"abc\"",
    );
}
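
// Line comments and block comments both parse to `()` and produce no token.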
#[test]
fn comments_test() {
    assert_parser((), comments_parser, "//test while-if\n");
    assert_parser((), comments_parser, "/* asasdasd */");
}
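
// `combine_parser` picks the right token kind for each input: "120.110" is a
// float rather than an integer, and "const_number" / "int_a" are identifiers
// rather than the keywords they start with.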
#[test]
fn combine_parser_test() {
    assert_lexical_parser(
        LexicalToken {
            token_type: LexicalTokenType::ConstInteger(120),
            literal_value: "120",
        },
        combine_parser,
        "120",
    );
    assert_lexical_parser(
        LexicalToken {
            token_type: LexicalTokenType::ConstFloat(120.11),
            literal_value: "120.110",
        },
        combine_parser,
        "120.110",
    );
    assert_lexical_parser(
        LexicalToken {
            token_type: LexicalTokenType::LiteralString,
            literal_value: "Hello, world!\n",
        },
        combine_parser,
        "\"Hello, world!\n\"",
    );
    assert_lexical_parser(
        LexicalToken {
            token_type: LexicalTokenType::Keyword,
            literal_value: "const",
        },
        combine_parser,
        "const",
    );
    assert_lexical_parser(
        LexicalToken {
            token_type: LexicalTokenType::Identifier,
            literal_value: "const_number",
        },
        combine_parser,
        "const_number",
    );
    assert_lexical_parser(
        LexicalToken {
            token_type: LexicalTokenType::Keyword,
            literal_value: "int",
        },
        combine_parser,
        "int",
    );
    assert_lexical_parser(
        LexicalToken {
            token_type: LexicalTokenType::Identifier,
            literal_value: "int_a",
        },
        combine_parser,
        "int_a",
    );
}