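//! Integration test comparing the nom-based lexer with the zero_parser-based
//! lexer over the SysY sources in `sysy_sets/`.
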
mod tokenizer;

use crate::tokenizer::{nom_lexical_parser, zero_lexical_parser};
use anyhow::anyhow;
use std::fs::File;
use std::io::{BufReader, Read};
use std::path::{Path, PathBuf};
use std::{env, fs};
use zero_parser::parser::ParserContext;

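/// Locates the `sysy_sets` directory in the current working directory and
/// returns the paths of all `.sy` source files it contains.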
pub fn ensure_sysy_sources() -> anyhow::Result<Vec<PathBuf>> {
    let current_dir = env::current_dir()?;

    for entry in fs::read_dir(&current_dir)? {
        let entry = entry?;

        if entry.file_name() == "sysy_sets" {
            let set_path = entry.path();

            // Keep only the entries whose file name ends with `.sy`.
            return Ok(fs::read_dir(&set_path)?
                .filter_map(|f| {
                    f.ok().and_then(|f| {
                        f.file_name()
                            .into_string()
                            .ok()
                            .and_then(|file_name| file_name.ends_with(".sy").then_some(f.path()))
                    })
                })
                .collect());
        }
    }

    Err(anyhow!(
        "Failed to find `sysy_sets` directory in current dir {:?}.",
        current_dir
    ))
}

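/// A source file loaded into memory: its file name plus its full text.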
#[derive(Debug)]
pub struct SourceFile {
    pub filename: String,
    pub content: String,
}

impl SourceFile {
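    /// Opens the file at `path` and reads it fully into memory.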
    pub fn new(path: &Path) -> anyhow::Result<Self> {
        let file = File::open(path)?;
        let mut reader = BufReader::new(file);

        let mut content = String::new();
        reader.read_to_string(&mut content)?;

        Ok(Self {
            filename: path
                .file_name()
                .and_then(|x| x.to_str())
                .ok_or_else(|| anyhow!("Failed to get filename from path."))?
                .to_owned(),
            content,
        })
    }
}

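/// Lexes every SysY source file with both lexer implementations and asserts
/// that they produce identical token streams.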
#[test]
fn file_lexical_parser_test() -> anyhow::Result<()> {
    let sysy_set = ensure_sysy_sources()?;
    let source_files = sysy_set
        .into_iter()
        .map(|p| SourceFile::new(&p))
        .collect::<Result<Vec<SourceFile>, anyhow::Error>>()?;

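    // Run both lexers over each file and compare their outputs.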
    for source_file in source_files {
        println!("Testing file '{}'.", source_file.filename);

        // Lex with the nom-based implementation; `to_owned` detaches the
        // error from the borrowed input so `?` can convert it.
        let (_, nom_tokens) =
            nom_lexical_parser(source_file.content.as_str()).map_err(|e| e.to_owned())?;

        // Lex the same input with the zero_parser-based implementation.
        let context = ParserContext::new_with_str(source_file.content.as_str(), ());
        let borrowed_context = context.borrow();
        let (_, zero_tokens) = zero_lexical_parser(context.clone(), borrowed_context.input_slice())
            .map_err(|e| anyhow!("{}", e.1))?;

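        // The two lexers must agree on the number of tokens.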
        assert_eq!(nom_tokens.len(), zero_tokens.len());

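        // And every token must match pairwise.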
        for (expected, actual) in nom_tokens.iter().zip(zero_tokens.iter()) {
            assert_eq!(
                expected,
                actual,
                "The literal value of the actual token is {}.",
                actual
                    .literal_value
                    .iter()
                    .cloned()
                    .collect::<String>()
            );
        }
        println!(
            "Successfully tested file '{}' ({} tokens).",
            source_file.filename,
            zero_tokens.len()
        );
    }

    Ok(())
}