feat: add first benchmark

parent 0abe596d
use scie_grammar::grammar::{Grammar, StackElement};
use std::env;
use std::ffi::OsStr;
use std::fs::File;
use std::io::Read;
use std::path::{Path, PathBuf};

fn main() {
    println!("Hello, world!");

    // Resolve the repository root relative to cargo's `target` directory.
    let target_dir = get_target_dir();
    let root_dir = get_top_dir(&*target_dir);
    println!("{:?}", root_dir);

    // The JSON grammar definition and the file used as tokenization input.
    let lang_spec_dir = root_dir
        .join("extensions")
        .join("json")
        .join("syntaxes")
        .join("JSON.tmLanguage.json");
    let lang_test_dir = Path::new("fixtures").join("JavaScript.tmLanguage.json.txt");
    let code = read_code(&lang_test_dir);

    // Tokenize line by line, carrying the rule stack across lines.
    let mut grammar = Grammar::to_grammar(lang_spec_dir.to_str().unwrap());
    let mut rule_stack = Some(StackElement::null());
    for line in code.lines() {
        println!("{:?}", line);
        let result = grammar.tokenize_line(String::from(line), &mut rule_stack);
        rule_stack = *result.rule_stack;
    }

    println!("{:?}", lang_spec_dir);
    println!("{:?}", lang_test_dir);
}

fn read_code(lang_test_dir: &PathBuf) -> String {
    let mut file = File::open(lang_test_dir).unwrap();
    let mut code = String::new();
    file.read_to_string(&mut code).unwrap();
    code
}
// https://github.com/rust-lang/cargo/issues/2841
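// NOTE: the actual get_target_dir()/get_top_dir() definitions are collapsed in
// this view. Given the otherwise-unused `env` and `OsStr` imports above and the
// workaround discussed in the linked cargo issue, a hypothetical sketch (an
// assumption, not the committed code) could look like this:
fn get_target_dir() -> PathBuf {
    // Walk up from the running executable until the directory itself is named `target`.
    let mut dir = env::current_exe().expect("current_exe failed");
    while dir.file_name() != Some(OsStr::new("target")) {
        dir = dir.parent().expect("no parent directory").to_path_buf();
    }
    dir
}

fn get_top_dir(target_dir: &Path) -> &Path {
    // The repository root is assumed to be the parent of the `target` directory.
    target_dir.parent().expect("target dir has no parent")
}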
...

@@ -11,6 +11,9 @@ use crate::rule::{
 };
 use core::cmp;
 use scie_scanner::scanner::scanner::IOnigCaptureIndex;
+use std::path::Path;
+use std::fs::File;
+use std::io::Read;

 pub trait Matcher {}
@@ -30,8 +33,8 @@ pub struct CheckWhileConditionResult {

 #[derive(Debug, Clone)]
 pub struct TokenizeResult {
-    tokens: Vec<IToken>,
-    rule_stack: Box<Option<StackElement>>,
+    pub tokens: Vec<IToken>,
+    pub rule_stack: Box<Option<StackElement>>,
 }

 #[derive(Debug, Clone)]
@@ -567,6 +570,16 @@ impl Grammar {
     }

     pub fn tokenize_line2(&self, _line_text: String, _prev_state: Option<StackElement>) {}
+
+    pub fn to_grammar(grammar_path: &str) -> Grammar {
+        let path = Path::new(grammar_path);
+        let mut file = File::open(path).unwrap();
+        let mut data = String::new();
+        file.read_to_string(&mut data).unwrap();
+
+        let g: IRawGrammar = serde_json::from_str(&data).unwrap();
+        Grammar::new(g)
+    }
 }

 impl IRuleFactoryHelper for Grammar {}
@@ -630,7 +643,7 @@ return 0;

     #[test]
     fn should_identify_c_include() {
         let code = "#include <stdio.h>";
-        let mut grammar = to_grammar("test-cases/first-mate/fixtures/c.json");
+        let mut grammar = Grammar::to_grammar("test-cases/first-mate/fixtures/c.json");
         let mut rule_stack = Some(StackElement::null());
         let result = grammar.tokenize_line(String::from(code), &mut rule_stack);
@@ -757,7 +770,7 @@ hellomake: $(OBJ)
     }

     fn to_grammar_with_code(grammar_path: &str, code: &str) -> Grammar {
-        let mut grammar = to_grammar(grammar_path);
+        let mut grammar = Grammar::to_grammar(grammar_path);
         let c_code = String::from(code);
         let mut rule_stack = Some(StackElement::null());
         for line in c_code.lines() {
@@ -781,14 +794,4 @@ hellomake: $(OBJ)
         grammar
     }
-
-    fn to_grammar(grammar_path: &str) -> Grammar {
-        let path = Path::new(grammar_path);
-        let mut file = File::open(path).unwrap();
-        let mut data = String::new();
-        file.read_to_string(&mut data).unwrap();
-
-        let g: IRawGrammar = serde_json::from_str(&data).unwrap();
-        Grammar::new(g)
-    }

 }
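The committed binary only prints each tokenized line; it does not yet measure anything. To turn the loop in main() into an actual timing run, it could be wrapped with std::time::Instant, for example (a minimal sketch, not part of this commit; the helper name time_tokenize is hypothetical):

use scie_grammar::grammar::{Grammar, StackElement};
use std::time::Instant;

// Hypothetical timing helper: tokenizes `code` line by line and reports elapsed time.
fn time_tokenize(grammar: &mut Grammar, code: &str) {
    let start = Instant::now();
    let mut rule_stack = Some(StackElement::null());
    for line in code.lines() {
        let result = grammar.tokenize_line(String::from(line), &mut rule_stack);
        rule_stack = *result.rule_stack;
    }
    println!("tokenized {} lines in {:?}", code.lines().count(), start.elapsed());
}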