diff --git a/benchmark/src/main.rs b/benchmark/src/main.rs
index c94d7ffb4f533c1a7423e53fa3580d89b116f689..2ed9d3f453ecac31184678993cc4f42b7cf814cc 100644
--- a/benchmark/src/main.rs
+++ b/benchmark/src/main.rs
@@ -1,12 +1,37 @@
 use std::path::{Path, PathBuf};
 use std::env;
 use std::ffi::OsStr;
+use scie_grammar::grammar::{Grammar, StackElement};
+use std::fs::File;
+use std::io::Read;
 
 fn main() {
-    println!("Hello, world!");
     let target_dir = get_target_dir();
     let root_dir = get_top_dir(&*target_dir);
-    println!("{:?}", root_dir);
+
+    let lang_spec_dir = root_dir.join("extensions").join("json").join("syntaxes").join("JSON.tmLanguage.json");
+    let lang_test_dir = Path::new("fixtures").join("JavaScript.tmLanguage.json.txt");
+
+    let code = read_code(&lang_test_dir);
+
+    let mut grammar = Grammar::to_grammar(lang_spec_dir.to_str().unwrap());
+
+    let mut rule_stack = Some(StackElement::null());
+    for line in code.lines() {
+        println!("{:?}", line);
+        let result = grammar.tokenize_line(String::from(line), &mut rule_stack);
+        rule_stack = *result.rule_stack;
+    }
+
+    println!("{:?}", lang_spec_dir);
+    println!("{:?}", lang_test_dir);
+}
+
+fn read_code(lang_test_dir: &PathBuf) -> String {
+    let mut file = File::open(lang_test_dir).unwrap();
+    let mut code = String::new();
+    file.read_to_string(&mut code).unwrap();
+    code
 }
 
 // https://github.com/rust-lang/cargo/issues/2841
diff --git a/scie-grammar/src/grammar/grammar.rs b/scie-grammar/src/grammar/grammar.rs
index 89246cd9b5ee4a5d3e846978fcdbcddaa7e50d99..5795550df3a7a9c7bb24181f31ae2247e38f38fb 100644
--- a/scie-grammar/src/grammar/grammar.rs
+++ b/scie-grammar/src/grammar/grammar.rs
@@ -11,6 +11,9 @@ use crate::rule::{
 };
 use core::cmp;
 use scie_scanner::scanner::scanner::IOnigCaptureIndex;
+use std::path::Path;
+use std::fs::File;
+use std::io::Read;
 
 pub trait Matcher {}
 
@@ -30,8 +33,8 @@ pub struct CheckWhileConditionResult {
 
 #[derive(Debug, Clone)]
 pub struct TokenizeResult {
-    tokens: Vec<IToken>,
-    rule_stack: Box<Option<StackElement>>,
+    pub tokens: Vec<IToken>,
+    pub rule_stack: Box<Option<StackElement>>,
 }
 
 #[derive(Debug, Clone)]
@@ -567,6 +570,16 @@ impl Grammar {
     }
 
     pub fn tokenize_line2(&self, _line_text: String, _prev_state: Option<StackElement>) {}
+
+    pub fn to_grammar(grammar_path: &str) -> Grammar {
+        let path = Path::new(grammar_path);
+        let mut file = File::open(path).unwrap();
+        let mut data = String::new();
+        file.read_to_string(&mut data).unwrap();
+
+        let g: IRawGrammar = serde_json::from_str(&data).unwrap();
+        Grammar::new(g)
+    }
 }
 
 impl IRuleFactoryHelper for Grammar {}
@@ -630,7 +643,7 @@ return 0;
 
     #[test]
     fn should_identify_c_include() {
         let code = "#include <stdio.h>";
-        let mut grammar = to_grammar("test-cases/first-mate/fixtures/c.json");
+        let mut grammar = Grammar::to_grammar("test-cases/first-mate/fixtures/c.json");
         let mut rule_stack = Some(StackElement::null());
         let result = grammar.tokenize_line(String::from(code), &mut rule_stack);
@@ -757,7 +770,7 @@ hellomake: $(OBJ)
     }
 
     fn to_grammar_with_code(grammar_path: &str, code: &str) -> Grammar {
-        let mut grammar = to_grammar(grammar_path);
+        let mut grammar = Grammar::to_grammar(grammar_path);
         let c_code = String::from(code);
         let mut rule_stack = Some(StackElement::null());
         for line in c_code.lines() {
@@ -781,14 +794,4 @@ hellomake: $(OBJ)
 
         grammar
     }
-
-    fn to_grammar(grammar_path: &str) -> Grammar {
-        let path = Path::new(grammar_path);
-        let mut file = File::open(path).unwrap();
-        let mut data = String::new();
-        file.read_to_string(&mut data).unwrap();
-
-        let g: IRawGrammar = serde_json::from_str(&data).unwrap();
-        Grammar::new(g)
-    }
 }