refactor: fix the position of some .clone() calls (clone the accessed field instead of the whole value)

上级 f2ad21e8
......@@ -11,7 +11,11 @@ fn main() {
let target_dir = get_target_dir();
let root_dir = get_top_dir(&*target_dir);
let lang_spec_dir = root_dir.join("extensions").join("json").join("syntaxes").join("JSON.tmLanguage.json");
let lang_spec_dir = root_dir
.join("extensions")
.join("json")
.join("syntaxes")
.join("JSON.tmLanguage.json");
let lang_test_dir = Path::new("fixtures").join("JavaScript.tmLanguage.json.txt");
let code = read_code(&lang_test_dir);
......@@ -28,7 +32,11 @@ fn main() {
}
if let Ok(n) = SystemTime::now().duration_since(start) {
println!("TOKENIZING {:?} length using grammar source.js {:?} ms", code.len(), n.as_millis())
println!(
"TOKENIZING {:?} length using grammar source.js {:?} ms",
code.len(),
n.as_millis()
)
}
}
......
......@@ -8,7 +8,33 @@ Date: 2020-09-09
## Context
Context here...
For now, we don't implement all of the caching logic from vscode-textmate; if we want to improve on its performance, we will need this.
```
JSON
TOKENIZING 100210 lines using grammar source.js
Oniguruma: 216 ms., Onigasm: 117 ms. (1.8x faster)
```
We measured with:
```
➜ benchmark git:(master) ✗ cargo run benchmark
Compiling benchmark v0.1.0 (/Users/fdhuang/repractise/scie/benchmark)
Finished dev [unoptimized + debuginfo] target(s) in 1.26s
Running `/Users/fdhuang/repractise/scie/target/debug/benchmark benchmark`
TOKENIZING 100210 length using grammar source.js 107204 ms
```
With the release build:
```
/Users/fdhuang/repractise/scie/target/release/benchmark
TOKENIZING 100210 length using grammar source.js 10306 ms
```
## Decision
......
......@@ -11,9 +11,9 @@ use crate::rule::{
};
use core::cmp;
use scie_scanner::scanner::scanner::IOnigCaptureIndex;
use std::path::Path;
use std::fs::File;
use std::io::Read;
use std::path::Path;
pub trait Matcher {}
......@@ -407,9 +407,9 @@ impl Grammar {
let capture_scope_name =
capture_rule.get_name(Some(line_text.clone()), Some(capture_indices.clone()));
if let Some(_name) = capture_scope_name.clone() {
let mut base = stack.clone().content_name_scopes_list;
let mut base = stack.content_name_scopes_list.clone();
if local_stack.len() > 0 {
base = local_stack[local_stack.len() - 1].clone().scopes;
base = local_stack[local_stack.len() - 1].scopes.clone();
}
let capture_rule_scopes_list = base.push(grammar, capture_scope_name.clone());
local_stack.push(LocalStackElement::new(
......@@ -617,11 +617,9 @@ impl IRuleRegistry for Grammar {
#[cfg(test)]
mod tests {
use std::fs::File;
use std::io::{Read, Write};
use std::path::Path;
use std::io::Write;
use crate::grammar::{Grammar, StackElement};
use crate::inter::IRawGrammar;
use crate::rule::abstract_rule::RuleEnum;
use crate::rule::IRuleRegistry;
......
......@@ -738,7 +738,7 @@ var Grammar = /** @class */ (function () {
// console.log(this._ruleId2desc.length);
// let fs = require('fs');
// let data = JSON.stringify(this._ruleId2desc, null, 2);
// fs.writeFileSync("testdata/c.out.json", data, 'utf8');
// fs.writeFileSync("fixtures/c.out.json", data, 'utf8');
var isFirstLine;
if (!prevState || prevState === StackElement.NULL) {
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册