fix: fix lint issues

Parent 2ed7bbc1
-use onig::*;
use crate::inter::IRawGrammar;
use crate::rule::RuleFactory;
+use onig::*;
pub struct StackElement {}
@@ -17,35 +17,35 @@ pub struct ITokenizeLineResult {
pub struct ITokenizeLineResult2 {
pub tokens: Vec<i32>,
-pub rule_stack: Box<StackElement>
+pub rule_stack: Box<StackElement>,
}
pub trait IGrammar {
fn tokenize_line(line_text: String, prev_state: Option<StackElement>) -> ITokenizeLineResult;
/**
-* Tokenize `lineText` using previous line state `prevState`.
-* The result contains the tokens in binary format, resolved with the following information:
-* - language
-* - token type (regex, string, comment, other)
-* - font style
-* - foreground color
-* - background color
-* e.g. for getting the languageId: `(metadata & MetadataConsts.LANGUAGEID_MASK) >>> MetadataConsts.LANGUAGEID_OFFSET`
-*/
+* Tokenize `lineText` using previous line state `prevState`.
+* The result contains the tokens in binary format, resolved with the following information:
+* - language
+* - token type (regex, string, comment, other)
+* - font style
+* - foreground color
+* - background color
+* e.g. for getting the languageId: `(metadata & MetadataConsts.LANGUAGEID_MASK) >>> MetadataConsts.LANGUAGEID_OFFSET`
+*/
fn tokenize_line2(line_text: String, prev_state: Option<StackElement>) -> ITokenizeLineResult2;
}
pub struct Grammar {
root_id: i32,
-grammar: IRawGrammar
+grammar: IRawGrammar,
}
impl Grammar {
-pub fn new (grammar: IRawGrammar) -> Grammar {
+pub fn new(grammar: IRawGrammar) -> Grammar {
Grammar {
grammar,
-root_id: -1
+root_id: -1,
}
}
// todo: refactor to callback ??
@@ -59,7 +59,12 @@ impl Grammar {
Regex::new(sources.as_str()).unwrap()
}
-fn tokenize(&self, line_text: String, prev_state: Option<StackElement>, emit_binary_tokens: bool) {
+fn tokenize(
+&self,
+line_text: String,
+prev_state: Option<StackElement>,
+emit_binary_tokens: bool,
+) {
if self.root_id == -1 {
RuleFactory::get_compiled_rule_id(self.grammar.repository.clone())
}
@@ -69,7 +74,5 @@ impl Grammar {
self.tokenize(line_text, prev_state, false)
}
-pub fn tokenize_line2(&self, line_text: String, prev_state: Option<StackElement>) {
-}
-}
\ No newline at end of file
+pub fn tokenize_line2(&self, line_text: String, prev_state: Option<StackElement>) {}
+}
@@ -3,11 +3,11 @@ use crate::inter::IRawGrammar;
fn parse_raw_grammar(content: String, file_path: Option<String>) -> IRawGrammar {
if let Some(path) = file_path.clone() {
if path.ends_with(".json") {
-return parse_json_grammar(content, path)
+return parse_json_grammar(content, path);
}
}
-return parse_plist_grammar(content, file_path.clone())
+return parse_plist_grammar(content, file_path.clone());
}
// todo: in current, we don't need to impl it
@@ -19,7 +19,6 @@ fn parse_json_grammar(content: String, file_path: String) -> IRawGrammar {
IRawGrammar::new()
}
#[cfg(test)]
mod tests {
use crate::grammar::grammar_reader::parse_raw_grammar;
@@ -27,6 +26,9 @@ mod tests {
#[test]
fn should_run() {
let grammar = parse_raw_grammar(String::from("hello"), Some(String::from("world.json")));
-assert_eq!(format!("{:?}", grammar.location), "ILocatable { textmate_location: None }");
+assert_eq!(
+format!("{:?}", grammar.location),
+"ILocatable { textmate_location: None }"
+);
}
}
+pub mod grammar;
pub mod grammar_reader;
-pub mod grammar;
\ No newline at end of file
use std::collections::HashMap;
pub struct IEmbeddedLanguagesMap {
-map: HashMap<String, i32>
+map: HashMap<String, i32>,
}
pub enum StandardTokenType {
Other,
Comment,
String,
-RegEx
+RegEx,
}
pub struct ITokenTypeMap {
-map: HashMap<String, StandardTokenType>
+map: HashMap<String, StandardTokenType>,
}
pub struct IGrammarConfiguration {
pub embedded_languages: IEmbeddedLanguagesMap,
-pub tokenTypes: ITokenTypeMap
+pub tokenTypes: ITokenTypeMap,
}
-pub struct GrammarRegistry {
-}
+pub struct GrammarRegistry {}
impl GrammarRegistry {
-pub fn load_grammar_with_configuration(&self, initial_scope_name: String, initial_language: i32, configuration: IGrammarConfiguration) {
+pub fn load_grammar_with_configuration(
+&self,
+initial_scope_name: String,
+initial_language: i32,
+configuration: IGrammarConfiguration,
+) {
}
-}
\ No newline at end of file
+}
-use std::collections::HashMap;
use serde::{Deserialize, Serialize};
use serde_json::Result;
+use std::collections::HashMap;
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
pub struct ILocation {
@@ -36,7 +35,7 @@ impl ILocatable {
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
pub struct IRawCapturesMap {
#[serde(flatten)]
-capture_map: HashMap<String, IRawRule>
+capture_map: HashMap<String, IRawRule>,
}
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
@@ -111,6 +110,8 @@ pub struct IRawRule {
pub patterns: Option<Vec<IRawRule>>,
pub repository: Option<IRawRepository>,
pub apply_end_pattern_last: Option<bool>,
+pub information_for_contributors: Option<Vec<String>>,
}
impl IRawRule {
@@ -132,6 +133,7 @@ impl IRawRule {
patterns: None,
repository: None,
apply_end_pattern_last: None,
+information_for_contributors: None,
}
}
}
@@ -139,7 +141,7 @@ impl IRawRule {
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
pub struct InjectionMap {
#[serde(flatten)]
-map: HashMap<String, IRawRule>
+map: HashMap<String, IRawRule>,
}
pub struct IRawGrammar {
@@ -157,7 +159,9 @@ pub struct IRawGrammar {
impl IRawGrammar {
pub fn new() -> IRawGrammar {
IRawGrammar {
-location: ILocatable { textmate_location: None },
+location: ILocatable {
+textmate_location: None,
+},
repository: IRawRepository::new(),
scope_name: "".to_string(),
patterns: vec![],
@@ -172,9 +176,10 @@ impl IRawGrammar {
#[cfg(test)]
mod tests {
+use crate::inter::{IRawCaptures, IRawRepository, IRawRule, InjectionMap};
use serde::{Deserialize, Serialize};
use serde_json::Result;
-use crate::inter::{IRawCaptures, InjectionMap, IRawRule, IRawRepository};
use std::fs;
use std::path::Path;
#[derive(Serialize, Deserialize, Debug, Clone)]
struct Captures {
@@ -193,7 +198,15 @@
}"#;
let p: Captures = serde_json::from_str(data).unwrap();
-let name = p.captures.unwrap().map.capture_map.get("1").unwrap().name.clone();
+let name = p
+.captures
+.unwrap()
+.map
+.capture_map
+.get("1")
+.unwrap()
+.name
+.clone();
assert_eq!("punctuation.definition.item.text", name.unwrap())
}
@@ -213,9 +226,24 @@
}"#;
let p: InjectionMap = serde_json::from_str(data).unwrap();
-let pattern = p.map.get("R:text.html - comment.block").unwrap().patterns.clone();
+let pattern = p
+.map
+.get("R:text.html - comment.block")
+.unwrap()
+.patterns
+.clone();
assert_eq!(1, pattern.clone().unwrap().len());
-assert_eq!("<", pattern.clone().unwrap().first().unwrap().match_s.clone().unwrap())
+assert_eq!(
+"<",
+pattern
+.clone()
+.unwrap()
+.first()
+.unwrap()
+.match_s
+.clone()
+.unwrap()
+)
}
#[test]
@@ -250,7 +278,10 @@
let p: IRawRule = serde_json::from_str(data).unwrap();
let capture_map = p.end_captures.unwrap().map.capture_map;
-assert_eq!("punctuation.definition.string.end.coffee", capture_map.get("0").unwrap().name.clone().unwrap());
+assert_eq!(
+"punctuation.definition.string.end.coffee",
+capture_map.get("0").unwrap().name.clone().unwrap()
+);
}
#[test]
@@ -279,7 +310,22 @@
let p: IRawRule = serde_json::from_str(data).unwrap();
let repository_map = p.repository.unwrap().map.name_map.clone();
-let pattern_len = repository_map.get("function_names").unwrap().patterns.clone().unwrap().len();
+let pattern_len = repository_map
+.get("function_names")
+.unwrap()
+.patterns
+.clone()
+.unwrap()
+.len();
assert_eq!(3, pattern_len)
}
-}
\ No newline at end of file
+//
+// #[test]
+// fn should_convert_json_file() {
+// let path = Path::new("../../../../extensions/json/syntaxes/JSON.tmLanguage.json");
+// for entry in fs::read_dir(path).expect("Unable to list") {
+// let entry = entry.expect("unable to get entry");
+// println!("{}", entry.path().display());
+// }
+// }
+}
extern crate onig;
-pub mod inter;
pub mod grammar;
pub mod grammar_registry;
+pub mod inter;
pub mod rule;
#[cfg(test)]
@@ -14,10 +14,8 @@ mod tests {
let regex = Regex::new("e(l+)").unwrap();
for (i, pos) in regex.captures("hello").unwrap().iter_pos().enumerate() {
match pos {
-Some((beg, end)) =>
-println!("Group {} captured in position {}:{}", i, beg, end),
-None =>
-println!("Group {} is not captured", i)
+Some((beg, end)) => println!("Group {} captured in position {}:{}", i, beg, end),
+None => println!("Group {} is not captured", i),
}
}
}
use crate::inter::IRawRepository;
-pub struct RuleFactory {
-}
+pub struct RuleFactory {}
impl RuleFactory {
-pub fn get_compiled_rule_id(repository: IRawRepository) {
-}
+pub fn get_compiled_rule_id(repository: IRawRepository) {}
-pub fn create_capture_rule() {
-}
-}
\ No newline at end of file
+pub fn create_capture_rule() {}
+}
-use scie_grammar::inter::{IRawCaptures, ILocatable, ILocation};
+use scie_grammar::inter::{ILocatable, ILocation, IRawCaptures};
use std::collections::HashMap;
fn main() {
let location = ILocation {
filename: "".to_string(),
line: "".to_string(),
-chart: "".to_string()
+chart: "".to_string(),
};
println!("{:?}", location)
}
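The `tokenize_line2` doc comment in the diff above describes tokens packed as binary metadata, with the language id recovered via `(metadata & MetadataConsts.LANGUAGEID_MASK) >>> MetadataConsts.LANGUAGEID_OFFSET`. A minimal Rust sketch of that decoding step follows; the mask and offset values are assumptions for illustration, since this diff does not define a `MetadataConsts` equivalent. In Rust a plain `>>` on a `u32` matches the JavaScript `>>>` here, because the value is already unsigned.

// Hypothetical constants standing in for MetadataConsts; not defined in this diff.
const LANGUAGEID_MASK: u32 = 0x0000_00FF;
const LANGUAGEID_OFFSET: u32 = 0;

// Extract the language id from a packed metadata word, as the doc comment describes.
fn language_id(metadata: u32) -> u32 {
    (metadata & LANGUAGEID_MASK) >> LANGUAGEID_OFFSET
}

fn main() {
    let metadata: u32 = 0x12; // example packed token metadata
    println!("languageId = {}", language_id(metadata));
}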