Commit 65bcaa88 authored by Martin Aeschlimann

more tests

Parent 74c44d63
......@@ -108,6 +108,8 @@ export interface ITokenClassificationRegistry {
getTokenClassificationFromString(str: TokenClassificationString): TokenClassification | undefined;
getTokenClassification(type: string, modifiers: string[]): TokenClassification | undefined;
getTokenStylingRule(classification: TokenClassification | string | undefined, value: TokenStyle): TokenStylingRule | undefined;
/**
* Register a TokenStyle default to the registry.
* @param selector The rule selector
......@@ -197,9 +199,18 @@ class TokenClassificationRegistry implements ITokenClassificationRegistry {
return undefined;
}
public getTokenStylingRule(classification: TokenClassification | string | undefined, value: TokenStyle): TokenStylingRule | undefined {
if (typeof classification === 'string') {
classification = this.getTokenClassificationFromString(classification);
}
if (classification) {
return { classification, matchScore: getTokenStylingScore(classification), value };
}
return undefined;
}
public registerTokenStyleDefault(classification: TokenClassification, defaults: TokenStyleDefaults): void {
const matchScore = bitCount(classification.modifiers) + ((classification.type !== TOKEN_TYPE_WILDCARD_NUM) ? 1 : 0);
this.tokenStylingDefaultRules.push({ classification, matchScore, defaults });
this.tokenStylingDefaultRules.push({ classification, matchScore: getTokenStylingScore(classification), defaults });
}
public deregisterTokenType(id: string): void {
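A minimal sketch of how the new string overload of getTokenStylingRule can be used; it relies only on the signatures visible in this diff and assumes the 'types' token type and the 'static' modifier are registered:

// Sketch: a string selector is parsed via getTokenClassificationFromString and
// wrapped into a TokenStylingRule together with its specificity score.
const registry = getTokenClassificationRegistry();
const boldStyle = new TokenStyle(undefined, true, undefined, undefined);
const rule = registry.getTokenStylingRule('types.static', boldStyle);
// For this selector the resulting matchScore is bitCount(modifiers) + 1 === 2:
// one bit for the 'static' modifier plus one for the concrete (non-wildcard) type.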
......@@ -233,12 +244,20 @@ class TokenClassificationRegistry implements ITokenClassificationRegistry {
};
function _processStyle(matchScore: number, style: TokenStyle) {
for (let p in result) {
if (style.foreground && score.foreground <= matchScore) {
score.foreground = matchScore;
result.foreground = style.foreground;
}
for (let p of ['bold', 'underline', 'italic']) {
const property = p as keyof TokenStyle;
const info = style[property];
if (info !== undefined && score[property] <= matchScore) {
score[property] = matchScore;
result[property] = info;
if (info !== undefined) {
if (score[property] < matchScore) {
score[property] = matchScore;
result[property] = info;
} else if (score[property] === matchScore) {
result[property] = result[property] || info;
}
}
}
}
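The reworked _processStyle now treats bold, underline and italic differently from the foreground: a strictly higher match score overwrites, while an equal score only fills in an attribute that is still unset. A standalone sketch of that merge rule, not the module's own code (property and variable names are illustrative):

type FontStyleProps = { bold?: boolean; underline?: boolean; italic?: boolean };

function mergeFontStyle(result: FontStyleProps, score: { [K in keyof FontStyleProps]-?: number }, matchScore: number, style: FontStyleProps): void {
	for (const property of ['bold', 'underline', 'italic'] as const) {
		const info = style[property];
		if (info !== undefined) {
			if (score[property] < matchScore) {
				// more specific rule: overwrite
				score[property] = matchScore;
				result[property] = info;
			} else if (score[property] === matchScore) {
				// same specificity: keep an already-set value, otherwise take this one
				result[property] = result[property] || info;
			}
		}
	}
}

// Two rules with equal specificity: the first rule's italic survives, the second
// still contributes bold because bold was not set before.
const score = { bold: -1, underline: -1, italic: -1 };
const result: FontStyleProps = {};
mergeFontStyle(result, score, 1, { italic: true });
mergeFontStyle(result, score, 1, { bold: true, italic: false });
// result is now { italic: true, bold: true }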
......@@ -300,7 +319,7 @@ class TokenClassificationRegistry implements ITokenClassificationRegistry {
function match(themeSelector: TokenStylingRule | TokenStylingDefaultRule, classification: TokenClassification): number {
const selectorType = themeSelector.classification.type;
if (selectorType !== TOKEN_TYPE_WILDCARD_NUM && selectorType === classification.type) {
if (selectorType !== TOKEN_TYPE_WILDCARD_NUM && selectorType !== classification.type) {
return -1;
}
const selectorModifier = themeSelector.classification.modifiers;
......@@ -373,3 +392,7 @@ function bitCount(u: number) {
const uCount = u - ((u >> 1) & 0o33333333333) - ((u >> 2) & 0o11111111111);
return ((uCount + (uCount >> 3)) & 0o30707070707) % 63;
}
function getTokenStylingScore(classification: TokenClassification) {
return bitCount(classification.modifiers) + ((classification.type !== TOKEN_TYPE_WILDCARD_NUM) ? 1 : 0);
}
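For reference, the score simply counts the modifier bits and adds one when the type is concrete. A small worked example; the numeric type id is an arbitrary non-wildcard value and the object shape assumes TokenClassification carries just the type id and the modifier bitset:

// Two modifier bits set plus a concrete (non-wildcard) type gives a score of 3.
const example: TokenClassification = { type: 5 /* any non-wildcard type id */, modifiers: 0b101 };
getTokenStylingScore(example); // === bitCount(0b101) + 1 === 3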
......@@ -20,12 +20,12 @@ import { URI } from 'vs/base/common/uri';
import { IFileService } from 'vs/platform/files/common/files';
import { parse as parsePList } from 'vs/workbench/services/themes/common/plistParser';
import { startsWith } from 'vs/base/common/strings';
import { Extensions as TokenStyleRegistryExtensions, TokenStyle, TokenClassification, ITokenClassificationRegistry, ProbeScope, TokenStylingRule } from 'vs/platform/theme/common/tokenClassificationRegistry';
import { TokenStyle, TokenClassification, ProbeScope, TokenStylingRule, getTokenClassificationRegistry } from 'vs/platform/theme/common/tokenClassificationRegistry';
import { MatcherWithPriority, Matcher, createMatchers } from 'vs/workbench/services/themes/common/textMateScopeMatcher';
let colorRegistry = Registry.as<IColorRegistry>(ColorRegistryExtensions.ColorContribution);
let tokenClassificationRegistry = Registry.as<ITokenClassificationRegistry>(TokenStyleRegistryExtensions.TokenClassificationContribution);
let tokenClassificationRegistry = getTokenClassificationRegistry();
const tokenGroupToScopesMap = {
comments: ['comment'],
......@@ -115,7 +115,7 @@ export class ColorThemeData implements IColorTheme {
public getTokenStyle(tokenClassification: TokenClassification, useDefault?: boolean): TokenStyle | undefined {
// todo: cache results
return tokenClassificationRegistry.resolveTokenStyle(tokenClassification, this.tokenStylingRules, !!useDefault, this);
return tokenClassificationRegistry.resolveTokenStyle(tokenClassification, this.tokenStylingRules, useDefault !== false, this);
}
public getDefault(colorId: ColorIdentifier): Color | undefined {
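With the changed default, a caller that omits the flag now gets the registry defaults applied and has to pass false explicitly to opt out. A rough usage sketch (themeData and classification stand in for any ColorThemeData instance and resolved TokenClassification):

declare const themeData: ColorThemeData;
declare const classification: TokenClassification;
themeData.getTokenStyle(classification);        // defaults applied (previously !!undefined meant no defaults)
themeData.getTokenStyle(classification, false); // only theme and custom rules, no registry defaults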
......@@ -200,6 +200,10 @@ export class ColorThemeData implements IColorTheme {
}
}
public setTokenStyleRules(tokenStylingRules: TokenStylingRule[]) {
this.tokenStylingRules = tokenStylingRules;
}
private addCustomTokenColors(customTokenColors: ITokenColorCustomizations) {
// Put the general customizations such as comments, strings, etc. first so that
// they can be overridden by specific customizations like "string.interpolated"
......@@ -489,9 +493,14 @@ function getScopeMatcher(rule: ITokenColorizationRule): Matcher<ProbeScope> {
return noMatch;
}
const matchers: MatcherWithPriority<ProbeScope>[] = [];
for (let rs of ruleScope) {
matchers.push(...createMatchers(rs, nameMatcher));
if (Array.isArray(ruleScope)) {
for (let rs of ruleScope) {
createMatchers(rs, nameMatcher, matchers);
}
} else {
createMatchers(ruleScope, nameMatcher, matchers);
}
if (matchers.length === 0) {
return noMatch;
}
......
......@@ -14,8 +14,7 @@ export interface Matcher<T> {
(matcherInput: T): number;
}
export function createMatchers<T>(selector: string, matchesName: (names: string[], matcherInput: T) => number): MatcherWithPriority<T>[] {
const results = <MatcherWithPriority<T>[]>[];
export function createMatchers<T>(selector: string, matchesName: (names: string[], matcherInput: T) => number, results: MatcherWithPriority<T>[]): void {
const tokenizer = newTokenizer(selector);
let token = tokenizer.next();
while (token !== null) {
......@@ -38,7 +37,6 @@ export function createMatchers<T>(selector: string, matchesName: (names: string[
}
token = tokenizer.next();
}
return results;
function parseOperand(): Matcher<T> | null {
if (token === '-') {
......
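After this change createMatchers no longer returns a new array; callers hand in the accumulator, as the colorThemeData.ts hunk above already shows. A minimal, self-contained sketch with a toy name matcher (real callers supply their own scoring function):

const matchers: MatcherWithPriority<string[]>[] = [];
// Toy matcher: scores a hit when every selector name occurs in the probed scopes.
const toyNameMatcher = (names: string[], scopes: string[]) =>
	names.every(n => scopes.indexOf(n) >= 0) ? names.length : -1;
createMatchers('entity.name.function', toyNameMatcher, matchers);
createMatchers('storage.type', toyNameMatcher, matchers);
// matchers now accumulates the matchers produced by both calls, in call order.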
......@@ -6,7 +6,7 @@
import { ColorThemeData } from 'vs/workbench/services/themes/common/colorThemeData';
import * as assert from 'assert';
import { ITokenColorCustomizations } from 'vs/workbench/services/themes/common/workbenchThemeService';
import { Extensions as TokenStyleRegistryExtensions, ITokenClassificationRegistry, TokenStyle, comments, variables, types, functions, keywords, numbers, strings } from 'vs/platform/theme/common/tokenClassificationRegistry';
import { TokenStyle, comments, variables, types, functions, keywords, numbers, strings, getTokenClassificationRegistry, TokenStylingRule } from 'vs/platform/theme/common/tokenClassificationRegistry';
import { Color } from 'vs/base/common/color';
import { isString } from 'vs/base/common/types';
import { FileService } from 'vs/platform/files/common/fileService';
......@@ -15,26 +15,15 @@ import { DiskFileSystemProvider } from 'vs/platform/files/node/diskFileSystemPro
import { Schemas } from 'vs/base/common/network';
import { URI } from 'vs/base/common/uri';
import { getPathFromAmdModule } from 'vs/base/common/amd';
import { Registry } from 'vs/platform/registry/common/platform';
let tokenClassificationRegistry = Registry.as<ITokenClassificationRegistry>(TokenStyleRegistryExtensions.TokenClassificationContribution);
let tokenClassificationRegistry = getTokenClassificationRegistry();
const enum TokenStyleBits {
BOLD = 0x01,
UNDERLINE = 0x02,
ITALIC = 0x04
}
const unsetStyle = { bold: false, underline: false, italic: false };
function ts(foreground: string | undefined, styleFlags: number | undefined): TokenStyle {
function ts(foreground: string | undefined, styleFlags: { bold?: boolean; underline?: boolean; italic?: boolean } | undefined): TokenStyle {
const foregroundColor = isString(foreground) ? Color.fromHex(foreground) : undefined;
let bold, underline, italic;
if (styleFlags !== undefined) {
bold = (styleFlags & TokenStyleBits.BOLD) !== 0;
underline = (styleFlags & TokenStyleBits.UNDERLINE) !== 0;
italic = (styleFlags & TokenStyleBits.ITALIC) !== 0;
}
return new TokenStyle(foregroundColor, bold, underline, italic);
return new TokenStyle(foregroundColor, styleFlags && styleFlags.bold, styleFlags && styleFlags.underline, styleFlags && styleFlags.italic);
}
function tokenStyleAsString(ts: TokenStyle | undefined | null) {
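The reworked ts helper takes an options object instead of a bit mask, so attributes that are not passed stay undefined instead of defaulting to false; for example:

ts('#ff0000', { bold: true });   // bold: true, underline and italic stay undefined
ts('#ff0000', unsetStyle);       // all three font attributes explicitly false
ts(undefined, { italic: true }); // style-only value without a foreground color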
......@@ -43,17 +32,25 @@ function tokenStyleAsString(ts: TokenStyle | undefined | null) {
}
let str = ts.foreground ? ts.foreground.toString() : 'no-foreground';
if (ts.bold !== undefined) {
str = ts.bold ? '+B' : '-B';
str += ts.bold ? '+B' : '-B';
}
if (ts.underline !== undefined) {
str = ts.underline ? '+U' : '-U';
str += ts.underline ? '+U' : '-U';
}
if (ts.italic !== undefined) {
str = ts.italic ? '+I' : '-I';
str += ts.italic ? '+I' : '-I';
}
return str;
}
function getTokenStyleRules(rules: [string, TokenStyle][]): TokenStylingRule[] {
return rules.map(e => {
const rule = tokenClassificationRegistry.getTokenStylingRule(e[0], e[1]);
assert.ok(rule);
return rule!;
});
}
function assertTokenStyle(actual: TokenStyle | undefined | null, expected: TokenStyle | undefined | null, message?: string) {
assert.equal(tokenStyleAsString(actual), tokenStyleAsString(expected), message);
}
......@@ -69,6 +66,8 @@ function assertTokenStyles(themeData: ColorThemeData, expected: { [qualifiedClas
}
suite('Themes - TokenStyleResolving', () => {
const fileService = new FileService(new NullLogService());
const diskFileSystemProvider = new DiskFileSystemProvider(new NullLogService());
fileService.registerProvider(Schemas.file, diskFileSystemProvider);
......@@ -83,13 +82,13 @@ suite('Themes - TokenStyleResolving', () => {
assert.equal(themeData.isLoaded, true);
assertTokenStyles(themeData, {
[comments]: ts('#75715E', 0),
[variables]: ts('#F8F8F2', 0),
[types]: ts('#A6E22E', TokenStyleBits.UNDERLINE),
[functions]: ts('#A6E22E', 0),
[strings]: ts('#E6DB74', 0),
[numbers]: ts('#AE81FF', 0),
[keywords]: ts('#F92672', 0)
[comments]: ts('#75715E', unsetStyle),
[variables]: ts('#F8F8F2', unsetStyle),
[types]: ts('#A6E22E', { underline: true, bold: false, italic: false }),
[functions]: ts('#A6E22E', unsetStyle),
[strings]: ts('#E6DB74', unsetStyle),
[numbers]: ts('#AE81FF', unsetStyle),
[keywords]: ts('#F92672', unsetStyle)
});
});
......@@ -103,13 +102,13 @@ suite('Themes - TokenStyleResolving', () => {
assert.equal(themeData.isLoaded, true);
assertTokenStyles(themeData, {
[comments]: ts('#6A9955', 0),
[variables]: ts('#9CDCFE', 0),
[types]: ts('#4EC9B0', 0),
[functions]: ts('#DCDCAA', 0),
[strings]: ts('#CE9178', 0),
[numbers]: ts('#B5CEA8', 0),
[keywords]: ts('#C586C0', 0)
[comments]: ts('#6A9955', unsetStyle),
[variables]: ts('#9CDCFE', unsetStyle),
[types]: ts('#4EC9B0', unsetStyle),
[functions]: ts('#DCDCAA', unsetStyle),
[strings]: ts('#CE9178', unsetStyle),
[numbers]: ts('#B5CEA8', unsetStyle),
[keywords]: ts('#C586C0', unsetStyle)
});
});
......@@ -123,13 +122,13 @@ suite('Themes - TokenStyleResolving', () => {
assert.equal(themeData.isLoaded, true);
assertTokenStyles(themeData, {
[comments]: ts('#008000', 0),
[variables]: ts('#000000', 0),
[types]: ts('#000000', 0),
[functions]: ts('#000000', 0),
[strings]: ts('#a31515', 0),
[numbers]: ts('#09885a', 0),
[keywords]: ts('#0000ff', 0)
[comments]: ts('#008000', unsetStyle),
[variables]: ts('#000000', unsetStyle),
[types]: ts('#000000', unsetStyle),
[functions]: ts('#000000', unsetStyle),
[strings]: ts('#a31515', unsetStyle),
[numbers]: ts('#09885a', unsetStyle),
[keywords]: ts('#0000ff', unsetStyle)
});
});
......@@ -143,13 +142,13 @@ suite('Themes - TokenStyleResolving', () => {
assert.equal(themeData.isLoaded, true);
assertTokenStyles(themeData, {
[comments]: ts('#7ca668', 0),
[variables]: ts('#9CDCFE', 0),
[types]: ts('#4EC9B0', 0),
[functions]: ts('#DCDCAA', 0),
[strings]: ts('#ce9178', 0),
[numbers]: ts('#b5cea8', 0),
[keywords]: ts('#C586C0', 0)
[comments]: ts('#7ca668', unsetStyle),
[variables]: ts('#9CDCFE', unsetStyle),
[types]: ts('#4EC9B0', unsetStyle),
[functions]: ts('#DCDCAA', unsetStyle),
[strings]: ts('#ce9178', unsetStyle),
[numbers]: ts('#b5cea8', unsetStyle),
[keywords]: ts('#C586C0', unsetStyle)
});
});
......@@ -163,13 +162,13 @@ suite('Themes - TokenStyleResolving', () => {
assert.equal(themeData.isLoaded, true);
assertTokenStyles(themeData, {
[comments]: ts('#a57a4c', 0),
[variables]: ts('#dc3958', 0),
[types]: ts('#f06431', 0),
[functions]: ts('#8ab1b0', 0),
[strings]: ts('#889b4a', 0),
[numbers]: ts('#f79a32', 0),
[keywords]: ts('#98676a', 0)
[comments]: ts('#a57a4c', unsetStyle),
[variables]: ts('#dc3958', unsetStyle),
[types]: ts('#f06431', unsetStyle),
[functions]: ts('#8ab1b0', unsetStyle),
[strings]: ts('#889b4a', unsetStyle),
[numbers]: ts('#f79a32', unsetStyle),
[keywords]: ts('#98676a', unsetStyle)
});
});
......@@ -183,13 +182,13 @@ suite('Themes - TokenStyleResolving', () => {
assert.equal(themeData.isLoaded, true);
assertTokenStyles(themeData, {
[comments]: ts('#384887', 0),
[variables]: ts('#6688cc', 0),
[types]: ts('#ffeebb', TokenStyleBits.UNDERLINE),
[functions]: ts('#ddbb88', 0),
[strings]: ts('#22aa44', 0),
[numbers]: ts('#f280d0', 0),
[keywords]: ts('#225588', 0)
[comments]: ts('#384887', unsetStyle),
[variables]: ts('#6688cc', unsetStyle),
[types]: ts('#ffeebb', { underline: true, bold: false, italic: false }),
[functions]: ts('#ddbb88', unsetStyle),
[strings]: ts('#22aa44', unsetStyle),
[numbers]: ts('#f280d0', unsetStyle),
[keywords]: ts('#225588', unsetStyle)
});
});
......@@ -242,34 +241,52 @@ suite('Themes - TokenStyleResolving', () => {
let defaultTokenStyle = undefined;
tokenStyle = themeData.resolveScopes([['variable']]);
assertTokenStyle(tokenStyle, ts('#F8F8F2', 0), 'variable');
assertTokenStyle(tokenStyle, ts('#F8F8F2', unsetStyle), 'variable');
tokenStyle = themeData.resolveScopes([['keyword.operator']]);
assertTokenStyle(tokenStyle, ts('#F92672', TokenStyleBits.ITALIC | TokenStyleBits.BOLD | TokenStyleBits.UNDERLINE), 'keyword');
assertTokenStyle(tokenStyle, ts('#F92672', { italic: true, bold: true, underline: true }), 'keyword');
tokenStyle = themeData.resolveScopes([['keyword']]);
assertTokenStyle(tokenStyle, defaultTokenStyle, 'keyword');
tokenStyle = themeData.resolveScopes([['keyword.operator']]);
assertTokenStyle(tokenStyle, ts('#F92672', TokenStyleBits.ITALIC | TokenStyleBits.BOLD | TokenStyleBits.UNDERLINE), 'keyword.operator');
assertTokenStyle(tokenStyle, ts('#F92672', { italic: true, bold: true, underline: true }), 'keyword.operator');
tokenStyle = themeData.resolveScopes([['keyword.operators']]);
assertTokenStyle(tokenStyle, defaultTokenStyle, 'keyword.operators');
tokenStyle = themeData.resolveScopes([['storage']]);
assertTokenStyle(tokenStyle, ts('#F92672', TokenStyleBits.ITALIC), 'storage');
assertTokenStyle(tokenStyle, ts('#F92672', { italic: true, underline: false, bold: false }), 'storage');
tokenStyle = themeData.resolveScopes([['storage.type']]);
assertTokenStyle(tokenStyle, ts('#66D9EF', TokenStyleBits.ITALIC), 'storage.type');
assertTokenStyle(tokenStyle, ts('#66D9EF', { italic: true, underline: false, bold: false }), 'storage.type');
tokenStyle = themeData.resolveScopes([['entity.name.class']]);
assertTokenStyle(tokenStyle, ts('#A6E22E', TokenStyleBits.UNDERLINE), 'entity.name.class');
assertTokenStyle(tokenStyle, ts('#A6E22E', { underline: true, italic: false, bold: false }), 'entity.name.class');
tokenStyle = themeData.resolveScopes([['meta.structure.dictionary.json', 'string.quoted.double.json']]);
assertTokenStyle(tokenStyle, ts('#66D9EF', undefined), 'json property');
tokenStyle = themeData.resolveScopes([['keyword'], ['storage.type'], ['entity.name.class']]);
assertTokenStyle(tokenStyle, ts('#66D9EF', TokenStyleBits.ITALIC), 'storage.type');
assertTokenStyle(tokenStyle, ts('#66D9EF', { italic: true, underline: false, bold: false }), 'storage.type');
});
test('rule matching', async () => {
const themeData = ColorThemeData.createLoadedEmptyTheme('test', 'test');
themeData.setCustomColors({ 'editor.foreground': '#000000' });
themeData.setTokenStyleRules(getTokenStyleRules([
['types', ts('#ff0000', undefined)],
['classes', ts('#0000ff', undefined)],
['*.static', ts(undefined, { bold: true })],
['*.declaration', ts(undefined, { italic: true })]
]));
assertTokenStyles(themeData, {
'types': ts('#ff0000', unsetStyle),
'types.static': ts('#ff0000', { bold: true, italic: false, underline: false }),
'types.static.declaration': ts('#ff0000', { bold: true, italic: true, underline: false })
});
});
});