More fixes to token adjustment

上级 0fae04b8
......@@ -121,7 +121,7 @@ export interface IEncodedTokens {
getMetadata(tokenIndex: number): number;
clear(): void;
acceptDeleteRange(startDeltaLine: number, startCharacter: number, endDeltaLine: number, endCharacter: number): void;
acceptDeleteRange(horizontalShiftForFirstLineTokens: number, startDeltaLine: number, startCharacter: number, endDeltaLine: number, endCharacter: number): void;
acceptInsertText(deltaLine: number, character: number, eolCount: number, firstLineLength: number, lastLineLength: number, firstCharCode: number): void;
}
......@@ -173,7 +173,7 @@ export class SparseEncodedTokens implements IEncodedTokens {
this._tokenCount = 0;
}
public acceptDeleteRange(startDeltaLine: number, startCharacter: number, endDeltaLine: number, endCharacter: number): void {
public acceptDeleteRange(horizontalShiftForFirstLineTokens: number, startDeltaLine: number, startCharacter: number, endDeltaLine: number, endCharacter: number): void {
// This is a bit complex, here are the cases I used to think about this:
//
// 1. The token starts before the deletion range
......@@ -292,14 +292,13 @@ export class SparseEncodedTokens implements IEncodedTokens {
tokenDeltaLine -= deletedLineCount;
} else if (tokenDeltaLine === endDeltaLine && tokenStartCharacter >= endCharacter) {
// 4. (continued) The token starts after the deletion range, on the last line where a deletion occurs
tokenDeltaLine -= deletedLineCount;
if (deletedLineCount === 0) {
tokenStartCharacter -= (endCharacter - startCharacter);
tokenEndCharacter -= (endCharacter - startCharacter);
} else {
tokenStartCharacter -= endCharacter;
tokenEndCharacter -= endCharacter;
if (horizontalShiftForFirstLineTokens && tokenDeltaLine === 0) {
tokenStartCharacter += horizontalShiftForFirstLineTokens;
tokenEndCharacter += horizontalShiftForFirstLineTokens;
}
tokenDeltaLine -= deletedLineCount;
tokenStartCharacter -= (endCharacter - startCharacter);
tokenEndCharacter -= (endCharacter - startCharacter);
} else {
throw new Error(`Not possible!`);
}
......@@ -378,8 +377,8 @@ export class SparseEncodedTokens implements IEncodedTokens {
}
}
// => the token must move and keep its size constant
tokenDeltaLine += eolCount;
if (tokenDeltaLine === deltaLine) {
tokenDeltaLine += eolCount;
// this token is on the line where the insertion is taking place
if (eolCount === 0) {
tokenStartCharacter += firstLineLength;
......@@ -389,6 +388,8 @@ export class SparseEncodedTokens implements IEncodedTokens {
tokenStartCharacter = lastLineLength + (tokenStartCharacter - character);
tokenEndCharacter = tokenStartCharacter + tokenLength;
}
} else {
tokenDeltaLine += eolCount;
}
}
......@@ -532,9 +533,9 @@ export class MultilineTokens2 {
const deletedBefore = -firstLineIndex;
this.startLineNumber -= deletedBefore;
this.tokens.acceptDeleteRange(0, 0, lastLineIndex, range.endColumn - 1);
this.tokens.acceptDeleteRange(range.startColumn - 1, 0, 0, lastLineIndex, range.endColumn - 1);
} else {
this.tokens.acceptDeleteRange(firstLineIndex, range.startColumn - 1, lastLineIndex, range.endColumn - 1);
this.tokens.acceptDeleteRange(0, firstLineIndex, range.startColumn - 1, lastLineIndex, range.endColumn - 1);
}
}
......
......@@ -14,9 +14,10 @@ suite('TokensStore', () => {
const SEMANTIC_COLOR = 5;
function parseTokensState(state: string[]): { text: string; tokens: number[]; } {
function parseTokensState(state: string[]): { text: string; tokens: MultilineTokens2; } {
let text: string[] = [];
let tokens: number[] = [];
let baseLine = 1;
for (let i = 0; i < state.length; i++) {
const line = state[i];
......@@ -43,7 +44,10 @@ suite('TokensStore', () => {
const tokenLength = secondPipeOffset - firstPipeOffset - 1;
const metadata = (SEMANTIC_COLOR << MetadataConsts.FOREGROUND_OFFSET);
tokens.push(i, tokenStartCharacter, tokenStartCharacter + tokenLength, metadata);
if (tokens.length === 0) {
baseLine = i + 1;
}
tokens.push(i + 1 - baseLine, tokenStartCharacter, tokenStartCharacter + tokenLength, metadata);
lineText += line.substr(firstPipeOffset + 1, tokenLength);
startOffset = secondPipeOffset + 1;
......@@ -56,7 +60,7 @@ suite('TokensStore', () => {
return {
text: text.join('\n'),
tokens: tokens
tokens: new MultilineTokens2(baseLine, new SparseEncodedTokens(new Uint32Array(tokens)))
};
}
......@@ -90,7 +94,7 @@ suite('TokensStore', () => {
function testTokensAdjustment(rawInitialState: string[], edits: IIdentifiedSingleEditOperation[], rawFinalState: string[]) {
const initialState = parseTokensState(rawInitialState);
const model = TextModel.createFromString(initialState.text);
model.setSemanticTokens([new MultilineTokens2(1, new SparseEncodedTokens(new Uint32Array(initialState.tokens)))]);
model.setSemanticTokens([initialState.tokens]);
model.applyEdits(edits);
......@@ -116,4 +120,49 @@ suite('TokensStore', () => {
);
});
// Join two lines: deleting range (1,42)-(2,1) removes only the newline at the
// end of line 1, so line 2's text is appended to line 1. The |URI| token that
// was on line 2 must keep its length and move to the correct column after the
// join point (pipes in the fixture mark token start/end per parseTokensState).
test('deleting a newline', () => {
	testTokensAdjustment(
		[
			`import { |URI| } from 'vs/base/common/uri';`,
			`const foo = |URI|.parse('hey');`
		],
		[
			{ range: new Range(1, 42, 2, 1), text: '' }
		],
		[
			`import { |URI| } from 'vs/base/common/uri';const foo = |URI|.parse('hey');`
		]
	);
});
// Split one line: inserting '\n' at (1,42) breaks the line in two. The second
// |URI| token sits after the insertion point, so it must move down one line
// and have its start/end columns rebased relative to the new line start.
test('inserting a newline', () => {
	testTokensAdjustment(
		[
			`import { |URI| } from 'vs/base/common/uri';const foo = |URI|.parse('hey');`
		],
		[
			{ range: new Range(1, 42, 1, 42), text: '\n' }
		],
		[
			`import { |URI| } from 'vs/base/common/uri';`,
			`const foo = |URI|.parse('hey');`
		]
	);
});
// Join two lines where the deletion also removes leading characters of line 2:
// range (1,10)-(2,5) deletes the newline plus the 4 leading spaces before the
// first |URI| token. Line-2 tokens must both shift left and be re-anchored
// onto line 1 — presumably the scenario the new horizontalShiftForFirstLineTokens
// parameter of acceptDeleteRange addresses (TODO confirm against the full diff).
test('deleting a newline 2', () => {
	testTokensAdjustment(
		[
			`import { `,
			`    |URI| } from 'vs/base/common/uri';const foo = |URI|.parse('hey');`
		],
		[
			{ range: new Range(1, 10, 2, 5), text: '' }
		],
		[
			`import { |URI| } from 'vs/base/common/uri';const foo = |URI|.parse('hey');`
		]
	);
});
});
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册