提交 756469a6 编写于 作者: A Allison Chou

Code review feedback

上级 1cad71e8
......@@ -11,7 +11,6 @@
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.Host.Mef;
using Microsoft.VisualStudio.LanguageServer.Protocol;
using Roslyn.Utilities;
using LSP = Microsoft.VisualStudio.LanguageServer.Protocol;
......@@ -29,7 +28,7 @@ internal class SemanticTokensCache
/// <summary>
/// Maps a LSP token type to its respective index recognized by LSP.
/// </summary>
public static readonly Dictionary<string, int> TokenTypesToIndex;
public static readonly Dictionary<string, int> TokenTypeToIndex;
/// <summary>
/// Number of cached token sets we store per document. Must be >= 1.
......@@ -37,7 +36,8 @@ internal class SemanticTokensCache
private readonly int _maxCachesPerDoc = 5;
/// <summary>
/// The next resultId available to use.
/// The next resultId available to use. Atomically incremented with Interlocked,
/// so this doesn't need to be protected by _semaphore.
/// </summary>
private long _nextResultId;
......@@ -59,10 +59,10 @@ internal class SemanticTokensCache
static SemanticTokensCache()
{
// Computes the mapping between a LSP token type and its respective index recognized by LSP.
TokenTypesToIndex = new Dictionary<string, int>();
TokenTypeToIndex = new Dictionary<string, int>();
for (var i = 0; i < LSP.SemanticTokenTypes.AllTypes.Count; i++)
{
TokenTypesToIndex.Add(LSP.SemanticTokenTypes.AllTypes[i], i);
TokenTypeToIndex.Add(LSP.SemanticTokenTypes.AllTypes[i], i);
}
}
......@@ -81,12 +81,7 @@ public SemanticTokensCache()
LSP.SemanticTokens tokens,
CancellationToken cancellationToken)
{
// If the resultId of the semantic tokens is null, don't cache anything since we'll
// be unable to retrieve the results later.
if (tokens.ResultId == null)
{
return;
}
Contract.ThrowIfNull(tokens.ResultId);
using (await _semaphore.DisposableWaitAsync(cancellationToken).ConfigureAwait(false))
{
......
......@@ -7,6 +7,7 @@
using System;
using System.Collections.Generic;
using System.Composition;
using System.Diagnostics.CodeAnalysis;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
......@@ -47,7 +48,7 @@ internal class SemanticTokensEditsHandler : AbstractRequestHandler<LSP.SemanticT
// Even though we want to ultimately pass edits back to LSP, we still need to compute all semantic tokens,
// both for caching purposes and in order to have a baseline comparison when computing the edits.
var newSemanticTokensData = await SemanticTokensHelpers.ComputeSemanticTokensDataAsync(
request.TextDocument, context.ClientName, SolutionProvider, SemanticTokensCache.TokenTypesToIndex,
request.TextDocument, context.ClientName, SolutionProvider, SemanticTokensCache.TokenTypeToIndex,
range: null, cancellationToken).ConfigureAwait(false);
Contract.ThrowIfNull(newSemanticTokensData);
......@@ -67,9 +68,6 @@ internal class SemanticTokensEditsHandler : AbstractRequestHandler<LSP.SemanticT
return newSemanticTokens;
}
// The Data property is always populated on the server side, so it should never be null.
Contract.ThrowIfNull(oldSemanticTokensData);
var edits = new SemanticTokensEdits
{
Edits = ComputeSemanticTokensEdits(oldSemanticTokensData, newSemanticTokensData),
......@@ -109,7 +107,7 @@ private static SemanticTokensEdit[] ConvertToSemanticTokenEdits(SemanticToken[]
// may have both an insertion and deletion, in which case we can combine the two into a
// single update. We use the dictionary below to keep track of whether an index contains
// an insertion, deletion, or both.
var indexToEditKinds = new Dictionary<int, SemanticTokenEditKind>();
using var _ = PooledDictionary<int, SemanticTokenEditKind>.GetInstance(out var indexToEditKinds);
foreach (var edit in edits)
{
......@@ -119,13 +117,11 @@ private static SemanticTokensEdit[] ConvertToSemanticTokenEdits(SemanticToken[]
{
case EditKind.Insert:
indexToEditKinds.TryGetValue(edit.NewIndex, out var editKindWithoutInsert);
indexToEditKinds[edit.NewIndex] = editKindWithoutInsert |= SemanticTokenEditKind.Insert;
indexToEditKinds[edit.NewIndex] = editKindWithoutInsert == default ? SemanticTokenEditKind.Insert : SemanticTokenEditKind.Update;
break;
case EditKind.Delete:
indexToEditKinds.TryGetValue(edit.OldIndex, out var editKindWithoutDelete);
indexToEditKinds[edit.OldIndex] = editKindWithoutDelete |= SemanticTokenEditKind.Delete;
break;
default:
indexToEditKinds[edit.OldIndex] = editKindWithoutDelete == default ? SemanticTokenEditKind.Delete : SemanticTokenEditKind.Update;
break;
}
}
......@@ -137,101 +133,92 @@ private static SemanticTokensEdit[] ConvertToSemanticTokenEdits(SemanticToken[]
SemanticToken[] newGroupedSemanticTokens,
Dictionary<int, SemanticTokenEditKind> indexToEditKinds)
{
// This method combines the edits into the minimal possible edits.
// For example, if an index contains both an insertion and deletion, we can combine the two
// This method combines the edits into the minimal possible edits (for the most part).
// For example, if an index contains both an insertion and deletion, we combine the two
// edits into one.
// We can also combine edits if we have consecutive edits of certain types:
// Delete->Delete, Insert->Insert, Update->Update, Update->Insert, and Update->Delete.
// Note for the Update->Insert and Update->Delete cases, any further edits we combine can
// only be an Insert or Delete, respectively.
// We also combine edits if we have consecutive edits of the same types, i.e.
// Delete->Delete, Insert->Insert, and Update->Update.
// Technically, we could combine Update->Insert, and Update->Delete, but those cases have
// special rules and would complicate the logic. They also generally do not result in a
// huge reduction in the total number of edits, so we leave them out for now.
using var _ = ArrayBuilder<LSP.SemanticTokensEdit>.GetInstance(out var semanticTokensEdits);
// We sort the edit kinds by index since we need to know what kind of edits surround
// a given index in order to potentially combine them into one edit.
var editIndexes = indexToEditKinds.Keys.ToArray();
Array.Sort(editIndexes);
var editIndices = indexToEditKinds.Keys.ToArray();
// The indices in indexToEdit kinds are not guaranteed to be in chronological order when we
// extract them from the dictionary. We must sort the edit kinds by index since we need to
// know what kind of edits surround a given index in order to potentially combine them into
// one edit.
Array.Sort(editIndices);
for (var i = 0; i < editIndexes.Length; i++)
// Example to give clarity to orderedEditNumber and orderedTokenNumber variables defined below:
// Non-grouped semantic tokens: 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14
// orderedEditNumber: 0 1
// orderedTokenNumber: 0 1 2
for (var orderedEditNumber = 0; orderedEditNumber < editIndices.Length; orderedEditNumber++)
{
var initialEditIndex = editIndexes[i];
var initialEditKind = indexToEditKinds[initialEditIndex];
var orderedTokenNumber = editIndices[orderedEditNumber];
var initialEditKind = indexToEditKinds[orderedTokenNumber];
if (initialEditKind.HasFlag(SemanticTokenEditKind.Insert) && initialEditKind.HasFlag(SemanticTokenEditKind.Delete))
if (initialEditKind == SemanticTokenEditKind.Update)
{
i = AddInsertionDeletionEdit(
newGroupedSemanticTokens, indexToEditKinds, semanticTokensEdits, editIndexes, i, initialEditIndex);
orderedEditNumber = AddUpdateEdit(
newGroupedSemanticTokens, indexToEditKinds, semanticTokensEdits, editIndices, orderedEditNumber,
groupedSemanticToken: newGroupedSemanticTokens[orderedTokenNumber],
editStartPosition: orderedTokenNumber * 5);
}
else if (initialEditKind.HasFlag(SemanticTokenEditKind.Insert))
else if (initialEditKind == SemanticTokenEditKind.Insert)
{
i = AddInsertionEdit(
newGroupedSemanticTokens, indexToEditKinds, semanticTokensEdits, editIndexes, i, initialEditIndex);
orderedEditNumber = AddInsertionEdit(
newGroupedSemanticTokens, indexToEditKinds, semanticTokensEdits, editIndices, orderedEditNumber,
groupedSemanticToken: newGroupedSemanticTokens[orderedTokenNumber],
editStartPosition: orderedTokenNumber * 5);
}
else
{
i = AddDeletionEdit(
indexToEditKinds, semanticTokensEdits, editIndexes, i, initialEditIndex);
Contract.ThrowIfFalse(initialEditKind == SemanticTokenEditKind.Delete);
orderedEditNumber = AddDeletionEdit(
indexToEditKinds, semanticTokensEdits, editIndices, orderedEditNumber,
editStartPosition: orderedTokenNumber * 5);
}
}
return semanticTokensEdits.ToArray();
static int AddInsertionDeletionEdit(
// Local functions
static int AddUpdateEdit(
SemanticToken[] newGroupedSemanticTokens,
Dictionary<int, SemanticTokenEditKind> indexToEditKinds,
ArrayBuilder<SemanticTokensEdit> semanticTokensEdits,
int[] editIndexes,
int[] editIndices,
int i,
int initialEditIndex)
SemanticToken groupedSemanticToken,
int editStartPosition)
{
var deleteCount = 5;
var _ = ArrayBuilder<int>.GetInstance(out var tokensToInsert);
tokensToInsert.AddRange(newGroupedSemanticTokens[initialEditIndex].ToArray());
tokensToInsert.AddRange(groupedSemanticToken.ConvertToArray());
// An "update" (i.e. a dual insertion/deletion) can be combined with an update,
// deletion, or insertion that directly follows it. If combined with an insertion
// or deletion, only that type is allowed in the combined edit afterwards.
var editKind = SemanticTokenEditKind.None;
// For simplicitly, we only allow an "update" (i.e. a dual insertion/deletion) to be
// combined with other updates.
// To continue combining edits, we need to ensure:
// 1) There is an edit following the current edit
// 2) The current edit and next edit involve tokens that are located right next to
// each other in the file.
while (i + 1 < editIndexes.Length && editIndexes[i + 1] == editIndexes[i] + 1)
// The two above criteria are also true for the similar loops in the local functions below,
// AddInsertionEdit and AddDeletionEdit.
while (i + 1 < editIndices.Length && indexToEditKinds[editIndices[i + 1]] == SemanticTokenEditKind.Update &&
editIndices[i + 1] == editIndices[i] + 1)
{
var currentEditKind = indexToEditKinds[editIndexes[i + 1]];
var isCurrentEditKindUpdate = currentEditKind.HasFlag(SemanticTokenEditKind.Insert) &&
currentEditKind.HasFlag(SemanticTokenEditKind.Delete);
// If the current edit is combined with an insertion or deletion, only that type is allowed
// in the combined edit afterwards. The check below will return true if this is not the case.
if (editKind != SemanticTokenEditKind.None && currentEditKind != editKind)
{
break;
}
if (!isCurrentEditKindUpdate)
{
// From now on, all future edits combined with the current edit must be of the same edit
// kind as the current edit.
editKind = currentEditKind;
}
if (isCurrentEditKindUpdate || currentEditKind == SemanticTokenEditKind.Insert)
{
tokensToInsert.AddRange(newGroupedSemanticTokens[editIndexes[i + 1]].ToArray());
}
if (isCurrentEditKindUpdate || currentEditKind == SemanticTokenEditKind.Delete)
{
deleteCount += 5;
}
tokensToInsert.AddRange(newGroupedSemanticTokens[editIndices[i + 1]].ConvertToArray());
deleteCount += 5;
i++;
}
semanticTokensEdits.Add(
GenerateEdit(start: initialEditIndex * 5, deleteCount: deleteCount, data: tokensToInsert.ToArray()));
GenerateEdit(start: editStartPosition, deleteCount: deleteCount, data: tokensToInsert.ToArray()));
return i;
}
......@@ -240,45 +227,46 @@ private static SemanticTokensEdit[] ConvertToSemanticTokenEdits(SemanticToken[]
SemanticToken[] newGroupedSemanticTokens,
Dictionary<int, SemanticTokenEditKind> indexToEditKinds,
ArrayBuilder<SemanticTokensEdit> semanticTokensEdits,
int[] editIndexes,
int[] editIndices,
int i,
int initialEditIndex)
SemanticToken groupedSemanticToken,
int editStartPosition)
{
var _ = ArrayBuilder<int>.GetInstance(out var tokensToInsert);
tokensToInsert.AddRange(newGroupedSemanticTokens[initialEditIndex].ToArray());
tokensToInsert.AddRange(groupedSemanticToken.ConvertToArray());
// An insert can only be combined with other inserts that directly follow it.
while (i + 1 < editIndexes.Length && indexToEditKinds[editIndexes[i + 1]] == SemanticTokenEditKind.Insert &&
editIndexes[i + 1] == editIndexes[i] + 1)
while (i + 1 < editIndices.Length && indexToEditKinds[editIndices[i + 1]] == SemanticTokenEditKind.Insert &&
editIndices[i + 1] == editIndices[i] + 1)
{
tokensToInsert.AddRange(newGroupedSemanticTokens[editIndexes[i + 1]].ToArray());
tokensToInsert.AddRange(newGroupedSemanticTokens[editIndices[i + 1]].ConvertToArray());
i++;
}
semanticTokensEdits.Add(
GenerateEdit(start: initialEditIndex * 5, deleteCount: 0, data: tokensToInsert.ToArray()));
GenerateEdit(start: editStartPosition, deleteCount: 0, data: tokensToInsert.ToArray()));
return i;
}
static int AddDeletionEdit(
Dictionary<int, SemanticTokenEditKind> indexToEditKinds,
ArrayBuilder<SemanticTokensEdit> semanticTokensEdits,
int[] editIndexes,
int[] editIndices,
int i,
int initialEditIndex)
int editStartPosition)
{
var deleteCount = 5;
// A deletion can only be combined with other deletions that directly follow it.
while (i + 1 < editIndexes.Length && indexToEditKinds[editIndexes[i + 1]] == SemanticTokenEditKind.Delete &&
editIndexes[i + 1] == editIndexes[i] + 1)
while (i + 1 < editIndices.Length && indexToEditKinds[editIndices[i + 1]] == SemanticTokenEditKind.Delete &&
editIndices[i + 1] == editIndices[i] + 1)
{
deleteCount += 5;
i++;
}
semanticTokensEdits.Add(
GenerateEdit(start: initialEditIndex * 5, deleteCount: deleteCount, data: Array.Empty<int>()));
GenerateEdit(start: editStartPosition, deleteCount: deleteCount, data: Array.Empty<int>()));
return i;
}
}
......@@ -324,7 +312,9 @@ private sealed class LongestCommonSemanticTokensSubsequence : LongestCommonSubse
/// <summary>
/// Stores the values that make up the LSP representation of an individual semantic token.
/// </summary>
private readonly struct SemanticToken
#pragma warning disable CA1067 // Override Object.Equals(object) when implementing IEquatable<T>
private readonly struct SemanticToken : IEquatable<SemanticToken>
#pragma warning restore CA1067 // Override Object.Equals(object) when implementing IEquatable<T>
{
private readonly int _deltaLine;
private readonly int _deltaStartCharacter;
......@@ -341,37 +331,27 @@ public SemanticToken(int deltaLine, int deltaStartCharacter, int length, int tok
_tokenModifiers = tokenModifiers;
}
public int[] ToArray()
/// <summary>
/// Converts this token to its LSP wire format: the five integers
/// (deltaLine, deltaStartCharacter, length, tokenType, tokenModifiers)
/// that make up one semantic token in the flattened data array.
/// </summary>
public int[] ConvertToArray()
    => new[] { _deltaLine, _deltaStartCharacter, _length, _tokenType, _tokenModifiers };
public override bool Equals(object? obj)
{
return obj is SemanticToken token &&
_deltaLine == token._deltaLine &&
_deltaStartCharacter == token._deltaStartCharacter &&
_length == token._length &&
_tokenType == token._tokenType &&
_tokenModifiers == token._tokenModifiers;
}
public override int GetHashCode()
public bool Equals([AllowNull] SemanticToken otherToken)
{
return Hash.Combine(_deltaLine.GetHashCode(),
Hash.Combine(_deltaStartCharacter.GetHashCode(),
Hash.Combine(_length.GetHashCode(),
Hash.Combine(_tokenType.GetHashCode(),
_tokenModifiers.GetHashCode()))));
return _deltaLine == otherToken._deltaLine &&
_deltaStartCharacter == otherToken._deltaStartCharacter &&
_length == otherToken._length &&
_tokenType == otherToken._tokenType &&
_tokenModifiers == otherToken._tokenModifiers;
}
}
[Flags]
// Kind of change recorded for a single token index while diffing the old
// and new semantic token sets.
private enum SemanticTokenEditKind
{
// No edit recorded at this index.
None = 0,
// A token was inserted at this index in the new token set.
Insert = 1,
// A token was deleted at this index from the old token set.
Delete = 2,
// Both an insertion and a deletion at the same index, i.e. the token was
// modified in place. Note 3 == Insert | Delete, matching how the two
// kinds are combined when an index receives both edits.
Update = 3
}
}
}
......@@ -47,7 +47,7 @@ internal class SemanticTokensHandler : AbstractRequestHandler<LSP.SemanticTokens
Contract.ThrowIfNull(request.TextDocument);
var resultId = _tokensCache.GetNextResultId();
var tokensData = await SemanticTokensHelpers.ComputeSemanticTokensDataAsync(
request.TextDocument, context.ClientName, SolutionProvider, SemanticTokensCache.TokenTypesToIndex,
request.TextDocument, context.ClientName, SolutionProvider, SemanticTokensCache.TokenTypeToIndex,
range: null, cancellationToken).ConfigureAwait(false);
var tokens = new LSP.SemanticTokens { ResultId = resultId, Data = tokensData };
......
......@@ -4,9 +4,7 @@
#nullable enable
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
......@@ -114,29 +112,31 @@ internal class SemanticTokensHelpers
var classifiedSpans = await Classifier.GetClassifiedSpansAsync(document, textSpan, cancellationToken).ConfigureAwait(false);
Contract.ThrowIfNull(classifiedSpans);
// A TextSpan can be associated with multiple ClassifiedSpans (i.e. if a token has
// modifiers). We perform this group by since LSP requires that each token is
// reported together with all its modifiers.
var groupedSpans = classifiedSpans.GroupBy(s => s.TextSpan);
// TO-DO: We should implement support for streaming once this LSP bug is fixed:
// https://devdiv.visualstudio.com/DevDiv/_workitems/edit/1132601
return ComputeTokens(text.Lines, groupedSpans, tokenTypesToIndex);
return ComputeTokens(text.Lines, classifiedSpans.ToArray(), tokenTypesToIndex);
}
private static int[] ComputeTokens(
TextLineCollection lines,
IEnumerable<IGrouping<TextSpan, ClassifiedSpan>> groupedSpans,
ClassifiedSpan[] classifiedSpans,
Dictionary<string, int> tokenTypesToIndex)
{
using var _ = ArrayBuilder<int>.GetInstance(out var data);
// A TextSpan can be associated with multiple ClassifiedSpans (i.e. if a token has
// modifiers). We perform this grouping since LSP requires that each token is
// reported together with all its modifiers.
using var _1 = PooledDictionary<TextSpan, List<string>>.GetInstance(out var textSpanToClassificationTypes);
GroupClassificationTypesByTextSpan(classifiedSpans, textSpanToClassificationTypes);
using var _2 = ArrayBuilder<int>.GetInstance(out var data);
var lastLineNumber = 0;
var lastStartCharacter = 0;
foreach (var span in groupedSpans)
foreach (var textSpanToClassificationType in textSpanToClassificationTypes)
{
ComputeNextToken(lines, ref lastLineNumber, ref lastStartCharacter, span, tokenTypesToIndex,
ComputeNextToken(lines, ref lastLineNumber, ref lastStartCharacter, textSpanToClassificationType.Key,
textSpanToClassificationType.Value, tokenTypesToIndex,
out var deltaLine, out var startCharacterDelta, out var tokenLength,
out var tokenType, out var tokenModifiers);
......@@ -144,13 +144,32 @@ internal class SemanticTokensHelpers
}
return data.ToArray();
// Local functions
// Buckets the classification type of every classified span under its TextSpan.
// A single TextSpan can carry several classification types (e.g. a token plus
// its additive modifiers), so each key maps to the full list of types seen.
static void GroupClassificationTypesByTextSpan(
    ClassifiedSpan[] classifiedSpans,
    PooledDictionary<TextSpan, List<string>> textSpanToClassificationTypes)
{
    foreach (var classifiedSpan in classifiedSpans)
    {
        if (textSpanToClassificationTypes.TryGetValue(classifiedSpan.TextSpan, out var classificationTypes))
        {
            // Span already seen: append the additional classification type.
            classificationTypes.Add(classifiedSpan.ClassificationType);
        }
        else
        {
            // First time we see this span: start a new list for it.
            textSpanToClassificationTypes.Add(
                classifiedSpan.TextSpan, new List<string> { classifiedSpan.ClassificationType });
        }
    }
}
}
private static void ComputeNextToken(
TextLineCollection lines,
ref int lastLineNumber,
ref int lastStartCharacter,
IGrouping<TextSpan, ClassifiedSpan> textSpanToClassifiedSpans,
TextSpan textSpan,
List<string> classificationTypes,
Dictionary<string, int> tokenTypesToIndex,
// Out params
out int deltaLineOut,
......@@ -166,7 +185,6 @@ internal class SemanticTokensHelpers
// 4. Token type (index) - looked up in SemanticTokensLegend.tokenTypes
// 5. Token modifiers - each set bit will be looked up in SemanticTokensLegend.tokenModifiers
var textSpan = textSpanToClassifiedSpans.Key;
var linePosition = lines.GetLinePositionSpan(textSpan).Start;
var lineNumber = linePosition.Line;
var startCharacter = linePosition.Character;
......@@ -190,13 +208,13 @@ internal class SemanticTokensHelpers
// modifiers are added in the future.
var modifierBits = TokenModifiers.None;
var tokenTypeIndex = 0;
foreach (var classifiedSpan in textSpanToClassifiedSpans)
foreach (var classificationType in classificationTypes)
{
if (classifiedSpan.ClassificationType != ClassificationTypeNames.StaticSymbol)
if (classificationType != ClassificationTypeNames.StaticSymbol)
{
// 4. Token type - looked up in SemanticTokensLegend.tokenTypes (language server defined mapping
// from integer to LSP token types).
tokenTypeIndex = GetTokenTypeIndex(classifiedSpan, tokenTypesToIndex);
tokenTypeIndex = GetTokenTypeIndex(classificationType, tokenTypesToIndex);
}
else
{
......@@ -215,9 +233,9 @@ internal class SemanticTokensHelpers
tokenModifiersOut = (int)modifierBits;
}
private static int GetTokenTypeIndex(ClassifiedSpan tokenTypeClassifiedSpan, Dictionary<string, int> tokenTypesToIndex)
private static int GetTokenTypeIndex(string classificationType, Dictionary<string, int> tokenTypesToIndex)
{
s_classificationTypeToSemanticTokenTypeMap.TryGetValue(tokenTypeClassifiedSpan.ClassificationType, out var tokenTypeStr);
s_classificationTypeToSemanticTokenTypeMap.TryGetValue(classificationType, out var tokenTypeStr);
Contract.ThrowIfNull(tokenTypeStr);
if (!tokenTypesToIndex.TryGetValue(tokenTypeStr, out var tokenTypeIndex))
......
......@@ -50,7 +50,7 @@ internal class SemanticTokensRangeHandler : AbstractRequestHandler<LSP.SemanticT
// document request, so caching range results is unnecessary since the whole document
// handler will cache the results anyway.
var tokensData = await SemanticTokensHelpers.ComputeSemanticTokensDataAsync(
request.TextDocument, context.ClientName, SolutionProvider, SemanticTokensCache.TokenTypesToIndex,
request.TextDocument, context.ClientName, SolutionProvider, SemanticTokensCache.TokenTypeToIndex,
request.Range, cancellationToken).ConfigureAwait(false);
return new LSP.SemanticTokens { ResultId = resultId, Data = tokensData };
}
......
......@@ -24,12 +24,12 @@ public class SemanticTokensEditsTests : AbstractSemanticTokensTests
private static int[] StandardCase()
=> new int[] {
// Line | Char | Len | Token type | Modifier
0, 0, 10, SemanticTokensCache.TokenTypesToIndex[LSP.SemanticTokenTypes.Comment], 0, // '// Comment'
1, 0, 6, SemanticTokensCache.TokenTypesToIndex[LSP.SemanticTokenTypes.Keyword], 0, // 'static'
0, 7, 5, SemanticTokensCache.TokenTypesToIndex[LSP.SemanticTokenTypes.Keyword], 0, // 'class'
0, 6, 1, SemanticTokensCache.TokenTypesToIndex[LSP.SemanticTokenTypes.Class], (int)TokenModifiers.Static, // 'C'
0, 2, 1, SemanticTokensCache.TokenTypesToIndex[LSP.SemanticTokenTypes.Operator], 0, // '{'
0, 2, 1, SemanticTokensCache.TokenTypesToIndex[LSP.SemanticTokenTypes.Operator], 0, // '}'
0, 0, 10, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Comment], 0, // '// Comment'
1, 0, 6, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Keyword], 0, // 'static'
0, 7, 5, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Keyword], 0, // 'class'
0, 6, 1, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Class], (int)TokenModifiers.Static, // 'C'
0, 2, 1, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Operator], 0, // '{'
0, 2, 1, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Operator], 0, // '}'
};
/*
......@@ -39,7 +39,7 @@ private static int[] StandardCase()
private static int[] SingleLineCase()
=> new int[] {
// Line | Char | Len | Token type | Modifier
0, 0, 10, SemanticTokensCache.TokenTypesToIndex[LSP.SemanticTokenTypes.Comment], 0, // '// Comment'
0, 0, 10, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Comment], 0, // '// Comment'
};
[Fact]
......@@ -113,7 +113,7 @@ static class C { }
workspace, locations["caret"].First(), StandardCase(), previousResultId: "1", cache);
var expectedEdit = SemanticTokensEditsHandler.GenerateEdit(
start: 30, deleteCount: 0, data: new int[] { 1, 0, 10, SemanticTokensCache.TokenTypesToIndex[LSP.SemanticTokenTypes.Comment], 0 });
start: 30, deleteCount: 0, data: new int[] { 1, 0, 10, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Comment], 0 });
Assert.Equal(expectedEdit, ((LSP.SemanticTokensEdits)results).Edits.First());
Assert.Equal("2", ((LSP.SemanticTokensEdits)results).ResultId);
......@@ -144,8 +144,8 @@ public async Task TestGetSemanticTokensEdits_ReturnMinimalEdits()
start: 0, deleteCount: 5,
data: new int[]
{
0, 0, 5, SemanticTokensCache.TokenTypesToIndex[LSP.SemanticTokenTypes.Keyword], 0,
1, 0, 10, SemanticTokensCache.TokenTypesToIndex[LSP.SemanticTokenTypes.Comment], 0
0, 0, 5, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Keyword], 0,
1, 0, 10, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Comment], 0
});
Assert.Equal(expectedEdit, ((LSP.SemanticTokensEdits)results).Edits[0]);
......
......@@ -33,11 +33,11 @@ static class C { }
Data = new int[]
{
// Line | Char | Len | Token type | Modifier
1, 0, 6, SemanticTokensCache.TokenTypesToIndex[LSP.SemanticTokenTypes.Keyword], 0, // 'static'
0, 7, 5, SemanticTokensCache.TokenTypesToIndex[LSP.SemanticTokenTypes.Keyword], 0, // 'class'
0, 6, 1, SemanticTokensCache.TokenTypesToIndex[LSP.SemanticTokenTypes.Class], (int)TokenModifiers.Static, // 'C'
0, 2, 1, SemanticTokensCache.TokenTypesToIndex[LSP.SemanticTokenTypes.Operator], 0, // '{'
0, 2, 1, SemanticTokensCache.TokenTypesToIndex[LSP.SemanticTokenTypes.Operator], 0, // '}'
1, 0, 6, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Keyword], 0, // 'static'
0, 7, 5, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Keyword], 0, // 'class'
0, 6, 1, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Class], (int)TokenModifiers.Static, // 'C'
0, 2, 1, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Operator], 0, // '{'
0, 2, 1, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Operator], 0, // '}'
},
ResultId = "1"
};
......
......@@ -33,12 +33,12 @@ static class C { }";
Data = new int[]
{
// Line | Char | Len | Token type | Modifier
0, 0, 10, SemanticTokensCache.TokenTypesToIndex[LSP.SemanticTokenTypes.Comment], 0, // '// Comment'
1, 0, 6, SemanticTokensCache.TokenTypesToIndex[LSP.SemanticTokenTypes.Keyword], 0, // 'static'
0, 7, 5, SemanticTokensCache.TokenTypesToIndex[LSP.SemanticTokenTypes.Keyword], 0, // 'class'
0, 6, 1, SemanticTokensCache.TokenTypesToIndex[LSP.SemanticTokenTypes.Class], (int)TokenModifiers.Static, // 'C'
0, 2, 1, SemanticTokensCache.TokenTypesToIndex[LSP.SemanticTokenTypes.Operator], 0, // '{'
0, 2, 1, SemanticTokensCache.TokenTypesToIndex[LSP.SemanticTokenTypes.Operator], 0, // '}'
0, 0, 10, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Comment], 0, // '// Comment'
1, 0, 6, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Keyword], 0, // 'static'
0, 7, 5, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Keyword], 0, // 'class'
0, 6, 1, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Class], (int)TokenModifiers.Static, // 'C'
0, 2, 1, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Operator], 0, // '{'
0, 2, 1, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Operator], 0, // '}'
},
ResultId = "1"
};
......@@ -70,11 +70,11 @@ static class C { }
Data = new int[]
{
// Line | Char | Len | Token type | Modifier
1, 0, 6, SemanticTokensCache.TokenTypesToIndex[LSP.SemanticTokenTypes.Keyword], 0, // 'static'
0, 7, 5, SemanticTokensCache.TokenTypesToIndex[LSP.SemanticTokenTypes.Keyword], 0, // 'class'
0, 6, 1, SemanticTokensCache.TokenTypesToIndex[LSP.SemanticTokenTypes.Class], (int)TokenModifiers.Static, // 'C'
0, 2, 1, SemanticTokensCache.TokenTypesToIndex[LSP.SemanticTokenTypes.Operator], 0, // '{'
0, 2, 1, SemanticTokensCache.TokenTypesToIndex[LSP.SemanticTokenTypes.Operator], 0, // '}'
1, 0, 6, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Keyword], 0, // 'static'
0, 7, 5, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Keyword], 0, // 'class'
0, 6, 1, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Class], (int)TokenModifiers.Static, // 'C'
0, 2, 1, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Operator], 0, // '{'
0, 2, 1, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Operator], 0, // '}'
},
ResultId = "1"
};
......@@ -89,12 +89,12 @@ static class C { }
Data = new int[]
{
// Line | Char | Len | Token type | Modifier
0, 0, 10, SemanticTokensCache.TokenTypesToIndex[LSP.SemanticTokenTypes.Comment], 0, // '// Comment'
1, 0, 6, SemanticTokensCache.TokenTypesToIndex[LSP.SemanticTokenTypes.Keyword], 0, // 'static'
0, 7, 5, SemanticTokensCache.TokenTypesToIndex[LSP.SemanticTokenTypes.Keyword], 0, // 'class'
0, 6, 1, SemanticTokensCache.TokenTypesToIndex[LSP.SemanticTokenTypes.Class], (int)TokenModifiers.Static, // 'C'
0, 2, 1, SemanticTokensCache.TokenTypesToIndex[LSP.SemanticTokenTypes.Operator], 0, // '{'
0, 2, 1, SemanticTokensCache.TokenTypesToIndex[LSP.SemanticTokenTypes.Operator], 0, // '}'
0, 0, 10, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Comment], 0, // '// Comment'
1, 0, 6, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Keyword], 0, // 'static'
0, 7, 5, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Keyword], 0, // 'class'
0, 6, 1, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Class], (int)TokenModifiers.Static, // 'C'
0, 2, 1, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Operator], 0, // '{'
0, 2, 1, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Operator], 0, // '}'
},
ResultId = "2"
};
......@@ -127,12 +127,12 @@ static class C { }
Data = new int[]
{
// Line | Char | Len | Token type | Modifier
1, 0, 10, SemanticTokensCache.TokenTypesToIndex[LSP.SemanticTokenTypes.Comment], 0, // '// Comment'
1, 0, 6, SemanticTokensCache.TokenTypesToIndex[LSP.SemanticTokenTypes.Keyword], 0, // 'static'
0, 7, 5, SemanticTokensCache.TokenTypesToIndex[LSP.SemanticTokenTypes.Keyword], 0, // 'class'
0, 6, 1, SemanticTokensCache.TokenTypesToIndex[LSP.SemanticTokenTypes.Class], (int)TokenModifiers.Static, // 'C'
0, 2, 1, SemanticTokensCache.TokenTypesToIndex[LSP.SemanticTokenTypes.Operator], 0, // '{'
0, 2, 1, SemanticTokensCache.TokenTypesToIndex[LSP.SemanticTokenTypes.Operator], 0, // '}'
1, 0, 10, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Comment], 0, // '// Comment'
1, 0, 6, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Keyword], 0, // 'static'
0, 7, 5, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Keyword], 0, // 'class'
0, 6, 1, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Class], (int)TokenModifiers.Static, // 'C'
0, 2, 1, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Operator], 0, // '{'
0, 2, 1, SemanticTokensCache.TokenTypeToIndex[LSP.SemanticTokenTypes.Operator], 0, // '}'
},
ResultId = "4"
};
......
......@@ -14,7 +14,8 @@ public static class ClassificationTypeNames
/// Additive classifications types supply additional context to other classifications.
/// </summary>
/// <remarks>
/// NOTE: Any updates to this class should also be reflected in LSP.
/// NOTE: Any updates to this class should also be reflected in LSP - see
/// Features/LanguageServer/Protocol/Handler/SemanticTokens/SemanticTokensHelpers.cs
/// </remarks>
public static ImmutableArray<string> AdditiveTypeNames { get; } = ImmutableArray.Create(StaticSymbol);
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册