Revert commits intended for PR

Parent a0f92e28
@@ -40,7 +40,6 @@ const vscodeEntryPoints = _.flatten([
buildfile.workerExtensionHost,
buildfile.workerNotebook,
buildfile.workerLanguageDetection,
buildfile.workerLocalFileSearch,
buildfile.workbenchDesktop,
buildfile.code
]);
@@ -15,7 +15,7 @@ exports.base = [{
exports.workerExtensionHost = [createEditorWorkerModuleDescription('vs/workbench/services/extensions/worker/extensionHostWorker')];
exports.workerNotebook = [createEditorWorkerModuleDescription('vs/workbench/contrib/notebook/common/services/notebookSimpleWorker')];
exports.workerLocalFileSearch = [createModuleDescription('vs/workbench/services/search/worker/localFileSearch', ['vs/base/common/worker/simpleWorker'])];
exports.workerLanguageDetection = [createEditorWorkerModuleDescription('vs/workbench/services/languageDetection/browser/languageDetectionSimpleWorker')];
exports.workbenchDesktop = require('./vs/workbench/buildfile.desktop').collectModules();
exports.workbenchWeb = require('./vs/workbench/buildfile.web').collectModules();
@@ -365,7 +365,7 @@ export class HTMLFileSystemProvider implements IFileSystemProviderWithFileReadWr
}
}
async getDirectoryHandle(resource: URI): Promise<FileSystemDirectoryHandle | undefined> {
private async getDirectoryHandle(resource: URI): Promise<FileSystemDirectoryHandle | undefined> {
const handle = this.getHandleSync(resource);
if (handle instanceof FileSystemDirectoryHandle) {
return handle;
@@ -488,10 +488,7 @@ export class AnythingQuickAccessProvider extends PickerQuickAccessProvider<IAnyt
private createFileQueryCache(): FileQueryCacheState {
return new FileQueryCacheState(
cacheKey => this.fileQueryBuilder.file(this.contextService.getWorkspace().folders, this.getFileQueryOptions({
cacheKey,
maxResults: AnythingQuickAccessProvider.MAX_RESULTS,
})),
cacheKey => this.fileQueryBuilder.file(this.contextService.getWorkspace().folders, this.getFileQueryOptions({ cacheKey })),
query => this.searchService.fileSearch(query),
cacheKey => this.searchService.clearCache(cacheKey),
this.pickState.fileQueryCache
@@ -3,27 +3,16 @@
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { CancellationToken } from 'vs/base/common/cancellation';
import { IModelService } from 'vs/editor/common/services/modelService';
import { IFileService } from 'vs/platform/files/common/files';
import { IInstantiationService } from 'vs/platform/instantiation/common/instantiation';
import { registerSingleton } from 'vs/platform/instantiation/common/extensions';
import { ILogService } from 'vs/platform/log/common/log';
import { ITelemetryService } from 'vs/platform/telemetry/common/telemetry';
import { IEditorService } from 'vs/workbench/services/editor/common/editorService';
import { IExtensionService } from 'vs/workbench/services/extensions/common/extensions';
import { IFileMatch, IFileQuery, ISearchComplete, ISearchProgressItem, ISearchResultProvider, ISearchService, ITextQuery } from 'vs/workbench/services/search/common/search';
import { ISearchService } from 'vs/workbench/services/search/common/search';
import { SearchService } from 'vs/workbench/services/search/common/searchService';
import { IUriIdentityService } from 'vs/workbench/services/uriIdentity/common/uriIdentity';
import { IWorkerClient, logOnceWebWorkerWarning, SimpleWorkerClient } from 'vs/base/common/worker/simpleWorker';
import { Disposable, DisposableStore } from 'vs/base/common/lifecycle';
import { DefaultWorkerFactory } from 'vs/base/worker/defaultWorkerFactory';
import { registerSingleton } from 'vs/platform/instantiation/common/extensions';
import { ILocalFileSearchSimpleWorker, ILocalFileSearchSimpleWorkerHost } from 'vs/workbench/services/search/common/localFileSearchWorkerTypes';
import { memoize } from 'vs/base/common/decorators';
import { HTMLFileSystemProvider } from 'vs/platform/files/browser/htmlFileSystemProvider';
import { Schemas } from 'vs/base/common/network';
import { URI, UriComponents } from 'vs/base/common/uri';
import { Emitter, Event } from 'vs/base/common/event';
export class RemoteSearchService extends SearchService {
constructor(
@@ -33,141 +22,9 @@ export class RemoteSearchService extends SearchService {
@ILogService logService: ILogService,
@IExtensionService extensionService: IExtensionService,
@IFileService fileService: IFileService,
@IInstantiationService readonly instantiationService: IInstantiationService,
@IUriIdentityService uriIdentityService: IUriIdentityService,
) {
super(modelService, editorService, telemetryService, logService, extensionService, fileService, uriIdentityService);
this.diskSearch = this.instantiationService.createInstance(LocalFileSearchWorkerClient);
}
}
export class LocalFileSearchWorkerClient extends Disposable implements ISearchResultProvider, ILocalFileSearchSimpleWorkerHost {
protected _worker: IWorkerClient<ILocalFileSearchSimpleWorker> | null;
protected readonly _workerFactory: DefaultWorkerFactory;
private readonly _onDidRecieveTextSearchMatch = new Emitter<{ match: IFileMatch<UriComponents>, queryId: number }>();
readonly onDidRecieveTextSearchMatch: Event<{ match: IFileMatch<UriComponents>, queryId: number }> = this._onDidRecieveTextSearchMatch.event;
private cache: { key: string, cache: ISearchComplete } | undefined;
private queryId: number = 0;
constructor(
@IFileService private fileService: IFileService,
) {
super();
this._worker = null;
this._workerFactory = new DefaultWorkerFactory('localFileSearchWorker');
}
sendTextSearchMatch(match: IFileMatch<UriComponents>, queryId: number): void {
this._onDidRecieveTextSearchMatch.fire({ match, queryId });
}
@memoize
private get fileSystemProvider(): HTMLFileSystemProvider {
return this.fileService.getProvider(Schemas.file) as HTMLFileSystemProvider;
}
private async cancelQuery(queryId: number) {
const proxy = await this._getOrCreateWorker().getProxyObject();
proxy.cancelQuery(queryId);
}
async textSearch(query: ITextQuery, onProgress?: (p: ISearchProgressItem) => void, token?: CancellationToken): Promise<ISearchComplete> {
const queryDisposables = new DisposableStore();
const proxy = await this._getOrCreateWorker().getProxyObject();
const results: IFileMatch[] = [];
let limitHit = false;
await Promise.all(query.folderQueries.map(async fq => {
const queryId = this.queryId++;
queryDisposables.add(token?.onCancellationRequested(e => this.cancelQuery(queryId)) || Disposable.None);
const handle = await this.fileSystemProvider.getDirectoryHandle(fq.folder);
if (!handle) {
console.error('Could not get directory handle for ', fq);
return;
}
const reviveMatch = (result: IFileMatch<UriComponents>): IFileMatch => ({
resource: URI.revive(result.resource),
results: result.results
});
queryDisposables.add(this.onDidRecieveTextSearchMatch(e => {
if (e.queryId === queryId) {
onProgress?.(reviveMatch(e.match));
}
}));
const folderResults = await proxy.searchDirectory(handle, query, fq, queryId);
for (const folderResult of folderResults.results) {
results.push(reviveMatch(folderResult));
}
if (folderResults.limitHit) {
limitHit = true;
}
}));
queryDisposables.dispose();
const result = { messages: [], results, limitHit };
return result;
}
async fileSearch(query: IFileQuery, token?: CancellationToken): Promise<ISearchComplete> {
const queryDisposables = new DisposableStore();
let limitHit = false;
const proxy = await this._getOrCreateWorker().getProxyObject();
const results: IFileMatch[] = [];
await Promise.all(query.folderQueries.map(async fq => {
const queryId = this.queryId++;
queryDisposables.add(token?.onCancellationRequested(e => this.cancelQuery(queryId)) || Disposable.None);
const handle = await this.fileSystemProvider.getDirectoryHandle(fq.folder);
if (!handle) {
console.error('Could not get directory handle for ', fq);
return;
}
const folderResults = await proxy.listDirectory(handle, query, fq, queryId);
for (const folderResult of folderResults.results) {
results.push({ resource: URI.joinPath(fq.folder, folderResult) });
}
if (folderResults.limitHit) { limitHit = true; }
}));
queryDisposables.dispose();
const result = { messages: [], results, limitHit };
return result;
}
async clearCache(cacheKey: string): Promise<void> {
if (this.cache?.key === cacheKey) { this.cache = undefined; }
}
private _getOrCreateWorker(): IWorkerClient<ILocalFileSearchSimpleWorker> {
if (!this._worker) {
try {
this._worker = this._register(new SimpleWorkerClient<ILocalFileSearchSimpleWorker, ILocalFileSearchSimpleWorkerHost>(
this._workerFactory,
'vs/workbench/services/search/worker/localFileSearch',
this,
));
} catch (err) {
logOnceWebWorkerWarning(err);
throw err;
}
}
return this._worker;
}
}
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { ITextSearchResult } from 'vs/workbench/services/search/common/search';
import { TextSearchPreviewOptions } from 'vs/workbench/services/search/common/searchExtTypes';
import { Range } from 'vs/editor/common/core/range';
export const getFileResults = (
bytes: Uint8Array,
pattern: RegExp,
options: {
beforeContext: number;
afterContext: number;
previewOptions: TextSearchPreviewOptions | undefined;
remainingResultQuota: number;
}
): ITextSearchResult[] => {
let text: string;
if (bytes[0] === 0xff && bytes[1] === 0xfe) {
text = new TextDecoder('utf-16le').decode(bytes);
} else if (bytes[0] === 0xfe && bytes[1] === 0xff) {
text = new TextDecoder('utf-16be').decode(bytes);
} else {
text = new TextDecoder('utf8').decode(bytes);
// Decoding as UTF-8 produced replacement characters and the buffer contains NUL bytes: treat the file as binary and skip it.
if (text.slice(0, 1000).includes('\uFFFD') && bytes.includes(0)) {
return [];
}
}
const results: ITextSearchResult[] = [];
const patternIndecies: { matchStartIndex: number; matchedText: string; }[] = [];
let patternMatch: RegExpExecArray | null = null;
let remainingResultQuota = options.remainingResultQuota;
while (remainingResultQuota >= 0 && (patternMatch = pattern.exec(text))) {
patternIndecies.push({ matchStartIndex: patternMatch.index, matchedText: patternMatch[0] });
remainingResultQuota--;
}
if (patternIndecies.length) {
const contextLinesNeeded = new Set<number>();
const resultLines = new Set<number>();
const lineRanges: { start: number; end: number; }[] = [];
const readLine = (lineNumber: number) => text.slice(lineRanges[lineNumber].start, lineRanges[lineNumber].end);
let prevLineEnd = 0;
let lineEndingMatch: RegExpExecArray | null = null;
const lineEndRegex = /\r?\n/g;
while ((lineEndingMatch = lineEndRegex.exec(text))) {
lineRanges.push({ start: prevLineEnd, end: lineEndingMatch.index });
prevLineEnd = lineEndingMatch.index + lineEndingMatch[0].length;
}
if (prevLineEnd < text.length) { lineRanges.push({ start: prevLineEnd, end: text.length }); }
let startLine = 0;
for (const { matchStartIndex, matchedText } of patternIndecies) {
if (remainingResultQuota < 0) {
break;
}
while (Boolean(lineRanges[startLine + 1]) && matchStartIndex > lineRanges[startLine].end) {
startLine++;
}
let endLine = startLine;
while (Boolean(lineRanges[endLine + 1]) && matchStartIndex + matchedText.length > lineRanges[endLine].end) {
endLine++;
}
if (options.beforeContext) {
for (let contextLine = Math.max(0, startLine - options.beforeContext); contextLine < startLine; contextLine++) {
contextLinesNeeded.add(contextLine);
}
}
let previewText = '';
let offset = 0;
for (let matchLine = startLine; matchLine <= endLine; matchLine++) {
let previewLine = readLine(matchLine);
if (options.previewOptions?.charsPerLine && previewLine.length > options.previewOptions.charsPerLine) {
offset = Math.max(matchStartIndex - lineRanges[startLine].start - 20, 0);
previewLine = previewLine.substr(offset, options.previewOptions.charsPerLine);
}
previewText += `${previewLine}\n`;
resultLines.add(matchLine);
}
const fileRange = new Range(
startLine,
matchStartIndex - lineRanges[startLine].start,
endLine,
matchStartIndex + matchedText.length - lineRanges[endLine].start
);
const previewRange = new Range(
0,
matchStartIndex - lineRanges[startLine].start - offset,
endLine - startLine,
matchStartIndex + matchedText.length - lineRanges[endLine].start - (endLine === startLine ? offset : 0)
);
const match: ITextSearchResult = {
ranges: fileRange,
preview: { text: previewText, matches: previewRange },
};
results.push(match);
if (options.afterContext) {
for (let contextLine = endLine + 1; contextLine <= Math.min(endLine + options.afterContext, lineRanges.length - 1); contextLine++) {
contextLinesNeeded.add(contextLine);
}
}
}
for (const contextLine of contextLinesNeeded) {
if (!resultLines.has(contextLine)) {
results.push({
text: readLine(contextLine),
lineNumber: contextLine + 1,
});
}
}
}
return results;
};
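For orientation, here is a minimal sketch of how getFileResults above could be exercised on its own; the input text, pattern, and option values are illustrative assumptions, not values taken from this change:
// Illustrative only: run getFileResults over an in-memory UTF-8 buffer.
const buffer = new TextEncoder().encode('first line\nsecond line with needle\nthird line\n');
const found = getFileResults(buffer, /needle/g, {
	beforeContext: 1,
	afterContext: 1,
	previewOptions: undefined,
	remainingResultQuota: 100,
});
// Expect one match entry for the second line plus context entries for the surrounding lines.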
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { UriComponents } from 'vs/base/common/uri';
import { IFileMatch, IFileQueryProps, IFolderQuery, ITextQueryProps } from 'vs/workbench/services/search/common/search';
export interface IWorkerTextSearchComplete {
results: IFileMatch<UriComponents>[];
limitHit?: boolean;
}
export interface IWorkerFileSearchComplete {
results: string[];
limitHit?: boolean;
}
export interface ILocalFileSearchSimpleWorker {
_requestHandlerBrand: any;
cancelQuery(queryId: number): void;
listDirectory(handle: FileSystemDirectoryHandle, queryProps: IFileQueryProps<UriComponents>, folderQuery: IFolderQuery, queryId: number): Promise<IWorkerFileSearchComplete>
searchDirectory(handle: FileSystemDirectoryHandle, queryProps: ITextQueryProps<UriComponents>, folderQuery: IFolderQuery, queryId: number): Promise<IWorkerTextSearchComplete>
}
export interface ILocalFileSearchSimpleWorkerHost {
sendTextSearchMatch(match: IFileMatch<UriComponents>, queryId: number): void
}
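The two interfaces above describe both directions of the worker protocol: the client calls listDirectory and searchDirectory, and the worker streams intermediate matches back through sendTextSearchMatch, tagged with the queryId of the originating call. A purely illustrative host implementation might look like this (the class name is hypothetical; the real host is LocalFileSearchWorkerClient above):
// Hypothetical host: just logs the matches streamed back from the worker.
class LoggingSearchHost implements ILocalFileSearchSimpleWorkerHost {
	sendTextSearchMatch(match: IFileMatch<UriComponents>, queryId: number): void {
		console.log(`query ${queryId} reported a match in`, match.resource.path);
	}
}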
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as glob from 'vs/base/common/glob';
// TODO: this doesn't properly support a lot of the intricacies of .gitignore, for instance
// vscode's root gitignore has:
// extensions/**/dist/
// /out*/
// /extensions/**/out/
// but paths like /extensions/css-language-features/client/dist/browser/cssClientMain.js.map are being searched
export function parseIgnoreFile(ignoreContents: string) {
const ignoreLines = ignoreContents.split('\n').map(line => line.trim()).filter(line => line[0] !== '#');
const ignoreExpression = Object.create(null);
for (const line of ignoreLines) {
ignoreExpression[line] = true;
}
const checker = glob.parse(ignoreExpression);
return checker;
}
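A rough sketch of how the helper above behaves; the ignore-file contents and paths are made up, and the exact matching semantics are whatever vs/base/common/glob gives for these patterns:
// Hypothetical usage: every trimmed, non-comment line becomes a key in a glob expression.
const isIgnored = parseIgnoreFile('node_modules\n*.log\n# build output\nout\n');
isIgnored('node_modules'); // truthy: literal pattern match
isIgnored('debug.log');    // truthy, assuming '*.log' matches here
isIgnored('src/main.ts');  // falsy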
......@@ -89,4 +89,4 @@ function getMatchStartEnd(match: ITextSearchMatch): { start: number, end: number
start: matchStartLine,
end: matchEndLine
};
}
}
\ No newline at end of file
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { IPatternInfo } from 'vs/workbench/services/search/common/search';
function escapeRegExpCharacters(value: string): string {
return value.replace(/[-\\{}*+?|^$.[\]()#]/g, '\\$&');
}
export function createRegExp(options: IPatternInfo): RegExp {
let searchString = options.pattern;
if (!searchString) {
throw new Error('Cannot create regex from empty string');
}
if (!options.isRegExp) {
searchString = escapeRegExpCharacters(searchString);
}
if (options.isWordMatch) {
if (!/\B/.test(searchString.charAt(0))) {
searchString = `\\b${searchString}`;
}
if (!/\B/.test(searchString.charAt(searchString.length - 1))) {
searchString = `${searchString}\\b`;
}
}
let modifiers = 'gmu';
if (!options.isCaseSensitive) {
modifiers += 'i';
}
return new RegExp(searchString, modifiers);
}
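To illustrate the escaping and word-match handling above (the inputs are hypothetical, and the flag comments assume isCaseSensitive is not set unless shown):
// Hypothetical inputs for createRegExp:
createRegExp({ pattern: 'a.b' });                                        // /a\.b/gmui — the dot is escaped because isRegExp is not set
createRegExp({ pattern: 'needle', isWordMatch: true });                  // /\bneedle\b/gmui
createRegExp({ pattern: 'a+b', isRegExp: true, isCaseSensitive: true }); // /a+b/gmu — used verbatim
createRegExp({ pattern: '' });                                           // throws: cannot create regex from an empty string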
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as glob from 'vs/base/common/glob';
import { UriComponents, URI } from 'vs/base/common/uri';
import { IRequestHandler } from 'vs/base/common/worker/simpleWorker';
import { ILocalFileSearchSimpleWorker, ILocalFileSearchSimpleWorkerHost, IWorkerFileSearchComplete, IWorkerTextSearchComplete } from 'vs/workbench/services/search/common/localFileSearchWorkerTypes';
import { ICommonQueryProps, IFileMatch, IFileQueryProps, IFolderQuery, ITextQueryProps, } from 'vs/workbench/services/search/common/search';
import * as extpath from 'vs/base/common/extpath';
import * as paths from 'vs/base/common/path';
import { CancellationToken, CancellationTokenSource } from 'vs/base/common/cancellation';
import { getFileResults } from 'vs/workbench/services/search/common/getFileResults';
import { createRegExp } from 'vs/workbench/services/search/common/searchRegexp';
import { parseIgnoreFile } from '../common/parseIgnoreFile';
const PERF = false;
type FileNode = {
type: 'file',
name: string,
path: string,
resolve: () => Promise<ArrayBuffer>
};
type DirNode = {
type: 'dir',
name: string,
entries: Promise<(DirNode | FileNode)[]>
};
const globalStart = +new Date();
const itrcount: Record<string, number> = {};
const time = async <T>(name: string, task: () => Promise<T> | T) => {
if (!PERF) { return task(); }
const start = Date.now();
const itr = (itrcount[name] ?? 0) + 1;
console.info(name, itr, 'starting', Math.round((start - globalStart) * 10) / 10000);
itrcount[name] = itr;
const r = await task();
const end = Date.now();
console.info(name, itr, 'took', end - start);
return r;
};
/**
* Called on the worker side
* @internal
*/
export function create(host: ILocalFileSearchSimpleWorkerHost): IRequestHandler {
return new LocalFileSearchSimpleWorker(host);
}
export class LocalFileSearchSimpleWorker implements ILocalFileSearchSimpleWorker, IRequestHandler {
_requestHandlerBrand: any;
cancellationTokens: Map<number, CancellationTokenSource> = new Map();
constructor(private host: ILocalFileSearchSimpleWorkerHost) { }
cancelQuery(queryId: number): void {
this.cancellationTokens.get(queryId)?.cancel();
}
private registerCancellationToken(queryId: number): CancellationTokenSource {
const source = new CancellationTokenSource();
this.cancellationTokens.set(queryId, source);
return source;
}
async listDirectory(handle: FileSystemDirectoryHandle, query: IFileQueryProps<UriComponents>, folderQuery: IFolderQuery<UriComponents>, queryId: number): Promise<IWorkerFileSearchComplete> {
const token = this.registerCancellationToken(queryId);
const entries: string[] = [];
let limitHit = false;
let count = 0;
const filePatternMatcher = query.filePattern
? (name: string) => query.filePattern!.split('').every(c => name.includes(c))
: (name: string) => true;
await time('listDirectory', () => this.walkFolderQuery(handle, query, folderQuery, file => {
if (!filePatternMatcher(file.name)) {
return;
}
count++;
if (query.maxResults && count > query.maxResults) {
limitHit = true;
token.cancel();
}
return entries.push(file.path);
}, token.token));
return {
results: entries,
limitHit
};
}
async searchDirectory(handle: FileSystemDirectoryHandle, query: ITextQueryProps<UriComponents>, folderQuery: IFolderQuery<UriComponents>, queryId: number): Promise<IWorkerTextSearchComplete> {
return time('searchInFiles', async () => {
const token = this.registerCancellationToken(queryId);
const results: IFileMatch[] = [];
const pattern = createRegExp(query.contentPattern);
const onGoingProcesses: Promise<void>[] = [];
let fileCount = 0;
let resultCount = 0;
let limitHit = false;
const processFile = async (file: FileNode) => {
if (token.token.isCancellationRequested) {
return;
}
fileCount++;
const contents = await file.resolve();
if (token.token.isCancellationRequested) {
return;
}
const bytes = new Uint8Array(contents);
const fileResults = getFileResults(bytes, pattern, {
afterContext: query.afterContext ?? 0,
beforeContext: query.beforeContext ?? 0,
previewOptions: query.previewOptions,
remainingResultQuota: query.maxResults ? (query.maxResults - resultCount) : 10000,
});
if (fileResults.length) {
resultCount += fileResults.length;
if (query.maxResults && resultCount > query.maxResults) {
token.cancel();
}
const match = {
resource: URI.joinPath(URI.revive(folderQuery.folder), file.path),
results: fileResults,
};
this.host.sendTextSearchMatch(match, queryId);
results.push(match);
}
};
await time('walkFolderToResolve', () =>
this.walkFolderQuery(handle, query, folderQuery, async file => onGoingProcesses.push(processFile(file)), token.token)
);
await time('resolveOngoingProcesses', () => Promise.all(onGoingProcesses));
if (PERF) { console.log('Searched in', fileCount, 'files'); }
return {
results,
limitHit,
};
});
}
private async walkFolderQuery(handle: FileSystemDirectoryHandle, queryProps: ICommonQueryProps<UriComponents>, folderQuery: IFolderQuery<UriComponents>, onFile: (file: FileNode) => any, token: CancellationToken): Promise<void> {
const globalFolderExcludes = glob.parse(folderQuery.excludePattern ?? {}) as unknown as (path: string) => boolean;
// For folders, only check if the folder is explicitly excluded so walking continues.
const isFolderExcluded = (path: string, folderExcludes: (path: string) => boolean) => {
if (folderExcludes(path)) { return true; }
if (pathExcludedInQuery(queryProps, path)) { return true; }
return false;
};
// For files ensure the full check takes place.
const isFileIncluded = (path: string, folderExcludes: (path: string) => boolean) => {
if (folderExcludes(path)) { return false; }
if (!pathIncludedInQuery(queryProps, path)) { return false; }
return true;
};
const proccessFile = (file: FileSystemFileHandle, prior: string): FileNode => {
const resolved: FileNode = {
type: 'file',
name: file.name,
path: prior,
resolve: () => file.getFile().then(r => r.arrayBuffer())
} as const;
return resolved;
};
const processDirectory = async (directory: FileSystemDirectoryHandle, prior: string, priorFolderExcludes: (path: string) => boolean): Promise<DirNode> => {
const ignoreFiles = await Promise.all([
directory.getFileHandle('.gitignore').catch(e => undefined),
directory.getFileHandle('.ignore').catch(e => undefined),
]);
let folderExcludes = priorFolderExcludes;
await Promise.all(ignoreFiles.map(async file => {
if (!file) { return; }
const ignoreContents = new TextDecoder('utf8').decode(new Uint8Array(await (await file.getFile()).arrayBuffer()));
const checker = parseIgnoreFile(ignoreContents);
priorFolderExcludes = folderExcludes;
folderExcludes = (path: string) => {
if (checker('/' + path)) {
return false;
}
return priorFolderExcludes(path);
};
}));
const entries = new Promise<(FileNode | DirNode)[]>(async c => {
const files: FileNode[] = [];
const dirs: Promise<DirNode>[] = [];
for await (const entry of directory.entries()) {
if (token.isCancellationRequested) {
break;
}
const path = prior ? prior + '/' + entry[0] : entry[0];
if (entry[1].kind === 'directory' && !isFolderExcluded(path, folderExcludes)) {
dirs.push(processDirectory(entry[1], path, folderExcludes));
} else if (entry[1].kind === 'file' && isFileIncluded(path, folderExcludes)) {
files.push(proccessFile(entry[1], path));
}
}
c([...await Promise.all(dirs), ...files]);
});
return {
type: 'dir',
name: directory.name,
entries
};
};
const resolveDirectory = async (directory: DirNode, onFile: (f: FileNode) => any) => {
if (token.isCancellationRequested) { return; }
await Promise.all(
(await directory.entries)
.sort((a, b) => -(a.type === 'dir' ? 0 : 1) + (b.type === 'dir' ? 0 : 1))
.map(async entry => {
if (entry.type === 'dir') {
await resolveDirectory(entry, onFile);
}
else {
await onFile(entry);
}
}));
};
const processed = await time('process', () => processDirectory(handle, '', globalFolderExcludes));
await time('resolve', () => resolveDirectory(processed, onFile));
}
}
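One detail worth calling out from listDirectory above: the filePattern check is deliberately loose and only requires every character of the typed pattern to occur somewhere in the candidate file name; ranking is left to the caller. A tiny hypothetical illustration:
// Hypothetical stand-in for the filePatternMatcher built in listDirectory.
const matchesPattern = (filePattern: string, name: string) =>
	filePattern.split('').every(c => name.includes(c));
matchesPattern('idx', 'index.ts'); // true: 'i', 'd' and 'x' all occur in the name
matchesPattern('abc', 'index.ts'); // false: 'a' and 'b' do not occur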
export function pathExcludedInQuery(queryProps: ICommonQueryProps<UriComponents>, fsPath: string): boolean {
if (queryProps.excludePattern && glob.match(queryProps.excludePattern, fsPath)) {
return true;
}
return false;
}
export function pathIncludedInQuery(queryProps: ICommonQueryProps<UriComponents>, fsPath: string): boolean {
if (queryProps.excludePattern && glob.match(queryProps.excludePattern, fsPath)) {
return false;
}
if (queryProps.includePattern || queryProps.usingSearchPaths) {
if (queryProps.includePattern && glob.match(queryProps.includePattern, fsPath)) {
return true;
}
// If searchPaths are being used, the extra file must be in a subfolder and match the pattern, if present
if (queryProps.usingSearchPaths) {
return !!queryProps.folderQueries && queryProps.folderQueries.some(fq => {
const searchPath = fq.folder.path;
if (extpath.isEqualOrParent(fsPath, searchPath)) {
const relPath = paths.relative(searchPath, fsPath);
return !fq.includePattern || !!glob.match(fq.includePattern, relPath);
} else {
return false;
}
});
}
return false;
}
return true;
}
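Finally, a hypothetical illustration of pathIncludedInQuery above; the patterns and paths are made up and assume the usual vs/base/common/glob semantics for '**':
// Hypothetical query props: exclude node_modules, include only TypeScript sources.
const queryProps: ICommonQueryProps<UriComponents> = {
	folderQueries: [],
	excludePattern: { '**/node_modules/**': true },
	includePattern: { '**/*.ts': true },
};
pathIncludedInQuery(queryProps, 'src/main.ts');               // true
pathIncludedInQuery(queryProps, 'src/main.js');               // false: not covered by includePattern
pathIncludedInQuery(queryProps, 'node_modules/lib/index.ts'); // false: excluded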