Commit 808c99e6 authored by roblou

Persist the search worker processes for 30s after doing a search, to fix #16347 and #16306

Parent fbaf5b21
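Summary of the change: previously the text-search engine forked one search-worker process per CPU core at the start of every search and disposed all of them when the search finished, so every search paid full process start-up cost. This commit moves worker creation into a shared TextSearchWorkerProvider and passes an idle `timeout` to the child-process IPC `Client`, so that (per the commit message) the same worker processes are reused and only go away 30 seconds after their last request. The sketch below is a condensed paraphrase of the new provider shown in full further down; the class name `PersistentWorkerPool` is illustrative only, the commit's actual class is `TextSearchWorkerProvider`.

```ts
// Condensed sketch of the new worker pool (see textSearchWorkerProvider.ts below).
// The pool is topped up lazily to one worker per CPU core and the same channel
// clients are returned on every search; per the commit message, the `timeout`
// option is what lets an idle child process exit 30s after its last request.
import * as os from 'os';
import uri from 'vs/base/common/uri';
import * as ipc from 'vs/base/parts/ipc/common/ipc';
import { Client } from 'vs/base/parts/ipc/node/ipc.cp';
import { ISearchWorker, ISearchWorkerChannel, SearchWorkerChannelClient } from './worker/searchWorkerIpc';

export class PersistentWorkerPool { // illustrative name; the commit calls it TextSearchWorkerProvider
    private workers: ISearchWorker[] = [];

    getWorkers(): ISearchWorker[] {
        while (this.workers.length < os.cpus().length) {
            const client = new Client(uri.parse(require.toUrl('bootstrap')).fsPath, {
                serverName: 'Search Worker ' + this.workers.length,
                args: ['--type=searchWorker'],
                timeout: 30 * 1000, // persist the process for 30s of inactivity
                env: {
                    AMD_ENTRYPOINT: 'vs/workbench/services/search/node/worker/searchWorkerApp',
                    PIPE_LOGGING: 'true',
                    VERBOSE_LOGGING: process.env.VERBOSE_LOGGING
                }
            });
            const channel = ipc.getNextTickChannel(client.getChannel<ISearchWorkerChannel>('searchWorker'));
            this.workers.push(new SearchWorkerChannelClient(channel));
        }
        return this.workers;
    }
}
```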
@@ -20,6 +20,7 @@ import { PPromise, TPromise } from 'vs/base/common/winjs.base';
import { MAX_FILE_SIZE } from 'vs/platform/files/common/files';
import { FileWalker, Engine as FileSearchEngine } from 'vs/workbench/services/search/node/fileSearch';
import { Engine as TextSearchEngine } from 'vs/workbench/services/search/node/textSearch';
import { TextSearchWorkerProvider } from 'vs/workbench/services/search/node/textSearchWorkerProvider';
import { IRawSearchService, IRawSearch, IRawFileMatch, ISerializedFileMatch, ISerializedSearchProgressItem, ISerializedSearchComplete, ISearchEngine } from './search';
import { ICachedSearchStats, IProgress } from 'vs/platform/search/common/search';
@@ -31,19 +32,28 @@ export class SearchService implements IRawSearchService {
private caches: { [cacheKey: string]: Cache; } = Object.create(null);
private textSearchWorkerProvider: TextSearchWorkerProvider;
public fileSearch(config: IRawSearch): PPromise<ISerializedSearchComplete, ISerializedSearchProgressItem> {
return this.doFileSearch(FileSearchEngine, config, SearchService.BATCH_SIZE);
}
public textSearch(config: IRawSearch): PPromise<ISerializedSearchComplete, ISerializedSearchProgressItem> {
let engine = new TextSearchEngine(config, new FileWalker({
rootFolders: config.rootFolders,
extraFiles: config.extraFiles,
includePattern: config.includePattern,
excludePattern: config.excludePattern,
filePattern: config.filePattern,
maxFilesize: MAX_FILE_SIZE
}));
if (!this.textSearchWorkerProvider) {
this.textSearchWorkerProvider = new TextSearchWorkerProvider();
}
let engine = new TextSearchEngine(
config,
new FileWalker({
rootFolders: config.rootFolders,
extraFiles: config.extraFiles,
includePattern: config.includePattern,
excludePattern: config.excludePattern,
filePattern: config.filePattern,
maxFilesize: MAX_FILE_SIZE
}),
this.textSearchWorkerProvider);
return this.doSearchWithBatchTimeout(engine, SearchService.BATCH_SIZE);
}
@@ -5,20 +5,15 @@
'use strict';
import uri from 'vs/base/common/uri';
import * as os from 'os';
import * as path from 'path';
import * as ipc from 'vs/base/parts/ipc/common/ipc';
import { onUnexpectedError } from 'vs/base/common/errors';
import { IProgress } from 'vs/platform/search/common/search';
import { FileWalker } from 'vs/workbench/services/search/node/fileSearch';
import { ISerializedFileMatch, ISerializedSearchComplete, IRawSearch, ISearchEngine } from './search';
import { ISearchWorkerConfig, ISearchWorker, ISearchWorkerChannel, SearchWorkerChannelClient } from './worker/searchWorkerIpc';
import { Client } from 'vs/base/parts/ipc/node/ipc.cp';
import { ISerializedFileMatch, ISerializedSearchComplete, IRawSearch, ISearchEngine } from './search';
import { ISearchWorker, ISearchWorkerConfig } from './worker/searchWorkerIpc';
import { ITextSearchWorkerProvider } from './textSearchWorkerProvider';
export class Engine implements ISearchEngine<ISerializedFileMatch> {
@@ -37,13 +32,15 @@ export class Engine implements ISearchEngine<ISerializedFileMatch> {
private limitReached = false;
private numResults = 0;
private workerProvider: ITextSearchWorkerProvider;
private workers: ISearchWorker[];
private nextWorker = 0;
private workers: ISearchWorker[] = [];
private workerClients: Client[] = [];
constructor(config: IRawSearch, walker: FileWalker) {
constructor(config: IRawSearch, walker: FileWalker, workerProvider: ITextSearchWorkerProvider) {
this.config = config;
this.walker = walker;
this.workerProvider = workerProvider;
}
cancel(): void {
@@ -56,8 +53,18 @@ export class Engine implements ISearchEngine<ISerializedFileMatch> {
});
}
initializeWorkers(): void {
this.workers.forEach(w => {
const config: ISearchWorkerConfig = { pattern: this.config.contentPattern, fileEncoding: this.config.fileEncoding };
w.initialize(config)
.then(null, onUnexpectedError);
});
}
search(onResult: (match: ISerializedFileMatch) => void, onProgress: (progress: IProgress) => void, done: (error: Error, complete: ISerializedSearchComplete) => void): void {
this.startWorkers();
this.workers = this.workerProvider.getWorkers();
this.initializeWorkers();
const progress = () => {
if (++this.progressed % Engine.PROGRESS_FLUSH_CHUNK_SIZE === 0) {
onProgress({ total: this.totalBytes, worked: this.processedBytes }); // buffer progress in chunks to reduce pressure
@@ -75,7 +82,6 @@ export class Engine implements ISearchEngine<ISerializedFileMatch> {
// Emit done()
if (!this.isDone && this.processedBytes === this.totalBytes && this.walkerIsDone) {
this.isDone = true;
this.disposeWorkers();
done(this.walkerError, {
limitHit: this.limitReached,
stats: this.walker.getStats()
@@ -153,39 +159,4 @@ export class Engine implements ISearchEngine<ISerializedFileMatch> {
this.walkerError = error;
});
}
private startWorkers(): void {
// If the CPU has hyperthreading enabled, this will report (# of physical cores)*2.
const numWorkers = os.cpus().length;
for (let i = 0; i < numWorkers; i++) {
this.createWorker(i);
}
}
private createWorker(id: number): void {
let client = new Client(
uri.parse(require.toUrl('bootstrap')).fsPath,
{
serverName: 'Search Worker ' + id,
args: ['--type=searchWorker'],
env: {
AMD_ENTRYPOINT: 'vs/workbench/services/search/node/worker/searchWorkerApp',
PIPE_LOGGING: 'true',
VERBOSE_LOGGING: process.env.VERBOSE_LOGGING
}
});
// Make async?
const channel = ipc.getNextTickChannel(client.getChannel<ISearchWorkerChannel>('searchWorker'));
const channelClient = new SearchWorkerChannelClient(channel);
const config: ISearchWorkerConfig = { pattern: this.config.contentPattern, id, fileEncoding: this.config.fileEncoding };
channelClient.initialize(config).then(null, onUnexpectedError);
this.workers.push(channelClient);
this.workerClients.push(client);
}
private disposeWorkers(): void {
this.workerClients.forEach(c => c.dispose());
}
}
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as os from 'os';
import uri from 'vs/base/common/uri';
import * as ipc from 'vs/base/parts/ipc/common/ipc';
import { Client } from 'vs/base/parts/ipc/node/ipc.cp';
import { ISearchWorker, ISearchWorkerChannel, SearchWorkerChannelClient } from './worker/searchWorkerIpc';
export interface ITextSearchWorkerProvider {
getWorkers(): ISearchWorker[];
}
export class TextSearchWorkerProvider implements ITextSearchWorkerProvider {
private workers: ISearchWorker[] = [];
getWorkers(): ISearchWorker[] {
const numWorkers = os.cpus().length;
while (this.workers.length < numWorkers) {
this.createWorker();
}
return this.workers;
}
private createWorker(): void {
let client = new Client(
uri.parse(require.toUrl('bootstrap')).fsPath,
{
serverName: 'Search Worker ' + this.workers.length,
args: ['--type=searchWorker'],
timeout: 30 * 1000,
env: {
AMD_ENTRYPOINT: 'vs/workbench/services/search/node/worker/searchWorkerApp',
PIPE_LOGGING: 'true',
VERBOSE_LOGGING: process.env.VERBOSE_LOGGING
}
});
const channel = ipc.getNextTickChannel(client.getChannel<ISearchWorkerChannel>('searchWorker'));
const channelClient = new SearchWorkerChannelClient(channel);
this.workers.push(channelClient);
}
}
\ No newline at end of file
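On the consuming side (textSearch.ts above), the engine no longer spawns or disposes processes itself: at the start of each search it asks the provider for the pool and re-sends the search-specific configuration to every worker, since a persistent worker can now serve many searches with different patterns. A minimal sketch of that per-search initialization, using the types from searchWorkerIpc.ts in this commit:

```ts
// Each search re-initializes the shared workers with its own pattern and encoding.
// Note that the per-worker `id` field was dropped from ISearchWorkerConfig.
import { onUnexpectedError } from 'vs/base/common/errors';
import { IRawSearch } from './search';
import { ISearchWorker, ISearchWorkerConfig } from './worker/searchWorkerIpc';

function initializeWorkers(workers: ISearchWorker[], config: IRawSearch): void {
    const workerConfig: ISearchWorkerConfig = {
        pattern: config.contentPattern,
        fileEncoding: config.fileEncoding
    };
    workers.forEach(w => w.initialize(workerConfig).then(null, onUnexpectedError));
}
```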
@@ -25,9 +25,6 @@ interface ReadLinesOptions {
encoding: string;
}
// Global isCanceled flag for the process. It's only set once and this avoids awkwardness in passing it around.
let isCanceled = false;
const MAX_FILE_ERRORS = 5; // Don't report more than this number of errors, 1 per file, to avoid flooding the log when there's a general issue
let numErrorsLogged = 0;
function onError(error: any): void {
@@ -36,71 +33,27 @@ function onError(error: any): void {
}
}
export class SearchWorker implements ISearchWorker {
private contentPattern: RegExp;
private nextSearch = TPromise.wrap(null);
private config: ISearchWorkerConfig;
private fileEncoding: string;
export class SearchWorkerManager implements ISearchWorker {
private currentSearchEngine: SearchWorkerEngine;
initialize(config: ISearchWorkerConfig): TPromise<void> {
this.contentPattern = strings.createRegExp(config.pattern.pattern, config.pattern.isRegExp, { matchCase: config.pattern.isCaseSensitive, wholeWord: config.pattern.isWordMatch, multiline: false, global: true });
this.config = config;
this.fileEncoding = encodingExists(config.fileEncoding) ? config.fileEncoding : UTF8;
this.currentSearchEngine = new SearchWorkerEngine(config);
return TPromise.wrap<void>(undefined);
}
cancel(): TPromise<void> {
isCanceled = true;
// Cancel the current search. It will stop searching and close its open files.
this.currentSearchEngine.cancel();
return TPromise.wrap<void>(null);
}
search(args: ISearchWorkerSearchArgs): TPromise<ISearchWorkerSearchResult> {
// Queue this search to run after the current one
return this.nextSearch = this.nextSearch
.then(() => searchBatch(args.absolutePaths, this.contentPattern, this.fileEncoding, args.maxResults));
}
}
if (!this.currentSearchEngine) {
return TPromise.wrapError(new Error('SearchWorker is not initialized'));
}
/**
* Searches some number of the given paths concurrently, and starts searches in other paths when those complete.
*/
function searchBatch(absolutePaths: string[], contentPattern: RegExp, fileEncoding: string, maxResults?: number): TPromise<ISearchWorkerSearchResult> {
if (isCanceled) {
return TPromise.wrap(null);
return this.currentSearchEngine.searchBatch(args);
}
return new TPromise(batchDone => {
const result: ISearchWorkerSearchResult = {
matches: [],
numMatches: 0,
limitReached: false
};
// Search in the given path, and when it's finished, search in the next path in absolutePaths
const startSearchInFile = (absolutePath: string): TPromise<void> => {
return searchInFile(absolutePath, contentPattern, fileEncoding, maxResults && (maxResults - result.numMatches)).then(fileResult => {
// Finish early if search is canceled
if (isCanceled) {
return;
}
if (fileResult) {
result.numMatches += fileResult.numMatches;
result.matches.push(fileResult.match.serialize());
if (fileResult.limitReached) {
// If the limit was reached, terminate early with the results so far and cancel in-progress searches.
isCanceled = true;
result.limitReached = true;
return batchDone(result);
}
}
}, onError);
};
TPromise.join(absolutePaths.map(startSearchInFile)).then(() => {
batchDone(result);
});
});
}
interface IFileSearchResult {
@@ -109,160 +62,225 @@ interface IFileSearchResult {
limitReached?: boolean;
}
function searchInFile(absolutePath: string, contentPattern: RegExp, fileEncoding: string, maxResults?: number): TPromise<IFileSearchResult> {
let fileMatch: FileMatch = null;
let limitReached = false;
let numMatches = 0;
const perLineCallback = (line: string, lineNumber: number) => {
let lineMatch: LineMatch = null;
let match = contentPattern.exec(line);
export class SearchWorkerEngine {
private contentPattern: RegExp;
private fileEncoding: string;
private nextSearch = TPromise.wrap(null);
// Record all matches into file result
while (match !== null && match[0].length > 0 && !isCanceled && !limitReached) {
if (fileMatch === null) {
fileMatch = new FileMatch(absolutePath);
}
private isCanceled = false;
if (lineMatch === null) {
lineMatch = new LineMatch(line, lineNumber);
fileMatch.addMatch(lineMatch);
}
constructor(config: ISearchWorkerConfig) {
this.contentPattern = strings.createRegExp(config.pattern.pattern, config.pattern.isRegExp, { matchCase: config.pattern.isCaseSensitive, wholeWord: config.pattern.isWordMatch, multiline: false, global: true });
this.fileEncoding = encodingExists(config.fileEncoding) ? config.fileEncoding : UTF8;
}
lineMatch.addMatch(match.index, match[0].length);
/**
* Searches some number of the given paths concurrently, and starts searches in other paths when those complete.
*/
searchBatch(args: ISearchWorkerSearchArgs): TPromise<ISearchWorkerSearchResult> {
return this.nextSearch =
this.nextSearch.then(() => this._searchBatch(args));
}
numMatches++;
if (maxResults && numMatches >= maxResults) {
limitReached = true;
}
match = contentPattern.exec(line);
private _searchBatch(args: ISearchWorkerSearchArgs): TPromise<ISearchWorkerSearchResult> {
if (this.isCanceled) {
return TPromise.wrap(null);
}
};
// Read lines buffered to support large files
return readlinesAsync(absolutePath, perLineCallback, { bufferLength: 8096, encoding: fileEncoding }).then(
() => fileMatch ? { match: fileMatch, limitReached, numMatches } : null);
}
return new TPromise(batchDone => {
const result: ISearchWorkerSearchResult = {
matches: [],
numMatches: 0,
limitReached: false
};
function readlinesAsync(filename: string, perLineCallback: (line: string, lineNumber: number) => void, options: ReadLinesOptions): TPromise<void> {
return new TPromise<void>((resolve, reject) => {
fs.open(filename, 'r', null, (error: Error, fd: number) => {
if (error) {
return reject(error);
}
// Search in the given path, and when it's finished, search in the next path in absolutePaths
const startSearchInFile = (absolutePath: string): TPromise<void> => {
return this.searchInFile(absolutePath, this.contentPattern, this.fileEncoding, args.maxResults && (args.maxResults - result.numMatches)).then(fileResult => {
// Finish early if search is canceled
if (this.isCanceled) {
return;
}
if (fileResult) {
result.numMatches += fileResult.numMatches;
result.matches.push(fileResult.match.serialize());
if (fileResult.limitReached) {
// If the limit was reached, terminate early with the results so far and cancel in-progress searches.
this.cancel();
result.limitReached = true;
return batchDone(result);
}
}
}, onError);
};
TPromise.join(args.absolutePaths.map(startSearchInFile)).then(() => {
batchDone(result);
});
});
}
cancel(): void {
this.isCanceled = true;
}
let buffer = new Buffer(options.bufferLength);
let pos: number;
let i: number;
let line = '';
let lineNumber = 0;
let lastBufferHadTraillingCR = false;
private searchInFile(absolutePath: string, contentPattern: RegExp, fileEncoding: string, maxResults?: number): TPromise<IFileSearchResult> {
let fileMatch: FileMatch = null;
let limitReached = false;
let numMatches = 0;
const perLineCallback = (line: string, lineNumber: number) => {
let lineMatch: LineMatch = null;
let match = contentPattern.exec(line);
const decodeBuffer = (buffer: NodeBuffer, start, end): string => {
if (options.encoding === UTF8 || options.encoding === UTF8_with_bom) {
return buffer.toString(undefined, start, end); // much faster to use built in toString() when encoding is default
// Record all matches into file result
while (match !== null && match[0].length > 0 && !this.isCanceled && !limitReached) {
if (fileMatch === null) {
fileMatch = new FileMatch(absolutePath);
}
return decode(buffer.slice(start, end), options.encoding);
};
if (lineMatch === null) {
lineMatch = new LineMatch(line, lineNumber);
fileMatch.addMatch(lineMatch);
}
const lineFinished = (offset: number): void => {
line += decodeBuffer(buffer, pos, i + offset);
perLineCallback(line, lineNumber);
line = '';
lineNumber++;
pos = i + offset;
};
lineMatch.addMatch(match.index, match[0].length);
numMatches++;
if (maxResults && numMatches >= maxResults) {
limitReached = true;
}
match = contentPattern.exec(line);
}
};
// Read lines buffered to support large files
return this.readlinesAsync(absolutePath, perLineCallback, { bufferLength: 8096, encoding: fileEncoding }).then(
() => fileMatch ? { match: fileMatch, limitReached, numMatches } : null);
}
const readFile = (isFirstRead: boolean, clb: (error: Error) => void): void => {
if (isCanceled) {
return clb(null); // return early if canceled or limit reached
private readlinesAsync(filename: string, perLineCallback: (line: string, lineNumber: number) => void, options: ReadLinesOptions): TPromise<void> {
return new TPromise<void>((resolve, reject) => {
fs.open(filename, 'r', null, (error: Error, fd: number) => {
if (error) {
return reject(error);
}
fs.read(fd, buffer, 0, buffer.length, null, (error: Error, bytesRead: number, buffer: NodeBuffer) => {
if (error || bytesRead === 0 || isCanceled) {
return clb(error); // return early if canceled or limit reached or no more bytes to read
let buffer = new Buffer(options.bufferLength);
let pos: number;
let i: number;
let line = '';
let lineNumber = 0;
let lastBufferHadTraillingCR = false;
const decodeBuffer = (buffer: NodeBuffer, start, end): string => {
if (options.encoding === UTF8 || options.encoding === UTF8_with_bom) {
return buffer.toString(undefined, start, end); // much faster to use built in toString() when encoding is default
}
pos = 0;
i = 0;
return decode(buffer.slice(start, end), options.encoding);
};
// Detect encoding and mime when this is the beginning of the file
if (isFirstRead) {
let mimeAndEncoding = detectMimeAndEncodingFromBuffer(buffer, bytesRead);
if (mimeAndEncoding.mimes[mimeAndEncoding.mimes.length - 1] !== baseMime.MIME_TEXT) {
return clb(null); // skip files that seem binary
}
const lineFinished = (offset: number): void => {
line += decodeBuffer(buffer, pos, i + offset);
perLineCallback(line, lineNumber);
line = '';
lineNumber++;
pos = i + offset;
};
// Check for BOM offset
switch (mimeAndEncoding.encoding) {
case UTF8:
pos = i = 3;
options.encoding = UTF8;
break;
case UTF16be:
pos = i = 2;
options.encoding = UTF16be;
break;
case UTF16le:
pos = i = 2;
options.encoding = UTF16le;
break;
}
const readFile = (isFirstRead: boolean, clb: (error: Error) => void): void => {
if (this.isCanceled) {
return clb(null); // return early if canceled or limit reached
}
if (lastBufferHadTraillingCR) {
if (buffer[i] === 0x0a) { // LF (Line Feed)
lineFinished(1);
i++;
} else {
lineFinished(0);
fs.read(fd, buffer, 0, buffer.length, null, (error: Error, bytesRead: number, buffer: NodeBuffer) => {
if (error || bytesRead === 0 || this.isCanceled) {
return clb(error); // return early if canceled or limit reached or no more bytes to read
}
lastBufferHadTraillingCR = false;
}
pos = 0;
i = 0;
// Detect encoding and mime when this is the beginning of the file
if (isFirstRead) {
let mimeAndEncoding = detectMimeAndEncodingFromBuffer(buffer, bytesRead);
if (mimeAndEncoding.mimes[mimeAndEncoding.mimes.length - 1] !== baseMime.MIME_TEXT) {
return clb(null); // skip files that seem binary
}
// Check for BOM offset
switch (mimeAndEncoding.encoding) {
case UTF8:
pos = i = 3;
options.encoding = UTF8;
break;
case UTF16be:
pos = i = 2;
options.encoding = UTF16be;
break;
case UTF16le:
pos = i = 2;
options.encoding = UTF16le;
break;
}
}
for (; i < bytesRead; ++i) {
if (buffer[i] === 0x0a) { // LF (Line Feed)
lineFinished(1);
} else if (buffer[i] === 0x0d) { // CR (Carriage Return)
if (i + 1 === bytesRead) {
lastBufferHadTraillingCR = true;
} else if (buffer[i + 1] === 0x0a) { // LF (Line Feed)
lineFinished(2);
if (lastBufferHadTraillingCR) {
if (buffer[i] === 0x0a) { // LF (Line Feed)
lineFinished(1);
i++;
} else {
lineFinished(1);
lineFinished(0);
}
}
}
line += decodeBuffer(buffer, pos, bytesRead);
lastBufferHadTraillingCR = false;
}
readFile(/*isFirstRead=*/false, clb); // Continue reading
});
};
for (; i < bytesRead; ++i) {
if (buffer[i] === 0x0a) { // LF (Line Feed)
lineFinished(1);
} else if (buffer[i] === 0x0d) { // CR (Carriage Return)
if (i + 1 === bytesRead) {
lastBufferHadTraillingCR = true;
} else if (buffer[i + 1] === 0x0a) { // LF (Line Feed)
lineFinished(2);
i++;
} else {
lineFinished(1);
}
}
}
readFile(/*isFirstRead=*/true, (error: Error) => {
if (error) {
return reject(error);
}
line += decodeBuffer(buffer, pos, bytesRead);
if (line.length) {
perLineCallback(line, lineNumber); // handle last line
}
readFile(/*isFirstRead=*/false, clb); // Continue reading
});
};
fs.close(fd, (error: Error) => {
readFile(/*isFirstRead=*/true, (error: Error) => {
if (error) {
reject(error);
} else {
resolve(null);
return reject(error);
}
if (line.length) {
perLineCallback(line, lineNumber); // handle last line
}
fs.close(fd, (error: Error) => {
if (error) {
reject(error);
} else {
resolve(null);
}
});
});
});
});
});
}
}
export class FileMatch implements ISerializedFileMatch {
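The searchWorker.ts rewrite above follows from the same lifetime change: the module-level isCanceled flag and the single SearchWorker class are replaced by a SearchWorkerManager that creates a fresh SearchWorkerEngine on each initialize() call, with cancellation state held on the engine instance. A process-global flag would otherwise stay set after the first cancel (or after maxResults was hit) and silently abort every later search handled by the now long-lived process. A minimal sketch of that scoping, with an illustrative class name:

```ts
// Sketch only: cancellation is scoped to one engine instance, so a reused worker
// process starts every new search with a clean flag.
class PerSearchEngineSketch {
    private isCanceled = false;

    cancel(): void {
        this.isCanceled = true; // affects only the search this engine was created for
    }

    shouldStop(): boolean {
        return this.isCanceled; // checked before reading each file and each buffer
    }
}
```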
@@ -7,9 +7,9 @@
import { Server } from 'vs/base/parts/ipc/node/ipc.cp';
import { SearchWorkerChannel } from './searchWorkerIpc';
import { SearchWorker } from './searchWorker';
import { SearchWorkerManager } from './searchWorker';
const server = new Server();
const worker = new SearchWorker();
const worker = new SearchWorkerManager();
const channel = new SearchWorkerChannel(worker);
server.registerChannel('searchWorker', channel);
@@ -9,12 +9,11 @@ import { TPromise } from 'vs/base/common/winjs.base';
import { IChannel } from 'vs/base/parts/ipc/common/ipc';
import { ISerializedFileMatch } from '../search';
import { IPatternInfo } from 'vs/platform/search/common/search';
import { SearchWorker } from './searchWorker';
import { SearchWorkerManager } from './searchWorker';
export interface ISearchWorkerConfig {
pattern: IPatternInfo;
fileEncoding: string;
id: number;
}
export interface ISearchWorkerSearchArgs {
@@ -42,7 +41,7 @@ export interface ISearchWorkerChannel extends IChannel {
}
export class SearchWorkerChannel implements ISearchWorkerChannel {
constructor(private worker: SearchWorker) {
constructor(private worker: SearchWorkerManager) {
}
call(command: string, arg?: any): TPromise<any> {
@@ -16,6 +16,7 @@ import { LineMatch } from 'vs/platform/search/common/search';
import { FileWalker, Engine as FileSearchEngine } from 'vs/workbench/services/search/node/fileSearch';
import { IRawFileMatch } from 'vs/workbench/services/search/node/search';
import { Engine as TextSearchEngine } from 'vs/workbench/services/search/node/textSearch';
import { TextSearchWorkerProvider } from 'vs/workbench/services/search/node/textSearchWorkerProvider';
function count(lineMatches: LineMatch[]): number {
let count = 0;
@@ -34,6 +35,8 @@ function rootfolders() {
return [path.normalize(require.toUrl('./fixtures'))];
}
const textSearchWorkerProvider = new TextSearchWorkerProvider();
suite('Search', () => {
test('Files: *.js', function (done: () => void) {
@@ -622,7 +625,7 @@ suite('Search', () => {
contentPattern: { pattern: 'GameOfLife', modifiers: 'i' }
};
let engine = new TextSearchEngine(config, new FileWalker(config));
let engine = new TextSearchEngine(config, new FileWalker(config), textSearchWorkerProvider);
engine.search((result) => {
if (result && result.lineMatches) {
@@ -643,7 +646,7 @@ suite('Search', () => {
contentPattern: { pattern: 'Game.?fL\\w?fe', isRegExp: true }
};
let engine = new TextSearchEngine(config, new FileWalker(config));
let engine = new TextSearchEngine(config, new FileWalker(config), textSearchWorkerProvider);
engine.search((result) => {
if (result && result.lineMatches) {
@@ -664,7 +667,7 @@ suite('Search', () => {
contentPattern: { pattern: 'GameOfLife', isWordMatch: true, isCaseSensitive: true }
};
let engine = new TextSearchEngine(config, new FileWalker(config));
let engine = new TextSearchEngine(config, new FileWalker(config), textSearchWorkerProvider);
engine.search((result) => {
if (result && result.lineMatches) {
@@ -685,7 +688,7 @@ suite('Search', () => {
contentPattern: { pattern: 'Helvetica', modifiers: 'i' }
};
let engine = new TextSearchEngine(config, new FileWalker(config));
let engine = new TextSearchEngine(config, new FileWalker(config), textSearchWorkerProvider);
engine.search((result) => {
if (result && result.lineMatches) {
@@ -706,7 +709,7 @@ suite('Search', () => {
contentPattern: { pattern: 'e', modifiers: 'i' }
};
let engine = new TextSearchEngine(config, new FileWalker(config));
let engine = new TextSearchEngine(config, new FileWalker(config), textSearchWorkerProvider);
engine.search((result) => {
if (result && result.lineMatches) {
@@ -728,7 +731,7 @@ suite('Search', () => {
excludePattern: { '**/examples': true }
};
let engine = new TextSearchEngine(config, new FileWalker(config));
let engine = new TextSearchEngine(config, new FileWalker(config), textSearchWorkerProvider);
engine.search((result) => {
if (result && result.lineMatches) {
@@ -750,7 +753,7 @@ suite('Search', () => {
includePattern: { '**/examples/**': true }
};
let engine = new TextSearchEngine(config, new FileWalker(config));
let engine = new TextSearchEngine(config, new FileWalker(config), textSearchWorkerProvider);
engine.search((result) => {
if (result && result.lineMatches) {
@@ -773,7 +776,7 @@ suite('Search', () => {
excludePattern: { '**/examples/small.js': true }
};
let engine = new TextSearchEngine(config, new FileWalker(config));
let engine = new TextSearchEngine(config, new FileWalker(config), textSearchWorkerProvider);
engine.search((result) => {
if (result && result.lineMatches) {
@@ -795,7 +798,7 @@ suite('Search', () => {
maxResults: 520
};
let engine = new TextSearchEngine(config, new FileWalker(config));
let engine = new TextSearchEngine(config, new FileWalker(config), textSearchWorkerProvider);
engine.search((result) => {
if (result && result.lineMatches) {
@@ -819,7 +822,7 @@ suite('Search', () => {
contentPattern: { pattern: 'ahsogehtdas', modifiers: 'i' }
};
let engine = new TextSearchEngine(config, new FileWalker(config));
let engine = new TextSearchEngine(config, new FileWalker(config), textSearchWorkerProvider);
engine.search((result) => {
if (result && result.lineMatches) {
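For callers, the visible change is small: construct one TextSearchWorkerProvider and pass it to every TextSearchEngine, as the updated tests above do with a single shared textSearchWorkerProvider. Repeated searches that start within the 30-second idle window then reuse warm worker processes instead of paying process start-up each time. A minimal usage sketch with the commit's names; the config handling and callbacks are placeholders:

```ts
import { FileWalker } from 'vs/workbench/services/search/node/fileSearch';
import { Engine as TextSearchEngine } from 'vs/workbench/services/search/node/textSearch';
import { TextSearchWorkerProvider } from 'vs/workbench/services/search/node/textSearchWorkerProvider';
import { IRawSearch } from 'vs/workbench/services/search/node/search';

const workerProvider = new TextSearchWorkerProvider(); // shared across all text searches

function runTextSearch(config: IRawSearch): void {
    const engine = new TextSearchEngine(config, new FileWalker(config), workerProvider);
    engine.search(
        match => console.log('match in', match.path),                        // onResult
        progress => void progress,                                           // onProgress (ignored here)
        (error, complete) => console.log('done', error || complete.stats)    // done
    );
}
```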