From 6f823003a7a7dbb2bfaf326ac2ce641d090bd9c6 Mon Sep 17 00:00:00 2001 From: Benjamin Pasero Date: Mon, 4 Nov 2019 11:57:38 +0100 Subject: [PATCH] files - speed up reads and writes For reading, introduce the readFileStream capability that reduces the overhead of low level file reading by switching to an event based model. For writing, try to consume up to N chunks of data from the underlying source and directly call writeFile if the source is consumed. This will avoid the overhead of low level file reading for small to medium files. --- src/vs/base/common/buffer.ts | 4 +- src/vs/base/common/stream.ts | 152 +++++- src/vs/base/test/common/stream.test.ts | 176 ++++++ src/vs/platform/files/common/fileService.ts | 209 +++----- src/vs/platform/files/common/files.ts | 79 ++- src/vs/platform/files/common/io.ts | 114 ++++ .../files/node/diskFileSystemProvider.ts | 32 +- .../files/test/node/diskFileService.test.ts | 505 +++++++++++------- .../userData/common/fileUserDataProvider.ts | 16 +- 9 files changed, 928 insertions(+), 359 deletions(-) create mode 100644 src/vs/base/test/common/stream.test.ts create mode 100644 src/vs/platform/files/common/io.ts diff --git a/src/vs/base/common/buffer.ts b/src/vs/base/common/buffer.ts index f2c2fab1b7c..25371f5ec79 100644 --- a/src/vs/base/common/buffer.ts +++ b/src/vs/base/common/buffer.ts @@ -164,8 +164,8 @@ export function bufferToStream(buffer: VSBuffer): streams.ReadableStream(buffer, chunks => VSBuffer.concat(chunks)); } -export function streamToBufferReadableStream(stream: streams.ReadableStream): streams.ReadableStream { - return streams.transform(stream, data => typeof data === 'string' ? VSBuffer.fromString(data) : VSBuffer.wrap(data), chunks => VSBuffer.concat(chunks)); +export function streamToBufferReadableStream(stream: streams.ReadableStreamEvents): streams.ReadableStream { + return streams.transform(stream, { data: data => typeof data === 'string' ? 
VSBuffer.fromString(data) : VSBuffer.wrap(data) }, chunks => VSBuffer.concat(chunks)); } export function newWriteableBufferStream(): streams.WriteableStream { diff --git a/src/vs/base/common/stream.ts b/src/vs/base/common/stream.ts index 102e5ce8f43..1172496a897 100644 --- a/src/vs/base/common/stream.ts +++ b/src/vs/base/common/stream.ts @@ -4,16 +4,17 @@ *--------------------------------------------------------------------------------------------*/ /** - * A interface that emulates the API shape of a node.js readable - * stream for use in desktop and web environments. + * The payload that flows in readable stream events. */ -export interface ReadableStream { +export type ReadableStreamEventPayload = T | Error | 'end'; + +export interface ReadableStreamEvents { /** * The 'data' event is emitted whenever the stream is * relinquishing ownership of a chunk of data to a consumer. */ - on(event: 'data', callback: (chunk: T) => void): void; + on(event: 'data', callback: (data: T) => void): void; /** * Emitted when any error occurs. @@ -26,6 +27,13 @@ export interface ReadableStream { * not be emitted unless the data is completely consumed. */ on(event: 'end', callback: () => void): void; +} + +/** + * A interface that emulates the API shape of a node.js readable + * stream for use in desktop and web environments. + */ +export interface ReadableStream extends ReadableStreamEvents { /** * Stops emitting any events until resume() is called. 
@@ -97,11 +105,20 @@ export interface IReducer { (data: T[]): T; } -export interface ITransformer { - (source: S): T; +export interface IDataTransformer { + (data: Original): Transformed; +} + +export interface IErrorTransformer { + (error: Error): Error; } -export function newWriteableStream(reducer: IReducer) { +export interface ITransformer { + data: IDataTransformer; + error?: IErrorTransformer; +} + +export function newWriteableStream(reducer: IReducer): WriteableStream { return new WriteableStreamImpl(reducer); } @@ -119,7 +136,7 @@ class WriteableStreamImpl implements WriteableStream { }; private readonly listeners = { - data: [] as { (chunk: T): void }[], + data: [] as { (data: T): void }[], error: [] as { (error: Error): void }[], end: [] as { (): void }[] }; @@ -302,13 +319,61 @@ export function consumeReadable(readable: Readable, reducer: IReducer): const chunks: T[] = []; let chunk: T | null; - while (chunk = readable.read()) { + while ((chunk = readable.read()) !== null) { chunks.push(chunk); } return reducer(chunks); } +/** + * Helper to read a T readable up to a maximum of chunks. If the limit is + * reached, will return a readable instead to ensure all data can still + * be read. + */ +export function consumeReadableWithLimit(readable: Readable, reducer: IReducer, maxChunks: number): T | Readable { + const chunks: T[] = []; + + let chunk: T | null | undefined = undefined; + while ((chunk = readable.read()) !== null && chunks.length < maxChunks) { + chunks.push(chunk); + } + + // If the last chunk is null, it means we reached the end of + // the readable and return all the data at once + if (chunk === null && chunks.length > 0) { + return reducer(chunks); + } + + // Otherwise, we still have a chunk, it means we reached the maxChunks + // value and as such we return a new Readable that first returns + // the existing read chunks and then continues with reading from + // the underlying readable. 
+ return { + read: () => { + + // First consume chunks from our array + if (chunks.length > 0) { + return chunks.shift()!; + } + + // Then ensure to return our last read chunk + if (typeof chunk !== 'undefined') { + const lastReadChunk = chunk; + + // explicitly use undefined here to indicate that we consumed + // the chunk, which could have either been null or valued. + chunk = undefined; + + return lastReadChunk; + } + + // Finally delegate back to the Readable + return readable.read(); + } + }; +} + /** * Helper to fully read a T stream into a T. */ @@ -316,12 +381,68 @@ export function consumeStream(stream: ReadableStream, reducer: IReducer return new Promise((resolve, reject) => { const chunks: T[] = []; - stream.on('data', chunk => chunks.push(chunk)); + stream.on('data', data => chunks.push(data)); stream.on('error', error => reject(error)); stream.on('end', () => resolve(reducer(chunks))); }); } +/** + * Helper to read a T stream up to a maximum of chunks. If the limit is + * reached, will return a stream instead to ensure all data can still + * be read. 
+ */ +export function consumeStreamWithLimit(stream: ReadableStream, reducer: IReducer, maxChunks: number): Promise> { + return new Promise((resolve, reject) => { + const chunks: T[] = []; + + let wrapperStream: WriteableStream | undefined = undefined; + + stream.on('data', data => { + + // If we reach maxChunks, we start to return a stream + // and make sure that any data we have already read + // is in it as well + if (!wrapperStream && chunks.length === maxChunks) { + wrapperStream = newWriteableStream(reducer); + while (chunks.length) { + wrapperStream.write(chunks.shift()!); + } + + wrapperStream.write(data); + + return resolve(wrapperStream); + } + + if (wrapperStream) { + wrapperStream.write(data); + } else { + chunks.push(data); + } + }); + + stream.on('error', error => { + if (wrapperStream) { + wrapperStream.error(error); + } else { + return reject(error); + } + }); + + stream.on('end', () => { + if (wrapperStream) { + while (chunks.length) { + wrapperStream.write(chunks.shift()!); + } + + wrapperStream.end(); + } else { + return resolve(reducer(chunks)); + } + }); + }); +} + /** * Helper to create a readable stream from an existing T. */ @@ -352,12 +473,15 @@ export function toReadable(t: T): Readable { }; } -export function transform(stream: ReadableStream, transformer: ITransformer, reducer: IReducer): ReadableStream { - const target = newWriteableStream(reducer); +/** + * Helper to transform a readable stream into another stream. + */ +export function transform(stream: ReadableStreamEvents, transformer: ITransformer, reducer: IReducer): ReadableStream { + const target = newWriteableStream(reducer); - stream.on('data', data => target.write(transformer(data))); + stream.on('data', data => target.write(transformer.data(data))); stream.on('end', () => target.end()); - stream.on('error', error => target.error(error)); + stream.on('error', error => target.error(transformer.error ? 
transformer.error(error) : error)); return target; } diff --git a/src/vs/base/test/common/stream.test.ts b/src/vs/base/test/common/stream.test.ts new file mode 100644 index 00000000000..6d86fbc2eb2 --- /dev/null +++ b/src/vs/base/test/common/stream.test.ts @@ -0,0 +1,176 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. + *--------------------------------------------------------------------------------------------*/ + +import * as assert from 'assert'; +import { isReadableStream, newWriteableStream, Readable, consumeReadable, consumeReadableWithLimit, consumeStream, ReadableStream, toStream, toReadable, transform, consumeStreamWithLimit } from 'vs/base/common/stream'; + +suite('Stream', () => { + + test('isReadableStream', () => { + assert.ok(!isReadableStream(Object.create(null))); + assert.ok(isReadableStream(newWriteableStream(d => d))); + }); + + test('WriteableStream', () => { + const stream = newWriteableStream(strings => strings.join()); + + let error = false; + stream.on('error', e => { + error = true; + }); + + let end = false; + stream.on('end', () => { + end = true; + }); + + stream.write('Hello'); + + const chunks: string[] = []; + stream.on('data', data => { + chunks.push(data); + }); + + assert.equal(chunks[0], 'Hello'); + + stream.write('World'); + assert.equal(chunks[1], 'World'); + + assert.equal(error, false); + assert.equal(end, false); + + stream.pause(); + stream.write('1'); + stream.write('2'); + stream.write('3'); + + assert.equal(chunks.length, 2); + + stream.resume(); + + assert.equal(chunks.length, 3); + assert.equal(chunks[2], '1,2,3'); + + stream.error(new Error()); + assert.equal(error, true); + + stream.end('Final Bit'); + assert.equal(chunks.length, 4); + assert.equal(chunks[3], 'Final Bit'); + + stream.destroy(); + + 
stream.write('Unexpected'); + assert.equal(chunks.length, 4); + }); + + test('consumeReadable', () => { + const readable = arrayToReadable(['1', '2', '3', '4', '5']); + const consumed = consumeReadable(readable, strings => strings.join()); + assert.equal(consumed, '1,2,3,4,5'); + }); + + test('consumeReadableWithLimit', () => { + for (let i = 0; i < 5; i++) { + const readable = arrayToReadable(['1', '2', '3', '4', '5']); + + const consumedOrReadable = consumeReadableWithLimit(readable, strings => strings.join(), i); + if (typeof consumedOrReadable === 'string') { + assert.fail('Unexpected result'); + } else { + const consumed = consumeReadable(consumedOrReadable, strings => strings.join()); + assert.equal(consumed, '1,2,3,4,5'); + } + } + + let readable = arrayToReadable(['1', '2', '3', '4', '5']); + let consumedOrReadable = consumeReadableWithLimit(readable, strings => strings.join(), 5); + assert.equal(consumedOrReadable, '1,2,3,4,5'); + + readable = arrayToReadable(['1', '2', '3', '4', '5']); + consumedOrReadable = consumeReadableWithLimit(readable, strings => strings.join(), 6); + assert.equal(consumedOrReadable, '1,2,3,4,5'); + }); + + function arrayToReadable(array: T[]): Readable { + return { + read: () => array.shift() || null + }; + } + + function readableToStream(readable: Readable): ReadableStream { + const stream = newWriteableStream(strings => strings.join()); + + // Simulate async behavior + setTimeout(() => { + let chunk: string | null = null; + while ((chunk = readable.read()) !== null) { + stream.write(chunk); + } + + stream.end(); + }, 0); + + return stream; + } + + test('consumeStream', async () => { + const stream = readableToStream(arrayToReadable(['1', '2', '3', '4', '5'])); + const consumed = await consumeStream(stream, strings => strings.join()); + assert.equal(consumed, '1,2,3,4,5'); + }); + + test('consumeStreamWithLimit', async () => { + for (let i = 0; i < 5; i++) { + const readable = readableToStream(arrayToReadable(['1', '2', '3', '4', 
'5'])); + + const consumedOrStream = await consumeStreamWithLimit(readable, strings => strings.join(), i); + if (typeof consumedOrStream === 'string') { + assert.fail('Unexpected result'); + } else { + const consumed = await consumeStream(consumedOrStream, strings => strings.join()); + assert.equal(consumed, '1,2,3,4,5'); + } + } + + let stream = readableToStream(arrayToReadable(['1', '2', '3', '4', '5'])); + let consumedOrStream = await consumeStreamWithLimit(stream, strings => strings.join(), 5); + assert.equal(consumedOrStream, '1,2,3,4,5'); + + stream = readableToStream(arrayToReadable(['1', '2', '3', '4', '5'])); + consumedOrStream = await consumeStreamWithLimit(stream, strings => strings.join(), 6); + assert.equal(consumedOrStream, '1,2,3,4,5'); + }); + + test('toStream', async () => { + const stream = toStream('1,2,3,4,5', strings => strings.join()); + const consumed = await consumeStream(stream, strings => strings.join()); + assert.equal(consumed, '1,2,3,4,5'); + }); + + test('toReadable', async () => { + const readable = toReadable('1,2,3,4,5'); + const consumed = await consumeReadable(readable, strings => strings.join()); + assert.equal(consumed, '1,2,3,4,5'); + }); + + test('transform', async () => { + const source = newWriteableStream(strings => strings.join()); + + const result = transform(source, { data: string => string + string }, strings => strings.join()); + + // Simulate async behavior + setTimeout(() => { + source.write('1'); + source.write('2'); + source.write('3'); + source.write('4'); + source.end('5'); + }, 0); + + const consumed = await consumeStream(result, strings => strings.join()); + assert.equal(consumed, '11,22,33,44,55'); + }); +}); diff --git a/src/vs/platform/files/common/fileService.ts b/src/vs/platform/files/common/fileService.ts index bfa0f9a236f..b789748febe 100644 --- a/src/vs/platform/files/common/fileService.ts +++ b/src/vs/platform/files/common/fileService.ts @@ -4,7 +4,7 @@ 
*--------------------------------------------------------------------------------------------*/ import { Disposable, IDisposable, toDisposable, dispose, DisposableStore } from 'vs/base/common/lifecycle'; -import { IFileService, IResolveFileOptions, FileChangesEvent, FileOperationEvent, IFileSystemProviderRegistrationEvent, IFileSystemProvider, IFileStat, IResolveFileResult, ICreateFileOptions, IFileSystemProviderActivationEvent, FileOperationError, FileOperationResult, FileOperation, FileSystemProviderCapabilities, FileType, toFileSystemProviderErrorCode, FileSystemProviderErrorCode, IStat, IFileStatWithMetadata, IResolveMetadataFileOptions, etag, hasReadWriteCapability, hasFileFolderCopyCapability, hasOpenReadWriteCloseCapability, toFileOperationResult, IFileSystemProviderWithOpenReadWriteCloseCapability, IFileSystemProviderWithFileReadWriteCapability, IResolveFileResultWithMetadata, IWatchOptions, IWriteFileOptions, IReadFileOptions, IFileStreamContent, IFileContent, ETAG_DISABLED } from 'vs/platform/files/common/files'; +import { IFileService, IResolveFileOptions, FileChangesEvent, FileOperationEvent, IFileSystemProviderRegistrationEvent, IFileSystemProvider, IFileStat, IResolveFileResult, ICreateFileOptions, IFileSystemProviderActivationEvent, FileOperationError, FileOperationResult, FileOperation, FileSystemProviderCapabilities, FileType, toFileSystemProviderErrorCode, FileSystemProviderErrorCode, IStat, IFileStatWithMetadata, IResolveMetadataFileOptions, etag, hasReadWriteCapability, hasFileFolderCopyCapability, hasOpenReadWriteCloseCapability, toFileOperationResult, IFileSystemProviderWithOpenReadWriteCloseCapability, IFileSystemProviderWithFileReadWriteCapability, IResolveFileResultWithMetadata, IWatchOptions, IWriteFileOptions, IReadFileOptions, IFileStreamContent, IFileContent, ETAG_DISABLED, hasFileReadStreamCapability, IFileSystemProviderWithFileReadStreamCapability, ensureFileSystemProviderError } from 'vs/platform/files/common/files'; import { URI } 
from 'vs/base/common/uri'; import { Event, Emitter } from 'vs/base/common/event'; import { isAbsolutePath, dirname, basename, joinPath, isEqual, isEqualOrParent } from 'vs/base/common/resources'; @@ -13,12 +13,13 @@ import { TernarySearchTree } from 'vs/base/common/map'; import { isNonEmptyArray, coalesce } from 'vs/base/common/arrays'; import { getBaseLabel } from 'vs/base/common/labels'; import { ILogService } from 'vs/platform/log/common/log'; -import { VSBuffer, VSBufferReadable, readableToBuffer, bufferToReadable, streamToBuffer, bufferToStream, VSBufferReadableStream, newWriteableBufferStream, VSBufferWriteableStream } from 'vs/base/common/buffer'; -import { isReadableStream } from 'vs/base/common/stream'; +import { VSBuffer, VSBufferReadable, readableToBuffer, bufferToReadable, streamToBuffer, bufferToStream, VSBufferReadableStream } from 'vs/base/common/buffer'; +import { isReadableStream, transform, ReadableStreamEvents, consumeReadableWithLimit, consumeStreamWithLimit } from 'vs/base/common/stream'; import { Queue } from 'vs/base/common/async'; import { CancellationTokenSource, CancellationToken } from 'vs/base/common/cancellation'; import { Schemas } from 'vs/base/common/network'; import { assign } from 'vs/base/common/objects'; +import { createReadStream } from 'vs/platform/files/common/io'; export class FileService extends Disposable implements IFileService { @@ -120,14 +121,24 @@ export class FileService extends Disposable implements IFileService { return provider; } - private async withReadWriteProvider(resource: URI): Promise { + private async withReadProvider(resource: URI): Promise { + const provider = await this.withProvider(resource); + + if (hasOpenReadWriteCloseCapability(provider) || hasReadWriteCapability(provider) || hasFileReadStreamCapability(provider)) { + return provider; + } + + throw new Error('Provider neither has FileReadWrite, FileReadStream nor FileOpenReadWriteClose capability which is needed for the read operation.'); + } + + 
private async withWriteProvider(resource: URI): Promise { const provider = await this.withProvider(resource); if (hasOpenReadWriteCloseCapability(provider) || hasReadWriteCapability(provider)) { return provider; } - throw new Error('Provider neither has FileReadWrite nor FileOpenReadWriteClose capability which is needed for the operation.'); + throw new Error('Provider neither has FileReadWrite nor FileOpenReadWriteClose capability which is needed for the write operation.'); } //#endregion @@ -156,7 +167,7 @@ export class FileService extends Disposable implements IFileService { } // Bubble up any other error as is - throw this.ensureError(error); + throw ensureFileSystemProviderError(error); } } @@ -290,7 +301,7 @@ export class FileService extends Disposable implements IFileService { } async writeFile(resource: URI, bufferOrReadableOrStream: VSBuffer | VSBufferReadable | VSBufferReadableStream, options?: IWriteFileOptions): Promise { - const provider = this.throwIfFileSystemIsReadonly(await this.withReadWriteProvider(resource)); + const provider = this.throwIfFileSystemIsReadonly(await this.withWriteProvider(resource)); try { @@ -302,6 +313,18 @@ export class FileService extends Disposable implements IFileService { await this.mkdirp(provider, dirname(resource)); } + // optimization: if the provider has unbuffered write capability and the data + // to write is a Readable, we consume up to 3 chunks and try to write the data + // unbuffered to reduce the overhead. If the Readable has more data to provide + // we continue to write buffered. 
+ if (hasReadWriteCapability(provider) && !(bufferOrReadableOrStream instanceof VSBuffer)) { + if (isReadableStream(bufferOrReadableOrStream)) { + bufferOrReadableOrStream = await consumeStreamWithLimit(bufferOrReadableOrStream, data => VSBuffer.concat(data), 3); + } else { + bufferOrReadableOrStream = consumeReadableWithLimit(bufferOrReadableOrStream, data => VSBuffer.concat(data), 3); + } + } + // write file: unbuffered (only if data to write is a buffer, or the provider has no buffered write capability) if (!hasOpenReadWriteCloseCapability(provider) || (hasReadWriteCapability(provider) && bufferOrReadableOrStream instanceof VSBuffer)) { await this.doWriteUnbuffered(provider, resource, bufferOrReadableOrStream); @@ -312,7 +335,7 @@ export class FileService extends Disposable implements IFileService { await this.doWriteBuffered(provider, resource, bufferOrReadableOrStream instanceof VSBuffer ? bufferToReadable(bufferOrReadableOrStream) : bufferOrReadableOrStream); } } catch (error) { - throw new FileOperationError(localize('err.write', "Unable to write file ({0})", this.ensureError(error).toString()), toFileOperationResult(error), options); + throw new FileOperationError(localize('err.write', "Unable to write file ({0})", ensureFileSystemProviderError(error).toString()), toFileOperationResult(error), options); } return this.resolve(resource, { resolveMetadata: true }); @@ -355,7 +378,7 @@ export class FileService extends Disposable implements IFileService { } async readFile(resource: URI, options?: IReadFileOptions): Promise { - const provider = await this.withReadWriteProvider(resource); + const provider = await this.withReadProvider(resource); const stream = await this.doReadAsFileStream(provider, resource, assign({ // optimization: since we know that the caller does not @@ -373,12 +396,12 @@ export class FileService extends Disposable implements IFileService { } async readFileStream(resource: URI, options?: IReadFileOptions): Promise { - const provider = await 
this.withReadWriteProvider(resource); + const provider = await this.withReadProvider(resource); return this.doReadAsFileStream(provider, resource, options); } - private async doReadAsFileStream(provider: IFileSystemProviderWithFileReadWriteCapability | IFileSystemProviderWithOpenReadWriteCloseCapability, resource: URI, options?: IReadFileOptions & { preferUnbuffered?: boolean }): Promise { + private async doReadAsFileStream(provider: IFileSystemProviderWithFileReadWriteCapability | IFileSystemProviderWithOpenReadWriteCloseCapability | IFileSystemProviderWithFileReadStreamCapability, resource: URI, options?: IReadFileOptions & { preferUnbuffered?: boolean }): Promise { // install a cancellation token that gets cancelled // when any error occurs. this allows us to resolve @@ -406,10 +429,15 @@ export class FileService extends Disposable implements IFileService { let fileStreamPromise: Promise; // read unbuffered (only if either preferred, or the provider has no buffered read capability) - if (!hasOpenReadWriteCloseCapability(provider) || (hasReadWriteCapability(provider) && options?.preferUnbuffered)) { + if (!(hasOpenReadWriteCloseCapability(provider) || hasFileReadStreamCapability(provider)) || (hasReadWriteCapability(provider) && options?.preferUnbuffered)) { fileStreamPromise = this.readFileUnbuffered(provider, resource, options); } + // read streamed (always prefer over primitive buffered read) + else if (hasFileReadStreamCapability(provider)) { + fileStreamPromise = Promise.resolve(this.readFileStreamed(provider, resource, cancellableSource.token, options)); + } + // read buffered else { fileStreamPromise = Promise.resolve(this.readFileBuffered(provider, resource, cancellableSource.token, options)); @@ -422,74 +450,30 @@ export class FileService extends Disposable implements IFileService { value: fileStream }; } catch (error) { - throw new FileOperationError(localize('err.read', "Unable to read file ({0})", this.ensureError(error).toString()), 
toFileOperationResult(error), options); + throw new FileOperationError(localize('err.read', "Unable to read file ({0})", ensureFileSystemProviderError(error).toString()), toFileOperationResult(error), options); } } - private readFileBuffered(provider: IFileSystemProviderWithOpenReadWriteCloseCapability, resource: URI, token: CancellationToken, options?: IReadFileOptions): VSBufferReadableStream { - const stream = newWriteableBufferStream(); + private readFileStreamed(provider: IFileSystemProviderWithFileReadStreamCapability, resource: URI, token: CancellationToken, options: IReadFileOptions = Object.create(null)): VSBufferReadableStream { + const fileStream = provider.readFileStream(resource, options, token); - // do not await reading but simply return - // the stream directly since it operates - // via events. finally end the stream and - // send through the possible error - let error: Error | undefined = undefined; - this.doReadFileBuffered(provider, resource, stream, token, options).then(undefined, err => error = err).finally(() => stream.end(error)); - - return stream; + return this.transformFileReadStream(fileStream, options); } - private async doReadFileBuffered(provider: IFileSystemProviderWithOpenReadWriteCloseCapability, resource: URI, stream: VSBufferWriteableStream, token: CancellationToken, options?: IReadFileOptions): Promise { - - // open handle through provider - const handle = await provider.open(resource, { create: false }); - - try { - let totalBytesRead = 0; - let bytesRead = 0; - let allowedRemainingBytes = (options && typeof options.length === 'number') ? options.length : undefined; - - let buffer = VSBuffer.alloc(Math.min(this.BUFFER_SIZE, typeof allowedRemainingBytes === 'number' ? allowedRemainingBytes : this.BUFFER_SIZE)); - - let posInFile = options && typeof options.position === 'number' ? 
options.position : 0; - let posInBuffer = 0; - do { - // read from source (handle) at current position (pos) into buffer (buffer) at - // buffer position (posInBuffer) up to the size of the buffer (buffer.byteLength). - bytesRead = await provider.read(handle, posInFile, buffer.buffer, posInBuffer, buffer.byteLength - posInBuffer); - - posInFile += bytesRead; - posInBuffer += bytesRead; - totalBytesRead += bytesRead; - - if (typeof allowedRemainingBytes === 'number') { - allowedRemainingBytes -= bytesRead; - } - - // when buffer full, create a new one and emit it through stream - if (posInBuffer === buffer.byteLength) { - stream.write(buffer); - - buffer = VSBuffer.alloc(Math.min(this.BUFFER_SIZE, typeof allowedRemainingBytes === 'number' ? allowedRemainingBytes : this.BUFFER_SIZE)); - - posInBuffer = 0; - } - } while (bytesRead > 0 && (typeof allowedRemainingBytes !== 'number' || allowedRemainingBytes > 0) && this.throwIfCancelled(token) && this.throwIfTooLarge(totalBytesRead, options)); + private readFileBuffered(provider: IFileSystemProviderWithOpenReadWriteCloseCapability, resource: URI, token: CancellationToken, options: IReadFileOptions = Object.create(null)): VSBufferReadableStream { + const fileStream = createReadStream(provider, resource, { + ...options, + bufferSize: this.BUFFER_SIZE + }, token); - // wrap up with last buffer (also respect maxBytes if provided) - if (posInBuffer > 0) { - let lastChunkLength = posInBuffer; - if (typeof allowedRemainingBytes === 'number') { - lastChunkLength = Math.min(posInBuffer, allowedRemainingBytes); - } + return this.transformFileReadStream(fileStream, options); + } - stream.write(buffer.slice(0, lastChunkLength)); - } - } catch (error) { - throw this.ensureError(error); - } finally { - await provider.close(handle); - } + private transformFileReadStream(stream: ReadableStreamEvents, options: IReadFileOptions): VSBufferReadableStream { + return transform(stream, { + data: data => data instanceof VSBuffer ? 
data : VSBuffer.wrap(data), + error: error => new FileOperationError(localize('err.read', "Unable to read file ({0})", ensureFileSystemProviderError(error).toString()), toFileOperationResult(error), options) + }, data => VSBuffer.concat(data)); } private async readFileUnbuffered(provider: IFileSystemProviderWithFileReadWriteCapability, resource: URI, options?: IReadFileOptions): Promise { @@ -505,34 +489,47 @@ export class FileService extends Disposable implements IFileService { buffer = buffer.slice(0, options.length); } + // Throw if file is too large to load + this.validateReadFileLimits(buffer.byteLength, options); + return bufferToStream(VSBuffer.wrap(buffer)); } private async validateReadFile(resource: URI, options?: IReadFileOptions): Promise { const stat = await this.resolve(resource, { resolveMetadata: true }); - // Return early if resource is a directory + // Throw if resource is a directory if (stat.isDirectory) { throw new FileOperationError(localize('fileIsDirectoryError', "Expected file {0} is actually a directory", this.resourceForError(resource)), FileOperationResult.FILE_IS_DIRECTORY, options); } - // Return early if file not modified since (unless disabled) + // Throw if file not modified since (unless disabled) if (options && typeof options.etag === 'string' && options.etag !== ETAG_DISABLED && options.etag === stat.etag) { throw new FileOperationError(localize('fileNotModifiedError', "File not modified since"), FileOperationResult.FILE_NOT_MODIFIED_SINCE, options); } - // Return early if file is too large to load + // Throw if file is too large to load + this.validateReadFileLimits(stat.size, options); + + return stat; + } + + private validateReadFileLimits(size: number, options?: IReadFileOptions): void { if (options?.limits) { - if (typeof options.limits.memory === 'number' && stat.size > options.limits.memory) { - throw new FileOperationError(localize('fileTooLargeForHeapError', "To open a file of this size, you need to restart and allow it 
to use more memory"), FileOperationResult.FILE_EXCEEDS_MEMORY_LIMIT); + let tooLargeErrorResult: FileOperationResult | undefined = undefined; + + if (typeof options.limits.memory === 'number' && size > options.limits.memory) { + tooLargeErrorResult = FileOperationResult.FILE_EXCEEDS_MEMORY_LIMIT; } - if (typeof options.limits.size === 'number' && stat.size > options.limits.size) { - throw new FileOperationError(localize('fileTooLargeError', "File is too large to open"), FileOperationResult.FILE_TOO_LARGE); + if (typeof options.limits.size === 'number' && size > options.limits.size) { + tooLargeErrorResult = FileOperationResult.FILE_TOO_LARGE; } - } - return stat; + if (typeof tooLargeErrorResult === 'number') { + throw new FileOperationError(localize('fileTooLargeError', "File is too large to open"), tooLargeErrorResult); + } + } } //#endregion @@ -540,8 +537,8 @@ export class FileService extends Disposable implements IFileService { //#region Move/Copy/Delete/Create Folder async move(source: URI, target: URI, overwrite?: boolean): Promise { - const sourceProvider = this.throwIfFileSystemIsReadonly(await this.withReadWriteProvider(source)); - const targetProvider = this.throwIfFileSystemIsReadonly(await this.withReadWriteProvider(target)); + const sourceProvider = this.throwIfFileSystemIsReadonly(await this.withWriteProvider(source)); + const targetProvider = this.throwIfFileSystemIsReadonly(await this.withWriteProvider(target)); // move const mode = await this.doMoveCopy(sourceProvider, source, targetProvider, target, 'move', !!overwrite); @@ -554,8 +551,8 @@ export class FileService extends Disposable implements IFileService { } async copy(source: URI, target: URI, overwrite?: boolean): Promise { - const sourceProvider = await this.withReadWriteProvider(source); - const targetProvider = this.throwIfFileSystemIsReadonly(await this.withReadWriteProvider(target)); + const sourceProvider = await this.withReadProvider(source); + const targetProvider = 
this.throwIfFileSystemIsReadonly(await this.withWriteProvider(target)); // copy const mode = await this.doMoveCopy(sourceProvider, source, targetProvider, target, 'copy', !!overwrite); @@ -567,7 +564,7 @@ export class FileService extends Disposable implements IFileService { return fileStat; } - private async doMoveCopy(sourceProvider: IFileSystemProviderWithFileReadWriteCapability | IFileSystemProviderWithOpenReadWriteCloseCapability, source: URI, targetProvider: IFileSystemProviderWithFileReadWriteCapability | IFileSystemProviderWithOpenReadWriteCloseCapability, target: URI, mode: 'move' | 'copy', overwrite: boolean): Promise<'move' | 'copy'> { + private async doMoveCopy(sourceProvider: IFileSystemProvider, source: URI, targetProvider: IFileSystemProvider, target: URI, mode: 'move' | 'copy', overwrite: boolean): Promise<'move' | 'copy'> { if (source.toString() === target.toString()) { return mode; // simulate node.js behaviour here and do a no-op if paths match } @@ -893,7 +890,7 @@ export class FileService extends Disposable implements IFileService { await this.doWriteReadableBufferedQueued(provider, handle, readableOrStream); } } catch (error) { - throw this.ensureError(error); + throw ensureFileSystemProviderError(error); } finally { // close handle always @@ -935,7 +932,7 @@ export class FileService extends Disposable implements IFileService { let posInFile = 0; let chunk: VSBuffer | null; - while (chunk = readable.read()) { + while ((chunk = readable.read()) !== null) { await this.doWriteBuffer(provider, handle, chunk, chunk.byteLength, posInFile, 0); posInFile += chunk.byteLength; @@ -1004,7 +1001,7 @@ export class FileService extends Disposable implements IFileService { } } while (bytesRead > 0); } catch (error) { - throw this.ensureError(error); + throw ensureFileSystemProviderError(error); } finally { await Promise.all([ typeof sourceHandle === 'number' ? 
sourceProvider.close(sourceHandle) : Promise.resolve(), @@ -1035,7 +1032,7 @@ export class FileService extends Disposable implements IFileService { const buffer = await sourceProvider.readFile(source); await this.doWriteBuffer(targetProvider, targetHandle, VSBuffer.wrap(buffer), buffer.byteLength, 0, 0); } catch (error) { - throw this.ensureError(error); + throw ensureFileSystemProviderError(error); } finally { await targetProvider.close(targetHandle); } @@ -1058,38 +1055,6 @@ export class FileService extends Disposable implements IFileService { return provider; } - private throwIfCancelled(token: CancellationToken): boolean { - if (token.isCancellationRequested) { - throw new Error('cancelled'); - } - - return true; - } - - private ensureError(error?: Error): Error { - if (!error) { - return new Error(localize('unknownError', "Unknown Error")); // https://github.com/Microsoft/vscode/issues/72798 - } - - return error; - } - - private throwIfTooLarge(totalBytesRead: number, options?: IReadFileOptions): boolean { - - // Return early if file is too large to load - if (options?.limits) { - if (typeof options.limits.memory === 'number' && totalBytesRead > options.limits.memory) { - throw new FileOperationError(localize('fileTooLargeForHeapError', "To open a file of this size, you need to restart and allow it to use more memory"), FileOperationResult.FILE_EXCEEDS_MEMORY_LIMIT); - } - - if (typeof options.limits.size === 'number' && totalBytesRead > options.limits.size) { - throw new FileOperationError(localize('fileTooLargeError', "File is too large to open"), FileOperationResult.FILE_TOO_LARGE); - } - } - - return true; - } - private resourceForError(resource: URI): string { if (resource.scheme === Schemas.file) { return resource.fsPath; diff --git a/src/vs/platform/files/common/files.ts b/src/vs/platform/files/common/files.ts index f35a079886a..0356e8dfdd3 100644 --- a/src/vs/platform/files/common/files.ts +++ b/src/vs/platform/files/common/files.ts @@ -3,6 +3,7 @@ * 
Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ +import { localize } from 'vs/nls'; import { sep } from 'vs/base/common/path'; import { URI } from 'vs/base/common/uri'; import * as glob from 'vs/base/common/glob'; @@ -13,6 +14,8 @@ import { IDisposable } from 'vs/base/common/lifecycle'; import { isEqualOrParent, isEqual } from 'vs/base/common/resources'; import { isUndefinedOrNull } from 'vs/base/common/types'; import { VSBuffer, VSBufferReadable, VSBufferReadableStream } from 'vs/base/common/buffer'; +import { ReadableStreamEvents } from 'vs/base/common/stream'; +import { CancellationToken } from 'vs/base/common/cancellation'; export const IFileService = createDecorator('fileService'); @@ -158,6 +161,29 @@ export interface FileOverwriteOptions { overwrite: boolean; } +export interface FileReadStreamOptions { + + /** + * Is an integer specifying where to begin reading from in the file. If position is undefined, + * data will be read from the current file position. + */ + readonly position?: number; + + /** + * Is an integer specifying how many bytes to read from the file. By default, all bytes + * will be read. + */ + readonly length?: number; + + /** + * If provided, the size of the file will be checked against the limits. 
+ */ + limits?: { + readonly size?: number; + readonly memory?: number; + }; +} + export interface FileWriteOptions { overwrite: boolean; create: boolean; @@ -194,6 +220,8 @@ export interface IWatchOptions { export const enum FileSystemProviderCapabilities { FileReadWrite = 1 << 1, FileOpenReadWriteClose = 1 << 2, + FileReadStream = 1 << 4, + FileFolderCopy = 1 << 3, PathCaseSensitive = 1 << 10, @@ -223,6 +251,8 @@ export interface IFileSystemProvider { readFile?(resource: URI): Promise; writeFile?(resource: URI, content: Uint8Array, opts: FileWriteOptions): Promise; + readFileStream?(resource: URI, opts: FileReadStreamOptions, token?: CancellationToken): ReadableStreamEvents; + open?(resource: URI, opts: FileOpenOptions): Promise; close?(fd: number): Promise; read?(fd: number, pos: number, data: Uint8Array, offset: number, length: number): Promise; @@ -257,11 +287,21 @@ export function hasOpenReadWriteCloseCapability(provider: IFileSystemProvider): return !!(provider.capabilities & FileSystemProviderCapabilities.FileOpenReadWriteClose); } +export interface IFileSystemProviderWithFileReadStreamCapability extends IFileSystemProvider { + readFileStream(resource: URI, opts: FileReadStreamOptions, token?: CancellationToken): ReadableStreamEvents; +} + +export function hasFileReadStreamCapability(provider: IFileSystemProvider): provider is IFileSystemProviderWithFileReadStreamCapability { + return !!(provider.capabilities & FileSystemProviderCapabilities.FileReadStream); +} + export enum FileSystemProviderErrorCode { FileExists = 'EntryExists', FileNotFound = 'EntryNotFound', FileNotADirectory = 'EntryNotADirectory', FileIsADirectory = 'EntryIsADirectory', + FileExceedsMemoryLimit = 'EntryExceedsMemoryLimit', + FileTooLarge = 'EntryTooLarge', NoPermissions = 'NoPermissions', Unavailable = 'Unavailable', Unknown = 'Unknown' @@ -274,13 +314,21 @@ export class FileSystemProviderError extends Error { } } -export function createFileSystemProviderError(error: Error, code: 
FileSystemProviderErrorCode): FileSystemProviderError { +export function createFileSystemProviderError(error: Error | string, code: FileSystemProviderErrorCode): FileSystemProviderError { const providerError = new FileSystemProviderError(error.toString(), code); markAsFileSystemProviderError(providerError, code); return providerError; } +export function ensureFileSystemProviderError(error?: Error): Error { + if (!error) { + return createFileSystemProviderError(localize('unknownError', "Unknown Error"), FileSystemProviderErrorCode.Unknown); // https://github.com/Microsoft/vscode/issues/72798 + } + + return error; +} + export function markAsFileSystemProviderError(error: Error, code: FileSystemProviderErrorCode): Error { error.name = code ? `${code} (FileSystemError)` : `FileSystemError`; @@ -311,6 +359,8 @@ export function toFileSystemProviderErrorCode(error: Error | undefined | null): case FileSystemProviderErrorCode.FileIsADirectory: return FileSystemProviderErrorCode.FileIsADirectory; case FileSystemProviderErrorCode.FileNotADirectory: return FileSystemProviderErrorCode.FileNotADirectory; case FileSystemProviderErrorCode.FileNotFound: return FileSystemProviderErrorCode.FileNotFound; + case FileSystemProviderErrorCode.FileExceedsMemoryLimit: return FileSystemProviderErrorCode.FileExceedsMemoryLimit; + case FileSystemProviderErrorCode.FileTooLarge: return FileSystemProviderErrorCode.FileTooLarge; case FileSystemProviderErrorCode.NoPermissions: return FileSystemProviderErrorCode.NoPermissions; case FileSystemProviderErrorCode.Unavailable: return FileSystemProviderErrorCode.Unavailable; } @@ -335,7 +385,10 @@ export function toFileOperationResult(error: Error): FileOperationResult { return FileOperationResult.FILE_PERMISSION_DENIED; case FileSystemProviderErrorCode.FileExists: return FileOperationResult.FILE_MOVE_CONFLICT; - case FileSystemProviderErrorCode.FileNotADirectory: + case FileSystemProviderErrorCode.FileExceedsMemoryLimit: + return 
FileOperationResult.FILE_EXCEEDS_MEMORY_LIMIT; + case FileSystemProviderErrorCode.FileTooLarge: + return FileOperationResult.FILE_TOO_LARGE; default: return FileOperationResult.FILE_OTHER_ERROR; } @@ -612,7 +665,7 @@ export interface IFileStreamContent extends IBaseStatWithMetadata { value: VSBufferReadableStream; } -export interface IReadFileOptions { +export interface IReadFileOptions extends FileReadStreamOptions { /** * The optional etag parameter allows to return early from resolving the resource if @@ -621,26 +674,6 @@ export interface IReadFileOptions { * It is the task of the caller to makes sure to handle this error case from the promise. */ readonly etag?: string; - - /** - * Is an integer specifying where to begin reading from in the file. If position is null, - * data will be read from the current file position. - */ - readonly position?: number; - - /** - * Is an integer specifying how many bytes to read from the file. By default, all bytes - * will be read. - */ - readonly length?: number; - - /** - * If provided, the size of the file will be checked against the limits. - */ - limits?: { - readonly size?: number; - readonly memory?: number; - }; } export interface IWriteFileOptions { diff --git a/src/vs/platform/files/common/io.ts b/src/vs/platform/files/common/io.ts new file mode 100644 index 00000000000..1fb8ff0b285 --- /dev/null +++ b/src/vs/platform/files/common/io.ts @@ -0,0 +1,114 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. 
+ *--------------------------------------------------------------------------------------------*/ + +import { localize } from 'vs/nls'; +import { URI } from 'vs/base/common/uri'; +import { VSBuffer, VSBufferWriteableStream, newWriteableBufferStream, VSBufferReadableStream } from 'vs/base/common/buffer'; +import { CancellationToken } from 'vs/base/common/cancellation'; +import { IFileSystemProviderWithOpenReadWriteCloseCapability, FileReadStreamOptions, createFileSystemProviderError, FileSystemProviderErrorCode, ensureFileSystemProviderError } from 'vs/platform/files/common/files'; +import { canceled } from 'vs/base/common/errors'; + +export interface ICreateReadStreamOptions extends FileReadStreamOptions { + + /** + * The size of the buffer to use before sending to the stream. + */ + bufferSize: number; +} + +export function createReadStream(provider: IFileSystemProviderWithOpenReadWriteCloseCapability, resource: URI, options: ICreateReadStreamOptions, token?: CancellationToken): VSBufferReadableStream { + const stream = newWriteableBufferStream(); + + // do not await reading but simply return the stream directly since it operates + // via events. 
finally end the stream and send through the possible error + let error: Error | undefined = undefined; + + doReadFileIntoStream(provider, resource, stream, options, token).then(undefined, err => error = err).finally(() => stream.end(error)); + + return stream; +} + +async function doReadFileIntoStream(provider: IFileSystemProviderWithOpenReadWriteCloseCapability, resource: URI, stream: VSBufferWriteableStream, options: ICreateReadStreamOptions, token?: CancellationToken): Promise { + + // Check for cancellation + throwIfCancelled(token); + + // open handle through provider + const handle = await provider.open(resource, { create: false }); + + // Check for cancellation + throwIfCancelled(token); + + try { + let totalBytesRead = 0; + let bytesRead = 0; + let allowedRemainingBytes = (options && typeof options.length === 'number') ? options.length : undefined; + + let buffer = VSBuffer.alloc(Math.min(options.bufferSize, typeof allowedRemainingBytes === 'number' ? allowedRemainingBytes : options.bufferSize)); + + let posInFile = options && typeof options.position === 'number' ? options.position : 0; + let posInBuffer = 0; + do { + // read from source (handle) at current position (pos) into buffer (buffer) at + // buffer position (posInBuffer) up to the size of the buffer (buffer.byteLength). + bytesRead = await provider.read(handle, posInFile, buffer.buffer, posInBuffer, buffer.byteLength - posInBuffer); + + posInFile += bytesRead; + posInBuffer += bytesRead; + totalBytesRead += bytesRead; + + if (typeof allowedRemainingBytes === 'number') { + allowedRemainingBytes -= bytesRead; + } + + // when buffer full, create a new one and emit it through stream + if (posInBuffer === buffer.byteLength) { + stream.write(buffer); + + buffer = VSBuffer.alloc(Math.min(options.bufferSize, typeof allowedRemainingBytes === 'number' ? 
allowedRemainingBytes : options.bufferSize)); + + posInBuffer = 0; + } + } while (bytesRead > 0 && (typeof allowedRemainingBytes !== 'number' || allowedRemainingBytes > 0) && throwIfCancelled(token) && throwIfTooLarge(totalBytesRead, options)); + + // wrap up with last buffer (also respect maxBytes if provided) + if (posInBuffer > 0) { + let lastChunkLength = posInBuffer; + if (typeof allowedRemainingBytes === 'number') { + lastChunkLength = Math.min(posInBuffer, allowedRemainingBytes); + } + + stream.write(buffer.slice(0, lastChunkLength)); + } + } catch (error) { + throw ensureFileSystemProviderError(error); + } finally { + await provider.close(handle); + } +} + +function throwIfCancelled(token?: CancellationToken): boolean { + if (token && token.isCancellationRequested) { + throw canceled(); + } + + return true; +} + +function throwIfTooLarge(totalBytesRead: number, options: ICreateReadStreamOptions): boolean { + + // Return early if file is too large to load and we have configured limits + if (options?.limits) { + if (typeof options.limits.memory === 'number' && totalBytesRead > options.limits.memory) { + throw createFileSystemProviderError(localize('fileTooLargeForHeapError', "To open a file of this size, you need to restart and allow it to use more memory"), FileSystemProviderErrorCode.FileExceedsMemoryLimit); + } + + if (typeof options.limits.size === 'number' && totalBytesRead > options.limits.size) { + throw createFileSystemProviderError(localize('fileTooLargeError', "File is too large to open"), FileSystemProviderErrorCode.FileTooLarge); + } + } + + return true; +} diff --git a/src/vs/platform/files/node/diskFileSystemProvider.ts b/src/vs/platform/files/node/diskFileSystemProvider.ts index 4dc39002b35..1dbfa03c62d 100644 --- a/src/vs/platform/files/node/diskFileSystemProvider.ts +++ b/src/vs/platform/files/node/diskFileSystemProvider.ts @@ -6,7 +6,7 @@ import { mkdir, open, close, read, write, fdatasync, Dirent, Stats } from 'fs'; import { promisify } 
from 'util'; import { IDisposable, Disposable, toDisposable, dispose, combinedDisposable } from 'vs/base/common/lifecycle'; -import { IFileSystemProvider, FileSystemProviderCapabilities, IFileChange, IWatchOptions, IStat, FileType, FileDeleteOptions, FileOverwriteOptions, FileWriteOptions, FileOpenOptions, FileSystemProviderErrorCode, createFileSystemProviderError, FileSystemProviderError } from 'vs/platform/files/common/files'; +import { FileSystemProviderCapabilities, IFileChange, IWatchOptions, IStat, FileType, FileDeleteOptions, FileOverwriteOptions, FileWriteOptions, FileOpenOptions, FileSystemProviderErrorCode, createFileSystemProviderError, FileSystemProviderError, IFileSystemProviderWithFileReadWriteCapability, IFileSystemProviderWithFileReadStreamCapability, IFileSystemProviderWithOpenReadWriteCloseCapability, FileReadStreamOptions, IFileSystemProviderWithFileFolderCopyCapability } from 'vs/platform/files/common/files'; import { URI } from 'vs/base/common/uri'; import { Event, Emitter } from 'vs/base/common/event'; import { isLinux, isWindows } from 'vs/base/common/platform'; @@ -22,13 +22,23 @@ import { FileWatcher as UnixWatcherService } from 'vs/platform/files/node/watche import { FileWatcher as WindowsWatcherService } from 'vs/platform/files/node/watcher/win32/watcherService'; import { FileWatcher as NsfwWatcherService } from 'vs/platform/files/node/watcher/nsfw/watcherService'; import { FileWatcher as NodeJSWatcherService } from 'vs/platform/files/node/watcher/nodejs/watcherService'; +import { VSBuffer } from 'vs/base/common/buffer'; +import { CancellationToken } from 'vs/base/common/cancellation'; +import { ReadableStreamEvents, transform } from 'vs/base/common/stream'; +import { createReadStream } from 'vs/platform/files/common/io'; export interface IWatcherOptions { pollingInterval?: number; usePolling: boolean; } -export class DiskFileSystemProvider extends Disposable implements IFileSystemProvider { +export class DiskFileSystemProvider extends 
Disposable implements + IFileSystemProviderWithFileReadWriteCapability, + IFileSystemProviderWithOpenReadWriteCloseCapability, + IFileSystemProviderWithFileReadStreamCapability, + IFileSystemProviderWithFileFolderCopyCapability { + + private readonly BUFFER_SIZE = 64 * 1024; constructor(private logService: ILogService, private watcherOptions?: IWatcherOptions) { super(); @@ -44,6 +54,7 @@ export class DiskFileSystemProvider extends Disposable implements IFileSystemPro this._capabilities = FileSystemProviderCapabilities.FileReadWrite | FileSystemProviderCapabilities.FileOpenReadWriteClose | + FileSystemProviderCapabilities.FileReadStream | FileSystemProviderCapabilities.FileFolderCopy; if (isLinux) { @@ -121,6 +132,15 @@ export class DiskFileSystemProvider extends Disposable implements IFileSystemPro } } + readFileStream(resource: URI, opts: FileReadStreamOptions, token?: CancellationToken): ReadableStreamEvents { + const fileStream = createReadStream(this, resource, { + ...opts, + bufferSize: this.BUFFER_SIZE + }, token); + + return transform(fileStream, { data: data => data.buffer }, data => VSBuffer.concat(data.map(data => VSBuffer.wrap(data))).buffer); + } + async writeFile(resource: URI, content: Uint8Array, opts: FileWriteOptions): Promise { let handle: number | undefined = undefined; try { @@ -131,11 +151,11 @@ export class DiskFileSystemProvider extends Disposable implements IFileSystemPro const fileExists = await exists(filePath); if (fileExists) { if (!opts.overwrite) { - throw createFileSystemProviderError(new Error(localize('fileExists', "File already exists")), FileSystemProviderErrorCode.FileExists); + throw createFileSystemProviderError(localize('fileExists', "File already exists"), FileSystemProviderErrorCode.FileExists); } } else { if (!opts.create) { - throw createFileSystemProviderError(new Error(localize('fileNotExists', "File does not exist")), FileSystemProviderErrorCode.FileNotFound); + throw 
createFileSystemProviderError(localize('fileNotExists', "File does not exist"), FileSystemProviderErrorCode.FileNotFound); } } } @@ -441,13 +461,13 @@ export class DiskFileSystemProvider extends Disposable implements IFileSystemPro } if (isSameResourceWithDifferentPathCase && mode === 'copy') { - throw createFileSystemProviderError(new Error('File cannot be copied to same path with different path case'), FileSystemProviderErrorCode.FileExists); + throw createFileSystemProviderError(localize('fileCopyErrorPathCase', "File cannot be copied to same path with different path case"), FileSystemProviderErrorCode.FileExists); } // handle existing target (unless this is a case change) if (!isSameResourceWithDifferentPathCase && await exists(toFilePath)) { if (!overwrite) { - throw createFileSystemProviderError(new Error('File at target already exists'), FileSystemProviderErrorCode.FileExists); + throw createFileSystemProviderError(localize('fileCopyErrorExists', "File at target already exists"), FileSystemProviderErrorCode.FileExists); } // Delete target diff --git a/src/vs/platform/files/test/node/diskFileService.test.ts b/src/vs/platform/files/test/node/diskFileService.test.ts index ac05f047c80..f322bf90872 100644 --- a/src/vs/platform/files/test/node/diskFileService.test.ts +++ b/src/vs/platform/files/test/node/diskFileService.test.ts @@ -20,13 +20,14 @@ import { NullLogService } from 'vs/platform/log/common/log'; import { isLinux, isWindows } from 'vs/base/common/platform'; import { DisposableStore } from 'vs/base/common/lifecycle'; import { isEqual } from 'vs/base/common/resources'; -import { VSBuffer, VSBufferReadable, streamToBufferReadableStream, VSBufferReadableStream, bufferToReadable, bufferToStream } from 'vs/base/common/buffer'; +import { VSBuffer, VSBufferReadable, streamToBufferReadableStream, VSBufferReadableStream, bufferToReadable, bufferToStream, streamToBuffer } from 'vs/base/common/buffer'; import { find } from 'vs/base/common/arrays'; function 
getByName(root: IFileStat, name: string): IFileStat | undefined { if (root.children === undefined) { return undefined; } + return find(root.children, child => child.name === name); } @@ -57,6 +58,7 @@ export class TestDiskFileSystemProvider extends DiskFileSystemProvider { totalBytesRead: number = 0; private invalidStatSize: boolean = false; + private smallStatSize: boolean = false; private _testCapabilities!: FileSystemProviderCapabilities; get capabilities(): FileSystemProviderCapabilities { @@ -64,6 +66,7 @@ export class TestDiskFileSystemProvider extends DiskFileSystemProvider { this._testCapabilities = FileSystemProviderCapabilities.FileReadWrite | FileSystemProviderCapabilities.FileOpenReadWriteClose | + FileSystemProviderCapabilities.FileReadStream | FileSystemProviderCapabilities.FileFolderCopy; if (isLinux) { @@ -78,8 +81,12 @@ export class TestDiskFileSystemProvider extends DiskFileSystemProvider { this._testCapabilities = capabilities; } - setInvalidStatSize(disabled: boolean): void { - this.invalidStatSize = disabled; + setInvalidStatSize(enabled: boolean): void { + this.invalidStatSize = enabled; + } + + setSmallStatSize(enabled: boolean): void { + this.smallStatSize = enabled; } async stat(resource: URI): Promise { @@ -87,6 +94,8 @@ export class TestDiskFileSystemProvider extends DiskFileSystemProvider { if (this.invalidStatSize) { res.size = String(res.size) as any; // for https://github.com/Microsoft/vscode/issues/72909 + } else if (this.smallStatSize) { + res.size = 1; } return res; @@ -174,7 +183,7 @@ suite('Disk File Service', function () { assert.equal(event!.target!.isDirectory, true); }); - test('createFolder: creating multiple folders at once', async function () { + test('createFolder: creating multiple folders at once', async () => { let event: FileOperationEvent; disposables.add(service.onAfterOperation(e => event = e)); @@ -481,56 +490,56 @@ suite('Disk File Service', function () { setCapabilities(fileProvider, 
FileSystemProviderCapabilities.FileOpenReadWriteClose); setCapabilities(testProvider, FileSystemProviderCapabilities.FileOpenReadWriteClose); - await testMoveAcrossProviders(); + return testMoveAcrossProviders(); }); test('move - across providers (unbuffered => unbuffered)', async () => { setCapabilities(fileProvider, FileSystemProviderCapabilities.FileReadWrite); setCapabilities(testProvider, FileSystemProviderCapabilities.FileReadWrite); - await testMoveAcrossProviders(); + return testMoveAcrossProviders(); }); test('move - across providers (buffered => unbuffered)', async () => { setCapabilities(fileProvider, FileSystemProviderCapabilities.FileOpenReadWriteClose); setCapabilities(testProvider, FileSystemProviderCapabilities.FileReadWrite); - await testMoveAcrossProviders(); + return testMoveAcrossProviders(); }); test('move - across providers (unbuffered => buffered)', async () => { setCapabilities(fileProvider, FileSystemProviderCapabilities.FileReadWrite); setCapabilities(testProvider, FileSystemProviderCapabilities.FileOpenReadWriteClose); - await testMoveAcrossProviders(); + return testMoveAcrossProviders(); }); test('move - across providers - large (buffered => buffered)', async () => { setCapabilities(fileProvider, FileSystemProviderCapabilities.FileOpenReadWriteClose); setCapabilities(testProvider, FileSystemProviderCapabilities.FileOpenReadWriteClose); - await testMoveAcrossProviders('lorem.txt'); + return testMoveAcrossProviders('lorem.txt'); }); test('move - across providers - large (unbuffered => unbuffered)', async () => { setCapabilities(fileProvider, FileSystemProviderCapabilities.FileReadWrite); setCapabilities(testProvider, FileSystemProviderCapabilities.FileReadWrite); - await testMoveAcrossProviders('lorem.txt'); + return testMoveAcrossProviders('lorem.txt'); }); test('move - across providers - large (buffered => unbuffered)', async () => { setCapabilities(fileProvider, FileSystemProviderCapabilities.FileOpenReadWriteClose); 
setCapabilities(testProvider, FileSystemProviderCapabilities.FileReadWrite); - await testMoveAcrossProviders('lorem.txt'); + return testMoveAcrossProviders('lorem.txt'); }); test('move - across providers - large (unbuffered => buffered)', async () => { setCapabilities(fileProvider, FileSystemProviderCapabilities.FileReadWrite); setCapabilities(testProvider, FileSystemProviderCapabilities.FileOpenReadWriteClose); - await testMoveAcrossProviders('lorem.txt'); + return testMoveAcrossProviders('lorem.txt'); }); async function testMoveAcrossProviders(sourceFile = 'index.html'): Promise { @@ -596,28 +605,28 @@ suite('Disk File Service', function () { setCapabilities(fileProvider, FileSystemProviderCapabilities.FileOpenReadWriteClose); setCapabilities(testProvider, FileSystemProviderCapabilities.FileOpenReadWriteClose); - await testMoveFolderAcrossProviders(); + return testMoveFolderAcrossProviders(); }); test('move - directory - across providers (unbuffered => unbuffered)', async () => { setCapabilities(fileProvider, FileSystemProviderCapabilities.FileReadWrite); setCapabilities(testProvider, FileSystemProviderCapabilities.FileReadWrite); - await testMoveFolderAcrossProviders(); + return testMoveFolderAcrossProviders(); }); test('move - directory - across providers (buffered => unbuffered)', async () => { setCapabilities(fileProvider, FileSystemProviderCapabilities.FileOpenReadWriteClose); setCapabilities(testProvider, FileSystemProviderCapabilities.FileReadWrite); - await testMoveFolderAcrossProviders(); + return testMoveFolderAcrossProviders(); }); - test('move - directory - across providers (unbuffered => buffered)', async function () { + test('move - directory - across providers (unbuffered => buffered)', async () => { setCapabilities(fileProvider, FileSystemProviderCapabilities.FileReadWrite); setCapabilities(testProvider, FileSystemProviderCapabilities.FileOpenReadWriteClose); - await testMoveFolderAcrossProviders(); + return testMoveFolderAcrossProviders(); }); 
async function testMoveFolderAcrossProviders(): Promise { @@ -992,6 +1001,10 @@ suite('Disk File Service', function () { assert.equal(source.size, copied.size); }); + test('readFile - small file - default', () => { + return testReadFile(URI.file(join(testDir, 'small.txt'))); + }); + test('readFile - small file - buffered', () => { setCapabilities(fileProvider, FileSystemProviderCapabilities.FileOpenReadWriteClose); @@ -1016,6 +1029,22 @@ suite('Disk File Service', function () { return testReadFile(URI.file(join(testDir, 'small.txt'))); }); + test('readFile - small file - streamed', async () => { + setCapabilities(fileProvider, FileSystemProviderCapabilities.FileReadStream); + + return testReadFile(URI.file(join(testDir, 'small.txt'))); + }); + + test('readFile - small file - streamed / readonly', async () => { + setCapabilities(fileProvider, FileSystemProviderCapabilities.FileReadStream | FileSystemProviderCapabilities.Readonly); + + return testReadFile(URI.file(join(testDir, 'small.txt'))); + }); + + test('readFile - large file - default', async () => { + return testReadFile(URI.file(join(testDir, 'lorem.txt'))); + }); + test('readFile - large file - buffered', async () => { setCapabilities(fileProvider, FileSystemProviderCapabilities.FileOpenReadWriteClose); @@ -1028,35 +1057,69 @@ suite('Disk File Service', function () { return testReadFile(URI.file(join(testDir, 'lorem.txt'))); }); + test('readFile - large file - streamed', async () => { + setCapabilities(fileProvider, FileSystemProviderCapabilities.FileReadStream); + + return testReadFile(URI.file(join(testDir, 'lorem.txt'))); + }); + async function testReadFile(resource: URI): Promise { const content = await service.readFile(resource); assert.equal(content.value.toString(), readFileSync(resource.fsPath)); } - test('readFile - Files are intermingled #38331 - buffered', async () => { + test('readFileStream - small file - default', () => { + return testReadFileStream(URI.file(join(testDir, 'small.txt'))); + }); 
+ + test('readFileStream - small file - buffered', () => { setCapabilities(fileProvider, FileSystemProviderCapabilities.FileOpenReadWriteClose); - let resource1 = URI.file(join(testDir, 'lorem.txt')); - let resource2 = URI.file(join(testDir, 'some_utf16le.css')); + return testReadFileStream(URI.file(join(testDir, 'small.txt'))); + }); - // load in sequence and keep data - const value1 = await service.readFile(resource1); - const value2 = await service.readFile(resource2); + test('readFileStream - small file - unbuffered', async () => { + setCapabilities(fileProvider, FileSystemProviderCapabilities.FileReadWrite); - // load in parallel in expect the same result - const result = await Promise.all([ - service.readFile(resource1), - service.readFile(resource2) - ]); + return testReadFileStream(URI.file(join(testDir, 'small.txt'))); + }); - assert.equal(result[0].value.toString(), value1.value.toString()); - assert.equal(result[1].value.toString(), value2.value.toString()); + test('readFileStream - small file - streamed', async () => { + setCapabilities(fileProvider, FileSystemProviderCapabilities.FileReadStream); + + return testReadFileStream(URI.file(join(testDir, 'small.txt'))); + }); + + async function testReadFileStream(resource: URI): Promise { + const content = await service.readFileStream(resource); + + assert.equal((await streamToBuffer(content.value)).toString(), readFileSync(resource.fsPath)); + } + + test('readFile - Files are intermingled #38331 - default', async () => { + return testFilesNotIntermingled(); + }); + + test('readFile - Files are intermingled #38331 - buffered', async () => { + setCapabilities(fileProvider, FileSystemProviderCapabilities.FileOpenReadWriteClose); + + return testFilesNotIntermingled(); }); test('readFile - Files are intermingled #38331 - unbuffered', async () => { setCapabilities(fileProvider, FileSystemProviderCapabilities.FileReadWrite); + return testFilesNotIntermingled(); + }); + + test('readFile - Files are intermingled 
#38331 - streamed', async () => { + setCapabilities(fileProvider, FileSystemProviderCapabilities.FileReadStream); + + return testFilesNotIntermingled(); + }); + + async function testFilesNotIntermingled() { let resource1 = URI.file(join(testDir, 'lorem.txt')); let resource2 = URI.file(join(testDir, 'some_utf16le.css')); @@ -1072,109 +1135,150 @@ suite('Disk File Service', function () { assert.equal(result[0].value.toString(), value1.value.toString()); assert.equal(result[1].value.toString(), value2.value.toString()); + } + + test('readFile - from position (ASCII) - default', async () => { + return testReadFileFromPositionAscii(); }); test('readFile - from position (ASCII) - buffered', async () => { setCapabilities(fileProvider, FileSystemProviderCapabilities.FileOpenReadWriteClose); + return testReadFileFromPositionAscii(); + }); + + test('readFile - from position (ASCII) - unbuffered', async () => { + setCapabilities(fileProvider, FileSystemProviderCapabilities.FileReadWrite); + + return testReadFileFromPositionAscii(); + }); + + test('readFile - from position (ASCII) - streamed', async () => { + setCapabilities(fileProvider, FileSystemProviderCapabilities.FileReadStream); + + return testReadFileFromPositionAscii(); + }); + + async function testReadFileFromPositionAscii() { const resource = URI.file(join(testDir, 'small.txt')); const contents = await service.readFile(resource, { position: 6 }); assert.equal(contents.value.toString(), 'File'); + } + + test('readFile - from position (with umlaut) - default', async () => { + return testReadFileFromPositionUmlaut(); }); test('readFile - from position (with umlaut) - buffered', async () => { setCapabilities(fileProvider, FileSystemProviderCapabilities.FileOpenReadWriteClose); - const resource = URI.file(join(testDir, 'small_umlaut.txt')); - - const contents = await service.readFile(resource, { position: Buffer.from('Small File with Ü').length }); - - assert.equal(contents.value.toString(), 'mlaut'); + return 
testReadFileFromPositionUmlaut(); }); - test('readFile - from position (ASCII) - unbuffered', async () => { + test('readFile - from position (with umlaut) - unbuffered', async () => { setCapabilities(fileProvider, FileSystemProviderCapabilities.FileReadWrite); - const resource = URI.file(join(testDir, 'small.txt')); + return testReadFileFromPositionUmlaut(); + }); - const contents = await service.readFile(resource, { position: 6 }); + test('readFile - from position (with umlaut) - streamed', async () => { + setCapabilities(fileProvider, FileSystemProviderCapabilities.FileReadStream); - assert.equal(contents.value.toString(), 'File'); + return testReadFileFromPositionUmlaut(); }); - test('readFile - from position (with umlaut) - unbuffered', async () => { - setCapabilities(fileProvider, FileSystemProviderCapabilities.FileReadWrite); - + async function testReadFileFromPositionUmlaut() { const resource = URI.file(join(testDir, 'small_umlaut.txt')); const contents = await service.readFile(resource, { position: Buffer.from('Small File with Ü').length }); assert.equal(contents.value.toString(), 'mlaut'); - }); + } + test('readFile - 3 bytes (ASCII) - default', async () => { + return testReadThreeBytesFromFile(); + }); test('readFile - 3 bytes (ASCII) - buffered', async () => { setCapabilities(fileProvider, FileSystemProviderCapabilities.FileOpenReadWriteClose); - const resource = URI.file(join(testDir, 'small.txt')); - - const contents = await service.readFile(resource, { length: 3 }); - - assert.equal(contents.value.toString(), 'Sma'); + return testReadThreeBytesFromFile(); }); test('readFile - 3 bytes (ASCII) - unbuffered', async () => { setCapabilities(fileProvider, FileSystemProviderCapabilities.FileReadWrite); + return testReadThreeBytesFromFile(); + }); + + test('readFile - 3 bytes (ASCII) - streamed', async () => { + setCapabilities(fileProvider, FileSystemProviderCapabilities.FileReadStream); + + return testReadThreeBytesFromFile(); + }); + + async function 
testReadThreeBytesFromFile() { const resource = URI.file(join(testDir, 'small.txt')); const contents = await service.readFile(resource, { length: 3 }); assert.equal(contents.value.toString(), 'Sma'); + } + + test('readFile - 20000 bytes (large) - default', async () => { + return readLargeFileWithLength(20000); }); test('readFile - 20000 bytes (large) - buffered', async () => { setCapabilities(fileProvider, FileSystemProviderCapabilities.FileOpenReadWriteClose); - const resource = URI.file(join(testDir, 'lorem.txt')); - - const contents = await service.readFile(resource, { length: 20000 }); - - assert.equal(contents.value.byteLength, 20000); + return readLargeFileWithLength(20000); }); test('readFile - 20000 bytes (large) - unbuffered', async () => { setCapabilities(fileProvider, FileSystemProviderCapabilities.FileReadWrite); - const resource = URI.file(join(testDir, 'lorem.txt')); + return readLargeFileWithLength(20000); + }); - const contents = await service.readFile(resource, { length: 20000 }); + test('readFile - 20000 bytes (large) - streamed', async () => { + setCapabilities(fileProvider, FileSystemProviderCapabilities.FileReadStream); + + return readLargeFileWithLength(20000); + }); - assert.equal(contents.value.byteLength, 20000); + test('readFile - 80000 bytes (large) - default', async () => { + return readLargeFileWithLength(80000); }); test('readFile - 80000 bytes (large) - buffered', async () => { setCapabilities(fileProvider, FileSystemProviderCapabilities.FileOpenReadWriteClose); - const resource = URI.file(join(testDir, 'lorem.txt')); - - const contents = await service.readFile(resource, { length: 80000 }); - - assert.equal(contents.value.byteLength, 80000); + return readLargeFileWithLength(80000); }); test('readFile - 80000 bytes (large) - unbuffered', async () => { setCapabilities(fileProvider, FileSystemProviderCapabilities.FileReadWrite); - const resource = URI.file(join(testDir, 'lorem.txt')); + return readLargeFileWithLength(80000); + }); - 
const contents = await service.readFile(resource, { length: 80000 }); + test('readFile - 80000 bytes (large) - streamed', async () => { + setCapabilities(fileProvider, FileSystemProviderCapabilities.FileReadStream); - assert.equal(contents.value.byteLength, 80000); + return readLargeFileWithLength(80000); }); + async function readLargeFileWithLength(length: number) { + const resource = URI.file(join(testDir, 'lorem.txt')); + + const contents = await service.readFile(resource, { length }); + + assert.equal(contents.value.byteLength, length); + } + test('readFile - FILE_IS_DIRECTORY', async () => { const resource = URI.file(join(testDir, 'deep')); @@ -1203,9 +1307,29 @@ suite('Disk File Service', function () { assert.equal(error!.fileOperationResult, FileOperationResult.FILE_NOT_FOUND); }); + test('readFile - FILE_NOT_MODIFIED_SINCE - default', async () => { + return testNotModifiedSince(); + }); + test('readFile - FILE_NOT_MODIFIED_SINCE - buffered', async () => { setCapabilities(fileProvider, FileSystemProviderCapabilities.FileOpenReadWriteClose); + return testNotModifiedSince(); + }); + + test('readFile - FILE_NOT_MODIFIED_SINCE - unbuffered', async () => { + setCapabilities(fileProvider, FileSystemProviderCapabilities.FileReadWrite); + + return testNotModifiedSince(); + }); + + test('readFile - FILE_NOT_MODIFIED_SINCE - streamed', async () => { + setCapabilities(fileProvider, FileSystemProviderCapabilities.FileReadStream); + + return testNotModifiedSince(); + }); + + async function testNotModifiedSince() { const resource = URI.file(join(testDir, 'index.html')); const contents = await service.readFile(resource); @@ -1221,10 +1345,9 @@ suite('Disk File Service', function () { assert.ok(error); assert.equal(error!.fileOperationResult, FileOperationResult.FILE_NOT_MODIFIED_SINCE); assert.equal(fileProvider.totalBytesRead, 0); - }); + } test('readFile - FILE_NOT_MODIFIED_SINCE does not fire wrongly - https://github.com/Microsoft/vscode/issues/72909', async () => { - 
setCapabilities(fileProvider, FileSystemProviderCapabilities.FileOpenReadWriteClose); fileProvider.setInvalidStatSize(true); const resource = URI.file(join(testDir, 'index.html')); @@ -1241,45 +1364,37 @@ suite('Disk File Service', function () { assert.ok(!error); }); - test('readFile - FILE_NOT_MODIFIED_SINCE - unbuffered', async () => { - setCapabilities(fileProvider, FileSystemProviderCapabilities.FileReadWrite); - - const resource = URI.file(join(testDir, 'index.html')); - - const contents = await service.readFile(resource); - fileProvider.totalBytesRead = 0; - - let error: FileOperationError | undefined = undefined; - try { - await service.readFile(resource, { etag: contents.etag }); - } catch (err) { - error = err; - } - - assert.ok(error); - assert.equal(error!.fileOperationResult, FileOperationResult.FILE_NOT_MODIFIED_SINCE); - assert.equal(fileProvider.totalBytesRead, 0); + test('readFile - FILE_EXCEEDS_MEMORY_LIMIT - default', async () => { + return testFileExceedsMemoryLimit(); }); test('readFile - FILE_EXCEEDS_MEMORY_LIMIT - buffered', async () => { setCapabilities(fileProvider, FileSystemProviderCapabilities.FileOpenReadWriteClose); - const resource = URI.file(join(testDir, 'index.html')); - - let error: FileOperationError | undefined = undefined; - try { - await service.readFile(resource, { limits: { memory: 10 } }); - } catch (err) { - error = err; - } - - assert.ok(error); - assert.equal(error!.fileOperationResult, FileOperationResult.FILE_EXCEEDS_MEMORY_LIMIT); + return testFileExceedsMemoryLimit(); }); test('readFile - FILE_EXCEEDS_MEMORY_LIMIT - unbuffered', async () => { setCapabilities(fileProvider, FileSystemProviderCapabilities.FileReadWrite); + return testFileExceedsMemoryLimit(); + }); + + test('readFile - FILE_EXCEEDS_MEMORY_LIMIT - streamed', async () => { + setCapabilities(fileProvider, FileSystemProviderCapabilities.FileReadStream); + + return testFileExceedsMemoryLimit(); + }); + + async function testFileExceedsMemoryLimit() { + await 
doTestFileExceedsMemoryLimit(); + + // Also test when the stat size is wrong + fileProvider.setSmallStatSize(true); + return doTestFileExceedsMemoryLimit(false); + } + + async function doTestFileExceedsMemoryLimit(testTotalBytesRead = true) { const resource = URI.file(join(testDir, 'index.html')); let error: FileOperationError | undefined = undefined; @@ -1291,27 +1406,43 @@ suite('Disk File Service', function () { assert.ok(error); assert.equal(error!.fileOperationResult, FileOperationResult.FILE_EXCEEDS_MEMORY_LIMIT); + + if (testTotalBytesRead) { + assert.equal(fileProvider.totalBytesRead, 0); + } + } + + test('readFile - FILE_TOO_LARGE - default', async () => { + return testFileTooLarge(); }); test('readFile - FILE_TOO_LARGE - buffered', async () => { setCapabilities(fileProvider, FileSystemProviderCapabilities.FileOpenReadWriteClose); - const resource = URI.file(join(testDir, 'index.html')); - - let error: FileOperationError | undefined = undefined; - try { - await service.readFile(resource, { limits: { size: 10 } }); - } catch (err) { - error = err; - } - - assert.ok(error); - assert.equal(error!.fileOperationResult, FileOperationResult.FILE_TOO_LARGE); + return testFileTooLarge(); }); test('readFile - FILE_TOO_LARGE - unbuffered', async () => { setCapabilities(fileProvider, FileSystemProviderCapabilities.FileReadWrite); + return testFileTooLarge(); + }); + + test('readFile - FILE_TOO_LARGE - streamed', async () => { + setCapabilities(fileProvider, FileSystemProviderCapabilities.FileReadStream); + + return testFileTooLarge(); + }); + + async function testFileTooLarge() { + await doTestFileTooLarge(); + + // Also test when the stat size is wrong + fileProvider.setSmallStatSize(true); + return doTestFileTooLarge(); + } + + async function doTestFileTooLarge() { const resource = URI.file(join(testDir, 'index.html')); let error: FileOperationError | undefined = undefined; @@ -1323,7 +1454,7 @@ suite('Disk File Service', function () { assert.ok(error); 
assert.equal(error!.fileOperationResult, FileOperationResult.FILE_TOO_LARGE); - }); + } test('createFile', async () => { assertCreateFile(contents => VSBuffer.fromString(contents)); @@ -1390,71 +1521,51 @@ suite('Disk File Service', function () { assert.equal(event!.target!.resource.fsPath, resource.fsPath); }); - test('writeFile - buffered', async () => { - setCapabilities(fileProvider, FileSystemProviderCapabilities.FileOpenReadWriteClose); - - const resource = URI.file(join(testDir, 'small.txt')); - - const content = readFileSync(resource.fsPath); - assert.equal(content, 'Small File'); - - const newContent = 'Updates to the small file'; - await service.writeFile(resource, VSBuffer.fromString(newContent)); - - assert.equal(readFileSync(resource.fsPath), newContent); + test('writeFile - default', async () => { + return testWriteFile(); }); - test('writeFile (large file) - buffered', async () => { + test('writeFile - buffered', async () => { setCapabilities(fileProvider, FileSystemProviderCapabilities.FileOpenReadWriteClose); - const resource = URI.file(join(testDir, 'lorem.txt')); - - const content = readFileSync(resource.fsPath); - const newContent = content.toString() + content.toString(); + return testWriteFile(); + }); - const fileStat = await service.writeFile(resource, VSBuffer.fromString(newContent)); - assert.equal(fileStat.name, 'lorem.txt'); + test('writeFile - unbuffered', async () => { + setCapabilities(fileProvider, FileSystemProviderCapabilities.FileReadWrite); - assert.equal(readFileSync(resource.fsPath), newContent); + return testWriteFile(); }); - test('writeFile - buffered - readonly throws', async () => { - setCapabilities(fileProvider, FileSystemProviderCapabilities.FileOpenReadWriteClose | FileSystemProviderCapabilities.Readonly); - + async function testWriteFile() { const resource = URI.file(join(testDir, 'small.txt')); const content = readFileSync(resource.fsPath); assert.equal(content, 'Small File'); const newContent = 'Updates to the small 
file'; + await service.writeFile(resource, VSBuffer.fromString(newContent)); - let error: Error; - try { - await service.writeFile(resource, VSBuffer.fromString(newContent)); - } catch (err) { - error = err; - } + assert.equal(readFileSync(resource.fsPath), newContent); + } - assert.ok(error!); + test('writeFile (large file) - default', async () => { + return testWriteFileLarge(); }); - test('writeFile - unbuffered', async () => { - setCapabilities(fileProvider, FileSystemProviderCapabilities.FileReadWrite); - - const resource = URI.file(join(testDir, 'small.txt')); - - const content = readFileSync(resource.fsPath); - assert.equal(content, 'Small File'); - - const newContent = 'Updates to the small file'; - await service.writeFile(resource, VSBuffer.fromString(newContent)); + test('writeFile (large file) - buffered', async () => { + setCapabilities(fileProvider, FileSystemProviderCapabilities.FileOpenReadWriteClose); - assert.equal(readFileSync(resource.fsPath), newContent); + return testWriteFileLarge(); }); test('writeFile (large file) - unbuffered', async () => { setCapabilities(fileProvider, FileSystemProviderCapabilities.FileReadWrite); + return testWriteFileLarge(); + }); + + async function testWriteFileLarge() { const resource = URI.file(join(testDir, 'lorem.txt')); const content = readFileSync(resource.fsPath); @@ -1464,11 +1575,21 @@ suite('Disk File Service', function () { assert.equal(fileStat.name, 'lorem.txt'); assert.equal(readFileSync(resource.fsPath), newContent); + } + + test('writeFile - buffered - readonly throws', async () => { + setCapabilities(fileProvider, FileSystemProviderCapabilities.FileOpenReadWriteClose | FileSystemProviderCapabilities.Readonly); + + return testWriteFileReadonlyThrows(); }); test('writeFile - unbuffered - readonly throws', async () => { setCapabilities(fileProvider, FileSystemProviderCapabilities.FileReadWrite | FileSystemProviderCapabilities.Readonly); + return testWriteFileReadonlyThrows(); + }); + + async function 
testWriteFileReadonlyThrows() { const resource = URI.file(join(testDir, 'small.txt')); const content = readFileSync(resource.fsPath); @@ -1484,7 +1605,7 @@ suite('Disk File Service', function () { } assert.ok(error!); - }); + } test('writeFile (large file) - multiple parallel writes queue up', async () => { const resource = URI.file(join(testDir, 'lorem.txt')); @@ -1501,37 +1622,23 @@ suite('Disk File Service', function () { assert.ok(['0', '00', '000', '0000', '00000'].some(offset => fileContent === offset + newContent)); }); - test('writeFile (readable) - buffered', async () => { - setCapabilities(fileProvider, FileSystemProviderCapabilities.FileOpenReadWriteClose); - - const resource = URI.file(join(testDir, 'small.txt')); - - const content = readFileSync(resource.fsPath); - assert.equal(content, 'Small File'); - - const newContent = 'Updates to the small file'; - await service.writeFile(resource, toLineByLineReadable(newContent)); - - assert.equal(readFileSync(resource.fsPath), newContent); + test('writeFile (readable) - default', async () => { + return testWriteFileReadable(); }); - test('writeFile (large file - readable) - buffered', async () => { + test('writeFile (readable) - buffered', async () => { setCapabilities(fileProvider, FileSystemProviderCapabilities.FileOpenReadWriteClose); - const resource = URI.file(join(testDir, 'lorem.txt')); - - const content = readFileSync(resource.fsPath); - const newContent = content.toString() + content.toString(); - - const fileStat = await service.writeFile(resource, toLineByLineReadable(newContent)); - assert.equal(fileStat.name, 'lorem.txt'); - - assert.equal(readFileSync(resource.fsPath), newContent); + return testWriteFileReadable(); }); test('writeFile (readable) - unbuffered', async () => { setCapabilities(fileProvider, FileSystemProviderCapabilities.FileReadWrite); + return testWriteFileReadable(); + }); + + async function testWriteFileReadable() { const resource = URI.file(join(testDir, 'small.txt')); const 
content = readFileSync(resource.fsPath); @@ -1541,11 +1648,25 @@ suite('Disk File Service', function () { await service.writeFile(resource, toLineByLineReadable(newContent)); assert.equal(readFileSync(resource.fsPath), newContent); + } + + test('writeFile (large file - readable) - default', async () => { + return testWriteFileLargeReadable(); + }); + + test('writeFile (large file - readable) - buffered', async () => { + setCapabilities(fileProvider, FileSystemProviderCapabilities.FileOpenReadWriteClose); + + return testWriteFileLargeReadable(); }); test('writeFile (large file - readable) - unbuffered', async () => { setCapabilities(fileProvider, FileSystemProviderCapabilities.FileReadWrite); + return testWriteFileLargeReadable(); + }); + + async function testWriteFileLargeReadable() { const resource = URI.file(join(testDir, 'lorem.txt')); const content = readFileSync(resource.fsPath); @@ -1555,11 +1676,25 @@ suite('Disk File Service', function () { assert.equal(fileStat.name, 'lorem.txt'); assert.equal(readFileSync(resource.fsPath), newContent); + } + + test('writeFile (stream) - default', async () => { + return testWriteFileStream(); }); test('writeFile (stream) - buffered', async () => { setCapabilities(fileProvider, FileSystemProviderCapabilities.FileOpenReadWriteClose); + return testWriteFileStream(); + }); + + test('writeFile (stream) - unbuffered', async () => { + setCapabilities(fileProvider, FileSystemProviderCapabilities.FileReadWrite); + + return testWriteFileStream(); + }); + + async function testWriteFileStream() { const source = URI.file(join(testDir, 'small.txt')); const target = URI.file(join(testDir, 'small-copy.txt')); @@ -1567,35 +1702,25 @@ suite('Disk File Service', function () { assert.equal(fileStat.name, 'small-copy.txt'); assert.equal(readFileSync(source.fsPath).toString(), readFileSync(target.fsPath).toString()); + } + + test('writeFile (large file - stream) - default', async () => { + return testWriteFileLargeStream(); }); test('writeFile 
(large file - stream) - buffered', async () => { setCapabilities(fileProvider, FileSystemProviderCapabilities.FileOpenReadWriteClose); - const source = URI.file(join(testDir, 'lorem.txt')); - const target = URI.file(join(testDir, 'lorem-copy.txt')); - - const fileStat = await service.writeFile(target, streamToBufferReadableStream(createReadStream(source.fsPath))); - assert.equal(fileStat.name, 'lorem-copy.txt'); - - assert.equal(readFileSync(source.fsPath).toString(), readFileSync(target.fsPath).toString()); + return testWriteFileLargeStream(); }); - test('writeFile (stream) - unbuffered', async () => { + test('writeFile (large file - stream) - unbuffered', async () => { setCapabilities(fileProvider, FileSystemProviderCapabilities.FileReadWrite); - const source = URI.file(join(testDir, 'small.txt')); - const target = URI.file(join(testDir, 'small-copy.txt')); - - const fileStat = await service.writeFile(target, streamToBufferReadableStream(createReadStream(source.fsPath))); - assert.equal(fileStat.name, 'small-copy.txt'); - - assert.equal(readFileSync(source.fsPath).toString(), readFileSync(target.fsPath).toString()); + return testWriteFileLargeStream(); }); - test('writeFile (large file - stream) - unbuffered', async () => { - setCapabilities(fileProvider, FileSystemProviderCapabilities.FileReadWrite); - + async function testWriteFileLargeStream() { const source = URI.file(join(testDir, 'lorem.txt')); const target = URI.file(join(testDir, 'lorem-copy.txt')); @@ -1603,7 +1728,7 @@ suite('Disk File Service', function () { assert.equal(fileStat.name, 'lorem-copy.txt'); assert.equal(readFileSync(source.fsPath).toString(), readFileSync(target.fsPath).toString()); - }); + } test('writeFile (file is created including parents)', async () => { const resource = URI.file(join(testDir, 'other', 'newfile.txt')); diff --git a/src/vs/workbench/services/userData/common/fileUserDataProvider.ts b/src/vs/workbench/services/userData/common/fileUserDataProvider.ts index 
cef3768ec96..9021b0eb963 100644 --- a/src/vs/workbench/services/userData/common/fileUserDataProvider.ts +++ b/src/vs/workbench/services/userData/common/fileUserDataProvider.ts @@ -5,14 +5,19 @@ import { Event, Emitter } from 'vs/base/common/event'; import { Disposable, IDisposable } from 'vs/base/common/lifecycle'; -import { IFileSystemProviderWithFileReadWriteCapability, IFileChange, IWatchOptions, IStat, FileOverwriteOptions, FileType, FileWriteOptions, FileDeleteOptions, FileSystemProviderCapabilities, IFileSystemProviderWithOpenReadWriteCloseCapability, FileOpenOptions, hasReadWriteCapability, hasOpenReadWriteCloseCapability } from 'vs/platform/files/common/files'; +import { IFileSystemProviderWithFileReadWriteCapability, IFileChange, IWatchOptions, IStat, FileOverwriteOptions, FileType, FileWriteOptions, FileDeleteOptions, FileSystemProviderCapabilities, IFileSystemProviderWithOpenReadWriteCloseCapability, FileOpenOptions, hasReadWriteCapability, hasOpenReadWriteCloseCapability, IFileSystemProviderWithFileReadStreamCapability, FileReadStreamOptions, hasFileReadStreamCapability } from 'vs/platform/files/common/files'; import { URI } from 'vs/base/common/uri'; import * as resources from 'vs/base/common/resources'; import { startsWith } from 'vs/base/common/strings'; import { BACKUPS } from 'vs/platform/environment/common/environment'; import { IWorkbenchEnvironmentService } from 'vs/workbench/services/environment/common/environmentService'; +import { CancellationToken } from 'vs/base/common/cancellation'; +import { ReadableStreamEvents } from 'vs/base/common/stream'; -export class FileUserDataProvider extends Disposable implements IFileSystemProviderWithFileReadWriteCapability, IFileSystemProviderWithOpenReadWriteCloseCapability { +export class FileUserDataProvider extends Disposable implements + IFileSystemProviderWithFileReadWriteCapability, + IFileSystemProviderWithOpenReadWriteCloseCapability, + IFileSystemProviderWithFileReadStreamCapability { readonly 
capabilities: FileSystemProviderCapabilities = this.fileSystemProvider.capabilities; readonly onDidChangeCapabilities: Event = Event.None; @@ -60,6 +65,13 @@ export class FileUserDataProvider extends Disposable implements IFileSystemProvi throw new Error('not supported'); } + readFileStream(resource: URI, opts: FileReadStreamOptions, token?: CancellationToken): ReadableStreamEvents { + if (hasFileReadStreamCapability(this.fileSystemProvider)) { + return this.fileSystemProvider.readFileStream(this.toFileSystemResource(resource), opts, token); + } + throw new Error('not supported'); + } + readdir(resource: URI): Promise<[string, FileType][]> { return this.fileSystemProvider.readdir(this.toFileSystemResource(resource)); } -- GitLab