Commit 14b71413 authored by Benjamin Pasero

storage - improve perf by chunking INSERTS/DELETES

Parent e9bb33f0
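
In short: instead of preparing the single-row statement `INSERT INTO ItemTable VALUES (?,?)` once and calling `stmt.run` for every key/value pair, updates are now batched into multi-row statements whose parameter count is capped by a host-parameter limit. A minimal before/after sketch of the idea, assuming a hypothetical `run(sql, params)` stub in place of the real prepared-statement plumbing:

const run = (sql: string, params: string[]) => console.log(sql, '<-', params.length, 'parameters'); // hypothetical stand-in

const items = new Map<string, string>([['k1', 'v1'], ['k2', 'v2'], ['k3', 'v3']]);

// Before: the same single-row statement is executed once per entry.
items.forEach((value, key) => run('INSERT INTO ItemTable VALUES (?,?)', [key, value]));

// After: one multi-row statement per batch (further chunked in the diff below
// whenever the parameter count would exceed MAX_HOST_PARAMETERS).
const params: string[] = [];
items.forEach((value, key) => params.push(key, value));
run(`INSERT INTO ItemTable VALUES ${new Array(items.size).fill('(?,?)').join(',')}`, params);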
@@ -439,9 +439,9 @@ export function range(arg: number, to?: number): number[] {
return result;
}
export function fill<T>(num: number, valueFn: () => T, arr: T[] = []): T[] {
export function fill<T>(num: number, value: T, arr: T[] = []): T[] {
for (let i = 0; i < num; i++) {
arr[i] = valueFn();
arr[i] = value;
}
return arr;
......
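
The `fill` helper now repeats a plain value instead of calling a factory for every slot, which is all the storage change needs: it only ever repeats the constant placeholder strings '(?,?)' and '?'. A small usage sketch of the new signature:

import { fill } from 'vs/base/common/arrays';

const groups = fill(3, '(?,?)');   // ['(?,?)', '(?,?)', '(?,?)']
console.log(`INSERT INTO ItemTable VALUES ${groups.join(',')}`);
// INSERT INTO ItemTable VALUES (?,?),(?,?),(?,?)

Callers that relied on producing a fresh value per slot (like the stableSort test further down) now build their arrays with a local helper instead.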
@@ -12,6 +12,7 @@ import { mapToString, setToString } from 'vs/base/common/map';
import { basename } from 'path';
import { mark } from 'vs/base/common/performance';
import { copy, renameIgnoreError, unlink } from 'vs/base/node/pfs';
import { fill } from 'vs/base/common/arrays';
export enum StorageHint {
@@ -328,6 +329,7 @@ export class SQLiteStorageDatabase implements IStorageDatabase {
private static measuredRequireDuration: boolean; // TODO@Ben remove me after a while
private static BUSY_OPEN_TIMEOUT = 2000; // timeout in ms to retry when opening DB fails with SQLITE_BUSY
private static MAX_HOST_PARAMETERS = 256; // maximum number of parameters within a statement
private path: string;
private name: string;
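
MAX_HOST_PARAMETERS = 256 caps how many bound parameters a single generated statement carries, staying well below SQLite's compile-time SQLITE_MAX_VARIABLE_NUMBER limit (historically 999 by default). Rough per-statement capacity under that cap, noting that the chunking below checks the limit before pushing, so a chunk can end up slightly larger:

const MAX_HOST_PARAMETERS = 256;
const rowsPerInsert = Math.floor(MAX_HOST_PARAMETERS / 2); // 2 placeholders per row -> roughly 128 rows per INSERT
const keysPerDelete = MAX_HOST_PARAMETERS;                 // 1 placeholder per key -> roughly 256 keys per DELETE
console.log(rowsPerInsert, keysPerDelete);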
@@ -383,35 +385,71 @@ export class SQLiteStorageDatabase implements IStorageDatabase {
}
return this.transaction(connection, () => {
// INSERT
if (request.insert && request.insert.size > 0) {
this.prepare(connection, 'INSERT INTO ItemTable VALUES (?,?)', stmt => {
request.insert!.forEach((value, key) => {
stmt.run([key, value]);
});
}, () => {
const keys: string[] = [];
let length = 0;
request.insert!.forEach((value, key) => {
keys.push(key);
length += value.length;
});
const keysValuesChunks: (string[])[] = [];
keysValuesChunks.push([]); // seed with initial empty chunk
// Split key/values into chunks of SQLiteStorageDatabase.MAX_HOST_PARAMETERS
// so that we can efficiently run the INSERT with as many HOST parameters as possible
let currentChunkIndex = 0;
request.insert.forEach((value, key) => {
let keyValueChunk = keysValuesChunks[currentChunkIndex];
if (keyValueChunk.length > SQLiteStorageDatabase.MAX_HOST_PARAMETERS) {
currentChunkIndex++;
keyValueChunk = [];
keysValuesChunks.push(keyValueChunk);
}
return `Keys: ${keys.join(', ')} Length: ${length}`;
keyValueChunk.push(key, value);
});
keysValuesChunks.forEach(keysValuesChunk => {
this.prepare(connection, `INSERT INTO ItemTable VALUES ${fill(keysValuesChunk.length / 2, '(?,?)').join(',')}`, stmt => stmt.run(keysValuesChunk), () => {
const keys: string[] = [];
let length = 0;
request.insert!.forEach((value, key) => {
keys.push(key);
length += value.length;
});
return `Keys: ${keys.join(', ')} Length: ${length}`;
});
});
}
// DELETE
if (request.delete && request.delete.size) {
this.prepare(connection, 'DELETE FROM ItemTable WHERE key=?', stmt => {
request.delete!.forEach(key => {
stmt.run(key);
});
}, () => {
const keys: string[] = [];
request.delete!.forEach(key => {
keys.push(key);
});
const keysChunks: (string[])[] = [];
keysChunks.push([]); // seed with initial empty chunk
// Split keys into chunks of SQLiteStorageDatabase.MAX_HOST_PARAMETERS
// so that we can efficiently run the DELETE with as many HOST parameters
// as possible
let currentChunkIndex = 0;
request.delete.forEach(key => {
let keyChunk = keysChunks[currentChunkIndex];
if (keyChunk.length > SQLiteStorageDatabase.MAX_HOST_PARAMETERS) {
currentChunkIndex++;
keyChunk = [];
keysChunks.push(keyChunk);
}
keyChunk.push(key);
});
keysChunks.forEach(keysChunk => {
this.prepare(connection, `DELETE FROM ItemTable WHERE key IN (${fill(keysChunk.length, '?').join(',')})`, stmt => stmt.run(keysChunk), () => {
const keys: string[] = [];
request.delete!.forEach(key => {
keys.push(key);
});
return `Keys: ${keys.join(', ')}`;
return `Keys: ${keys.join(', ')}`;
});
});
}
});
......
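
Taken together, the hunk above does two things per update: split the pending keys/values into chunks of at most (roughly) MAX_HOST_PARAMETERS bound values, then generate one multi-row INSERT or one IN-clause DELETE per chunk. A self-contained sketch of that logic with hypothetical names (chunkKeysValues, buildInsertSql, chunkKeys, buildDeleteSql) and no sqlite3 dependency; it only builds the SQL text that the real code passes to prepare:

const MAX_HOST_PARAMETERS = 256;

// Split a key/value map into flat [key1, value1, key2, value2, ...] chunks,
// mirroring the INSERT path above.
function chunkKeysValues(insert: Map<string, string>): string[][] {
	const chunks: string[][] = [[]]; // seed with an initial empty chunk
	let index = 0;

	insert.forEach((value, key) => {
		let chunk = chunks[index];
		if (chunk.length > MAX_HOST_PARAMETERS) {
			index++;
			chunk = [];
			chunks.push(chunk);
		}
		chunk.push(key, value);
	});

	return chunks;
}

function buildInsertSql(chunk: string[]): string {
	const groups = new Array(chunk.length / 2).fill('(?,?)'); // one group per key/value pair
	return `INSERT INTO ItemTable VALUES ${groups.join(',')}`;
}

// Split a key set into chunks, mirroring the DELETE path above.
function chunkKeys(toDelete: Set<string>): string[][] {
	const chunks: string[][] = [[]];
	let index = 0;

	toDelete.forEach(key => {
		let chunk = chunks[index];
		if (chunk.length > MAX_HOST_PARAMETERS) {
			index++;
			chunk = [];
			chunks.push(chunk);
		}
		chunk.push(key);
	});

	return chunks;
}

function buildDeleteSql(chunk: string[]): string {
	return `DELETE FROM ItemTable WHERE key IN (${new Array(chunk.length).fill('?').join(',')})`;
}

// Example: each chunk becomes one statement with its own flat parameter array.
const insert = new Map<string, string>();
for (let i = 0; i < 300; i++) {
	insert.set(`key${i}`, `value${i}`);
}
chunkKeysValues(insert).forEach(chunk => console.log(buildInsertSql(chunk), '<-', chunk.length, 'parameters'));
chunkKeys(new Set(insert.keys())).forEach(chunk => console.log(buildDeleteSql(chunk), '<-', chunk.length, 'parameters'));

Using the repository's fill helper from vs/base/common/arrays instead of new Array(...).fill(...) would be equivalent; the sketch avoids the import to stay standalone.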
@@ -32,8 +32,16 @@ suite('Arrays', () => {
});
test('stableSort', () => {
function fill<T>(num: number, valueFn: () => T, arr: T[] = []): T[] {
for (let i = 0; i < num; i++) {
arr[i] = valueFn();
}
return arr;
}
let counter = 0;
let data = arrays.fill(10000, () => ({ n: 1, m: counter++ }));
let data = fill(10000, () => ({ n: 1, m: counter++ }));
arrays.mergeSort(data, (a, b) => a.n - b.n);
......
@@ -737,4 +737,68 @@ suite('SQLite Storage Library', () => {
await del(storageDir, tmpdir());
});
test('lots of INSERT & DELETE (below inline max)', async () => {
const storageDir = uniqueStorageDir();
await mkdirp(storageDir);
const storage = new SQLiteStorageDatabase(join(storageDir, 'storage.db'));
const items = new Map<string, string>();
const keys: Set<string> = new Set<string>();
for (let i = 0; i < 200; i++) {
const uuid = generateUuid();
const key = `key: ${uuid}`;
items.set(key, `value: ${uuid}`);
keys.add(key);
}
await storage.updateItems({ insert: items });
let storedItems = await storage.getItems();
equal(storedItems.size, items.size);
await storage.updateItems({ delete: keys });
storedItems = await storage.getItems();
equal(storedItems.size, 0);
await storage.close();
await del(storageDir, tmpdir());
});
test('lots of INSERT & DELETE (above inline max)', async () => {
const storageDir = uniqueStorageDir();
await mkdirp(storageDir);
const storage = new SQLiteStorageDatabase(join(storageDir, 'storage.db'));
const items = new Map<string, string>();
const keys: Set<string> = new Set<string>();
for (let i = 0; i < 400; i++) {
const uuid = generateUuid();
const key = `key: ${uuid}`;
items.set(key, `value: ${uuid}`);
keys.add(key);
}
await storage.updateItems({ insert: items });
let storedItems = await storage.getItems();
equal(storedItems.size, items.size);
await storage.updateItems({ delete: keys });
storedItems = await storage.getItems();
equal(storedItems.size, 0);
await storage.close();
await del(storageDir, tmpdir());
});
});
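
The two test sizes bracket the chunk boundary: with MAX_HOST_PARAMETERS = 256, 200 keys fit into a single DELETE statement while 400 keys need more than one, and both INSERT payloads (two parameters per item) are split across several statements. A small calculator that re-applies the chunking condition instead of hard-coding expected counts (countStatements is a hypothetical helper, not part of the test suite):

// entriesPerItem is 2 for the INSERT path (key + value) and 1 for DELETE (key only).
function countStatements(itemCount: number, entriesPerItem: number, maxHostParameters = 256): number {
	let statements = 1;
	let chunkLength = 0;
	for (let i = 0; i < itemCount; i++) {
		if (chunkLength > maxHostParameters) { // same check as the chunking code: runs before pushing
			statements++;
			chunkLength = 0;
		}
		chunkLength += entriesPerItem;
	}
	return statements;
}

console.log('200 items ->', countStatements(200, 2), 'INSERT statement(s),', countStatements(200, 1), 'DELETE statement(s)');
console.log('400 items ->', countStatements(400, 2), 'INSERT statement(s),', countStatements(400, 1), 'DELETE statement(s)');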