Merged
48 commits
c361764
indexeddb: improve allDocs() perf with skip & key ranges
Feb 15, 2023
e0dfb7a
Rename index
Feb 17, 2023
6c9f86d
Merge branch 'master' into indexeddb-faster-alldocs
alxndrsn Aug 30, 2023
dbe2405
Merge branch 'master' into indexeddb-faster-alldocs
alxndrsn Sep 13, 2023
44a0a15
Merge branch 'master' into indexeddb-faster-alldocs
alxndrsn Oct 7, 2023
6eb3972
Merge branch 'master' into indexeddb-faster-alldocs
alxndrsn Oct 11, 2023
9bc7927
fix eol-last
Oct 11, 2023
42cc853
add migration
Oct 11, 2023
dd6ed4c
Use isLocalId()
Oct 16, 2023
796e1e4
Merge branch 'master' into indexeddb-faster-alldocs
alxndrsn Oct 18, 2023
f470aa7
Merge branch 'master' into indexeddb-faster-alldocs
alxndrsn Dec 11, 2023
8d72af6
rename META_STORE to META_LOCAL_STORE
Dec 11, 2023
7f3e980
get(): use isLocalId()
Dec 11, 2023
8b95c7f
Implement getLocal()
Dec 11, 2023
7d468da
Revert "Implement getLocal()"
Dec 11, 2023
e2ea6ee
Implement getLocal()
Dec 11, 2023
1cde95c
Merge branch 'master' into indexeddb-faster-alldocs
alxndrsn Dec 30, 2023
2187822
mocha v10: fix failure handling
Dec 31, 2023
d50645d
mocha v10: fix failure handling
Dec 31, 2023
85fbca8
indexeddb._getLocal(): process attachments
Dec 31, 2023
dec066a
Merge branch 'master' into indexeddb-faster-alldocs
Mar 24, 2024
02a837c
more comment
Mar 24, 2024
ad652f8
Merge remote-tracking branch 'upstream/master' into indexeddb-faster-…
Mar 24, 2024
1ec03cd
Merge remote-tracking branch 'upstream/master' into indexeddb-faster-…
Mar 25, 2024
9d518e8
Resolve REVIEW comment
Mar 25, 2024
81ed8d5
getRevisionTree(): only open DOC_STORE
Mar 25, 2024
06c2b26
fetchResults() batch
Mar 25, 2024
b9992fc
Add a big test
Mar 25, 2024
32ab14a
add some more to the test
Mar 25, 2024
9f3399c
lint
Mar 25, 2024
f86dd92
getLocal(): remove use strict & attachment support
Mar 27, 2024
f889598
lint
Mar 27, 2024
dd62ea2
Revert "getLocal(): remove use strict & attachment support"
Mar 27, 2024
213d8c4
Revert "lint"
Mar 27, 2024
aed6f98
remove use strict
Mar 28, 2024
56a17f4
use const
Mar 28, 2024
18f706f
Merge remote-tracking branch 'upstream/master' into indexeddb-faster-…
Apr 2, 2024
f9a5ba3
delete seq index; don't create other index
Apr 3, 2024
cb90ef3
don't delete seq - seems to be required somewhere
Apr 3, 2024
179cc49
delete seq; maintain _isDeleted_id
Apr 3, 2024
a8b0c63
Merge remote-tracking branch 'upstream/master' into indexeddb-faster-…
Apr 3, 2024
4fd2f51
Merge remote-tracking branch 'upstream/master' into indexeddb-faster-…
Apr 4, 2024
8c0b977
allDocs: don't recurse
Apr 5, 2024
063c725
rename deleted,id db index
Apr 5, 2024
0b06fbc
Merge remote-tracking branch 'upstream/master' into indexeddb-faster-…
Apr 8, 2024
569557d
new test: make more robust in CI
Apr 8, 2024
1412422
Update/add tests for local doc attachments
Apr 9, 2024
d8d9f42
update github issue link
Apr 9, 2024
231 changes: 133 additions & 98 deletions packages/node_modules/pouchdb-adapter-indexeddb/src/allDocs.js
@@ -1,6 +1,5 @@
'use strict';

import { isLocalId } from 'pouchdb-adapter-utils';
import { createError, IDB_ERROR } from 'pouchdb-errors';
import { collectConflicts } from 'pouchdb-merge';

@@ -27,33 +26,18 @@ function allDocsKeys(keys, docStore, allDocsInner) {
});
}

function createKeyRange(start, end, inclusiveEnd, key, descending) {
function createKeyRange(start, end, inclusiveStart, inclusiveEnd, key, descending) {
try {
if (start && end) {
if (descending) {
return IDBKeyRange.bound(end, start, !inclusiveEnd, false);
} else {
return IDBKeyRange.bound(start, end, false, !inclusiveEnd);
}
} else if (start) {
if (descending) {
return IDBKeyRange.upperBound(start);
} else {
return IDBKeyRange.lowerBound(start);
}
} else if (end) {
if (descending) {
return IDBKeyRange.lowerBound(end, !inclusiveEnd);
} else {
return IDBKeyRange.upperBound(end, !inclusiveEnd);
}
} else if (key) {
return IDBKeyRange.only(key);
if (key) {
return IDBKeyRange.only([0, key]);
} else if (descending) {
return IDBKeyRange.bound(end, start, !inclusiveEnd, !inclusiveStart);
} else {
return IDBKeyRange.bound(start, end, !inclusiveStart, !inclusiveEnd);
}
} catch (e) {
return {error: e};
}
return null;
}

function handleKeyRangeError(opts, metadata, err, callback) {
@@ -96,31 +80,46 @@ export default function (txn, metadata, opts, callback) {
var results = [];
var processing = [];

var start = 'startkey' in opts ? opts.startkey : false;
var end = 'endkey' in opts ? opts.endkey : false;
var key = 'key' in opts ? opts.key : false;
var keys = 'keys' in opts ? opts.keys : false;
var skip = opts.skip || 0;
var limit = typeof opts.limit === 'number' ? opts.limit : -1;
var limit = typeof opts.limit === 'number' ? opts.limit : undefined;
var inclusiveEnd = opts.inclusive_end !== false;
var descending = 'descending' in opts && opts.descending ? 'prev' : null;

var keyRange;
if (!keys) {
keyRange = createKeyRange(start, end, inclusiveEnd, key, descending);
if (keyRange && keyRange.error) {
return handleKeyRangeError(opts, metadata, keyRange.error, callback);
}
}
var start = 'startkey' in opts ? opts.startkey : (descending ? '\uffff' : '');
var end = 'endkey' in opts ? opts.endkey : (descending ? '' : '\uffff');

var docStore = txn.txn.objectStore(DOC_STORE);

txn.txn.oncomplete = onTxnComplete;

if (keys) {
return allDocsKeys(opts.keys, docStore, allDocsInner);
txn.txn.oncomplete = onTxnComplete;
const allDocsInner = doc => {
if (doc.error) {
return results.push(doc);
}

const row = { id:doc.id, key:doc.id, value:{ rev:doc.rev } };

if (doc.deleted) {
row.value.deleted = true;
row.doc = null;
} else if (opts.include_docs) {
include_doc(row, doc);
}

results.push(row);
};
return allDocsKeys(keys, docStore, allDocsInner);
}

let keyRange = createKeyRange([0, start], [0, end], true, inclusiveEnd, key, descending);
if (keyRange.error) {
return handleKeyRangeError(opts, metadata, keyRange.error, callback);
}

// txn.oncomplete must be set AFTER key-range-error is generated
txn.txn.oncomplete = onTxnComplete;

function include_doc(row, doc) {
var docData = doc.revs[doc.rev].data;

@@ -141,79 +140,115 @@
}
}

function allDocsInner(doc) {
if (doc.error && keys) {
// key was not found with "keys" requests
results.push(doc);
return true;
}

var row = {
id: doc.id,
key: doc.id,
value: {
rev: doc.rev
}
function onTxnComplete() {
const returnVal = {
total_rows: metadata.doc_count,
offset: 0,
rows: results
};
/* istanbul ignore if */
if (opts.update_seq) {
returnVal.update_seq = metadata.seq;
}

var deleted = doc.deleted;
if (deleted) {
if (keys) {
results.push(row);
row.value.deleted = true;
row.doc = null;
}
} else if (skip-- <= 0) {
results.push(row);
if (opts.include_docs) {
include_doc(row, doc);
}
if (--limit === 0) {
return false;
}
if (processing.length) {
Promise.all(processing).then(function () {
callback(null, returnVal);
});
} else {
callback(null, returnVal);
}
return true;
}

function onTxnComplete() {
Promise.all(processing).then(function () {
var returnVal = {
total_rows: metadata.doc_count,
offset: 0,
rows: results
};
const dbIndex = docStore.index('_isDeleted_id');

if (!skip && !limit) {
fetchResults();
} else {
let firstKey;
let limitKey = limit > 0;

/* istanbul ignore if */
if (opts.update_seq) {
returnVal.update_seq = metadata.seq;
dbIndex.openKeyCursor(keyRange, descending || 'next').onsuccess = (e) => {
const cursor = e.target.result;

if (skip) {
if (!cursor) { return txn.txn.commit(); }
cursor.advance(skip);
skip = 0;
return;
}
callback(null, returnVal);
});
}

var cursor = descending ?
docStore.openCursor(keyRange, descending) :
docStore.openCursor(keyRange);
if (firstKey === undefined) {
firstKey = cursor && cursor.key;
if (!firstKey) { return txn.txn.commit(); }
}

cursor.onsuccess = function (e) {
if (limit) {
if (limit > 1 && cursor) {
cursor.advance(limit - 1);
limit = undefined;
return;
}
limit = undefined;
}

var doc = e.target.result && e.target.result.value;

// Happens if opts does not have limit,
// because cursor will end normally then,
// when all docs are retrieved.
// Would not be needed, if getAll() optimization was used like in #6059
if (!doc) { return; }
if (limitKey) {
limitKey = cursor && cursor.key;
}
if (!limitKey) {
limitKey = descending ? keyRange.lower : keyRange.upper;
}

// Skip local docs
if (isLocalId(doc.id)) {
return e.target.result.continue();
}
keyRange = createKeyRange(firstKey, limitKey, true, inclusiveEnd, key, descending);
if (keyRange.error) {
txn.txn.abort();
return handleKeyRangeError(opts, metadata, keyRange.error, callback);
}

var continueCursor = allDocsInner(doc);
if (continueCursor) {
e.target.result.continue();
}
};
fetchResults();
};
}

function fetchResults() {
// There is a risk here with getting all results into memory - if they have multiple
// revs, then we risk loading loads of extra data which is then discarded. This is
// reduced by batching. This also loads unused data when include_docs is false.
//
// Current batch size is quite arbitrary, but seems like (1) more than a typical
// result size, and (2) not so big it's likely to cause issues.
const batchSize = 100;

fetchNextBatch();

function fetchNextBatch() {
dbIndex.getAll(keyRange, batchSize).onsuccess = (e) => {
const batch = e.target.result;
for (let i=0; i<batch.length; ++i) {
const doc = batch[i];
const row = { id:doc.id, key:doc.id, value:{ rev:doc.rev } };
if (opts.include_docs) {
include_doc(row, doc);
}
results.push(row);
}

if (batch.length >= batchSize) {
const lastSeenKey = [ 0, batch[batch.length-1].id ];
const startKey = descending ? keyRange.upper : lastSeenKey;
const endKey = descending ? lastSeenKey : keyRange.upper;
if (startKey[1] !== endKey[1]) {
const incEnd = descending ? false : inclusiveEnd;
const incStart = descending ? true : false;
keyRange = createKeyRange(startKey, endKey, incStart, incEnd, key, descending);
return fetchNextBatch();
}
}
if (descending) {
results.reverse();
}
return txn.txn.commit();
};
}
}
}
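
To make the new allDocs() flow above easier to follow in isolation, here is a minimal standalone sketch of the same pattern: composite [deleted, id] index keys, a key-only cursor to apply skip cheaply, then batched getAll() reads. The store, index, and function names below are illustrative assumptions, not the adapter's real API.

```js
// Sketch of the skip + batched getAll() pattern, assuming an object store
// 'docs' with an index 'deleted_id' keyed on [deleted, id], where live
// documents have deleted === 0 (as in the diff above).
function demoAllDocs(db, { skip = 0, limit = Infinity, batchSize = 100 } = {}, callback) {
  const results = [];
  const txn = db.transaction('docs', 'readonly');
  const index = txn.objectStore('docs').index('deleted_id'); // keyed on [deleted, id]
  let keyRange = IDBKeyRange.bound([0, ''], [0, '\uffff']);  // non-deleted docs only

  // Pass 1: a key-only cursor applies `skip` without materialising documents.
  index.openKeyCursor(keyRange).onsuccess = (e) => {
    const cursor = e.target.result;
    if (!cursor) { return callback(null, results); }        // nothing left after `skip`
    if (skip) { const s = skip; skip = 0; return cursor.advance(s); }
    keyRange = IDBKeyRange.bound(cursor.key, [0, '\uffff']);
    fetchNextBatch();
  };

  // Pass 2: read full records in fixed-size batches via getAll().
  function fetchNextBatch() {
    index.getAll(keyRange, batchSize).onsuccess = (e) => {
      const batch = e.target.result;
      for (const doc of batch) {
        if (results.length >= limit) { break; }
        results.push({ id: doc.id, key: doc.id, value: { rev: doc.rev } });
      }
      if (batch.length >= batchSize && results.length < limit) {
        // Resume strictly after the last key seen in this batch.
        const lastKey = [0, batch[batch.length - 1].id];
        keyRange = IDBKeyRange.bound(lastKey, [0, '\uffff'], true, false);
        return fetchNextBatch();
      }
      callback(null, results);
    };
  }
}
```

The two-pass shape mirrors fetchResults() above: skipping with a key cursor avoids deserialising documents that will be discarded, and the fixed batch size bounds how much unused rev data sits in memory at once.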
19 changes: 11 additions & 8 deletions packages/node_modules/pouchdb-adapter-indexeddb/src/bulkDocs.js
@@ -19,7 +19,7 @@ import { isLocalId, parseDoc } from 'pouchdb-adapter-utils';
import { binaryMd5 as md5 } from 'pouchdb-md5';
import { winningRev as calculateWinningRev, merge, compactTree } from 'pouchdb-merge';

import { DOC_STORE, META_STORE, idbError } from './util';
import { DOC_STORE, META_LOCAL_STORE, idbError } from './util';

import { rewrite } from './rewrite';

@@ -62,7 +62,8 @@ export default function (api, req, opts, metadata, dbOpts, idbChanges, callback)
}

docs.forEach(function (doc) {
txn.objectStore(DOC_STORE).get(doc.id).onsuccess = readDone;
const docStore = isLocalId(doc.id) ? META_LOCAL_STORE : DOC_STORE;
txn.objectStore(docStore).get(doc.id).onsuccess = readDone;
});
}

@@ -212,8 +213,6 @@ export default function (api, req, opts, metadata, dbOpts, idbChanges, callback)
doc.deleted = doc.revs[winningRev].deleted ? 1 : 0;

// Bump the seq for every new (non local) revision written
// TODO: index expects a unique seq, not sure if ignoring local will
// work
if (!isLocal) {
doc.seq = ++metadata.seq;

@@ -285,7 +284,7 @@ export default function (api, req, opts, metadata, dbOpts, idbChanges, callback)

// Local documents have different revision handling
if (isLocal && doc.deleted) {
txn.objectStore(DOC_STORE).delete(doc.id).onsuccess = function () {
txn.objectStore(META_LOCAL_STORE).delete(doc.id).onsuccess = function () {
results[i] = {
ok: true,
id: doc.id,
@@ -296,7 +295,8 @@ export default function (api, req, opts, metadata, dbOpts, idbChanges, callback)
return;
}

txn.objectStore(DOC_STORE).put(doc).onsuccess = function () {
const docStore = isLocal ? META_LOCAL_STORE : DOC_STORE;
txn.objectStore(docStore).put(doc).onsuccess = function () {
results[i] = {
ok: true,
id: doc.id,
@@ -308,7 +308,7 @@ export default function (api, req, opts, metadata, dbOpts, idbChanges, callback)

function updateSeq(i) {
if (i === lastWriteIndex) {
txn.objectStore(META_STORE).put(metadata);
txn.objectStore(META_LOCAL_STORE).put(metadata);
}
}

@@ -396,7 +396,10 @@ export default function (api, req, opts, metadata, dbOpts, idbChanges, callback)
}

preProcessAttachments().then(function () {
api._openTransactionSafely([DOC_STORE, META_STORE], 'readwrite', function (err, _txn) {
// We _could_ check doc ids here, and skip opening DOC_STORE if all docs are local.
// This may marginally slow things down for local docs. It seems pragmatic to keep
// the code simple and optimise for calls to bulkDocs() which include non-local docs.
api._openTransactionSafely([DOC_STORE, META_LOCAL_STORE], 'readwrite', function (err, _txn) {
if (err) {
return callback(err);
}
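
The routing comment near the bottom of bulkDocs() captures this file's change: each document is sent to META_LOCAL_STORE or DOC_STORE based on isLocalId(), within a single transaction that opens both stores. A hedged usage sketch of that path through the public API follows; the plugin wiring, ids, and values are illustrative and not part of this diff.

```js
import PouchDB from 'pouchdb';
import indexeddb from 'pouchdb-adapter-indexeddb';

// Assumed wiring: register the indexeddb adapter and open a database with it.
PouchDB.plugin(indexeddb);
const db = new PouchDB('demo', { adapter: 'indexeddb' });

async function demoLocalVsRegular() {
  // A regular document: stored in DOC_STORE and given a new seq.
  await db.put({ _id: 'doc-1', title: 'a regular document' });

  // A '_local/' document: never replicated, no seq bump, and with this
  // change written to META_LOCAL_STORE instead of DOC_STORE.
  await db.put({ _id: '_local/replication-checkpoint', last_seq: 42 });

  // Deleting a local doc exercises the META_LOCAL_STORE delete branch above.
  const local = await db.get('_local/replication-checkpoint');
  await db.remove(local);
}
```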
37 changes: 37 additions & 0 deletions packages/node_modules/pouchdb-adapter-indexeddb/src/getLocal.js
@@ -0,0 +1,37 @@
import { createError, MISSING_DOC } from 'pouchdb-errors';

import { META_LOCAL_STORE, processAttachment } from './util';

// _getLocal() doesn't know if opts.binary is set or not, so assume it's not.
const BINARY_ATTACHMENTS = false;

export default function (txn, id, api, callback) {
if (txn.error) {
return callback(txn.error);
}

txn.txn.objectStore(META_LOCAL_STORE).get(id).onsuccess = function (e) {
const doc = e.target.result;

if (!doc) {
callback(createError(MISSING_DOC, 'missing'));
return;
}

const result = doc.revs[doc.rev].data;
result._id = doc.id;
result._rev = doc.rev;

if (result._attachments) {
const processing = [];
for (const name in result._attachments) {
processing.push(processAttachment(name, doc, result, BINARY_ATTACHMENTS, api.blobSupport));
}
Promise.all(processing)
.then(() => callback(null, result))
.catch(callback);
} else {
callback(null, result);
}
};
}
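
For the read path this new file adds, a hedged usage sketch: a get() of a '_local/' id is served by the handler above, and any inline attachments are resolved through processAttachment() with BINARY_ATTACHMENTS fixed to false, so data comes back in non-binary (base64) form. Local-doc attachment support here follows the tests added in this PR; the id and attachment below are illustrative.

```js
async function demoGetLocalWithAttachment(db) {
  await db.put({
    _id: '_local/config',
    _attachments: {
      'note.txt': {
        content_type: 'text/plain',
        data: btoa('hello'),   // inline, base64-encoded attachment
      },
    },
  });

  // Served by the _getLocal() handler above; rejects with MISSING_DOC
  // if the id is unknown.
  const doc = await db.get('_local/config');

  // BINARY_ATTACHMENTS === false in getLocal.js, so attachment data is
  // handed back in its non-binary (base64) form rather than as a Blob.
  console.log(doc._attachments['note.txt'].data);
}
```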