diff --git a/packages/node_modules/pouchdb-adapter-indexeddb/src/allDocs.js b/packages/node_modules/pouchdb-adapter-indexeddb/src/allDocs.js
index 704681a223..589b07664d 100644
--- a/packages/node_modules/pouchdb-adapter-indexeddb/src/allDocs.js
+++ b/packages/node_modules/pouchdb-adapter-indexeddb/src/allDocs.js
@@ -1,6 +1,5 @@
 'use strict';
 
-import { isLocalId } from 'pouchdb-adapter-utils';
 import { createError, IDB_ERROR } from 'pouchdb-errors';
 import { collectConflicts } from 'pouchdb-merge';
 
@@ -27,33 +26,18 @@ function allDocsKeys(keys, docStore, allDocsInner) {
   });
 }
 
-function createKeyRange(start, end, inclusiveEnd, key, descending) {
+function createKeyRange(start, end, inclusiveStart, inclusiveEnd, key, descending) {
   try {
-    if (start && end) {
-      if (descending) {
-        return IDBKeyRange.bound(end, start, !inclusiveeEnd, false);
-      } else {
-        return IDBKeyRange.bound(start, end, false, !inclusiveEnd);
-      }
-    } else if (start) {
-      if (descending) {
-        return IDBKeyRange.upperBound(start);
-      } else {
-        return IDBKeyRange.lowerBound(start);
-      }
-    } else if (end) {
-      if (descending) {
-        return IDBKeyRange.lowerBound(end, !inclusiveEnd);
-      } else {
-        return IDBKeyRange.upperBound(end, !inclusiveEnd);
-      }
-    } else if (key) {
-      return IDBKeyRange.only(key);
+    if (key) {
+      return IDBKeyRange.only([0, key]);
+    } else if (descending) {
+      return IDBKeyRange.bound(end, start, !inclusiveEnd, !inclusiveStart);
+    } else {
+      return IDBKeyRange.bound(start, end, !inclusiveStart, !inclusiveEnd);
     }
   } catch (e) {
     return {error: e};
   }
-  return null;
 }
 
 function handleKeyRangeError(opts, metadata, err, callback) {
@@ -96,31 +80,46 @@ export default function (txn, metadata, opts, callback) {
   var results = [];
   var processing = [];
 
-  var start = 'startkey' in opts ? opts.startkey : false;
-  var end = 'endkey' in opts ? opts.endkey : false;
   var key = 'key' in opts ? opts.key : false;
   var keys = 'keys' in opts ? opts.keys : false;
   var skip = opts.skip || 0;
-  var limit = typeof opts.limit === 'number' ? opts.limit : -1;
+  var limit = typeof opts.limit === 'number' ? opts.limit : undefined;
   var inclusiveEnd = opts.inclusive_end !== false;
   var descending = 'descending' in opts && opts.descending ? 'prev' : null;
-
-  var keyRange;
-  if (!keys) {
-    keyRange = createKeyRange(start, end, inclusiveEnd, key, descending);
-    if (keyRange && keyRange.error) {
-      return handleKeyRangeError(opts, metadata, keyRange.error, callback);
-    }
-  }
+  var start = 'startkey' in opts ? opts.startkey : (descending ? '\uffff' : '');
+  var end = 'endkey' in opts ? opts.endkey : (descending ? '' : '\uffff');
 
   var docStore = txn.txn.objectStore(DOC_STORE);
-  txn.txn.oncomplete = onTxnComplete;
-
   if (keys) {
-    return allDocsKeys(opts.keys, docStore, allDocsInner);
+    txn.txn.oncomplete = onTxnComplete;
+    const allDocsInner = doc => {
+      if (doc.error) {
+        return results.push(doc);
+      }
+
+      const row = { id:doc.id, key:doc.id, value:{ rev:doc.rev } };
+
+      if (doc.deleted) {
+        row.value.deleted = true;
+        row.doc = null;
+      } else if (opts.include_docs) {
+        include_doc(row, doc);
+      }
+
+      results.push(row);
+    };
+    return allDocsKeys(keys, docStore, allDocsInner);
+  }
+
+  let keyRange = createKeyRange([0, start], [0, end], true, inclusiveEnd, key, descending);
+  if (keyRange.error) {
+    return handleKeyRangeError(opts, metadata, keyRange.error, callback);
   }
 
+  // txn.oncomplete must be set AFTER key-range-error is generated
+  txn.txn.oncomplete = onTxnComplete;
+
   function include_doc(row, doc) {
     var docData = doc.revs[doc.rev].data;
 
@@ -141,79 +140,120 @@ export default function (txn, metadata, opts, callback) {
     }
   }
 
-  function allDocsInner(doc) {
-    if (doc.error && keys) {
-      // key was not found with "keys" requests
-      results.push(doc);
-      return true;
+  function onTxnComplete() {
+    const returnVal = {
+      total_rows: metadata.doc_count,
+      offset: 0,
+      rows: results
+    };
+    /* istanbul ignore if */
+    if (opts.update_seq) {
+      returnVal.update_seq = metadata.seq;
     }
 
-    var row = {
-      id: doc.id,
-      key: doc.id,
-      value: {
-        rev: doc.rev
-      }
-    };
+    if (processing.length) {
+      Promise.all(processing).then(function () {
+        callback(null, returnVal);
+      });
+    } else {
+      callback(null, returnVal);
+    }
+  }
 
-    var deleted = doc.deleted;
-    if (deleted) {
-      if (keys) {
-        results.push(row);
-        row.value.deleted = true;
-        row.doc = null;
-      }
-    } else if (skip-- <= 0) {
-      results.push(row);
-      if (opts.include_docs) {
-        include_doc(row, doc);
+  const dbIndex = docStore.index('deleted,id');
+
+  if (!skip && !limit) {
+    fetchResults();
+  } else {
+    let firstKey;
+    let limitKey = limit > 0;
+
+    dbIndex.openKeyCursor(keyRange, descending || 'next').onsuccess = (e) => {
+      const cursor = e.target.result;
+
+      if (skip) {
+        if (!cursor) { return txn.txn.commit(); }
+        cursor.advance(skip);
+        skip = 0;
+        return;
       }
-      if (--limit === 0) {
-        return false;
+
+      if (firstKey === undefined) {
+        firstKey = cursor && cursor.key;
+        if (!firstKey) { return txn.txn.commit(); }
       }
-    }
-    return true;
-  }
 
-  function onTxnComplete() {
-    Promise.all(processing).then(function () {
-      var returnVal = {
-        total_rows: metadata.doc_count,
-        offset: 0,
-        rows: results
-      };
-
-      /* istanbul ignore if */
-      if (opts.update_seq) {
-        returnVal.update_seq = metadata.seq;
+      if (limit) {
+        if (limit > 1 && cursor) {
+          cursor.advance(limit - 1);
+          limit = undefined;
+          return;
+        }
+        limit = undefined;
       }
 
-      callback(null, returnVal);
-    });
-  }
-
-  var cursor = descending ?
-    docStore.openCursor(keyRange, descending) :
-    docStore.openCursor(keyRange);
-  cursor.onsuccess = function (e) {
+      if (limitKey) {
+        limitKey = cursor && cursor.key;
+      }
+      if (!limitKey) {
+        limitKey = descending ? keyRange.lower : keyRange.upper;
+      }
+
+      keyRange = createKeyRange(firstKey, limitKey, true, inclusiveEnd, key, descending);
+      if (keyRange.error) {
+        txn.txn.abort();
+        return handleKeyRangeError(opts, metadata, keyRange.error, callback);
      }
 
-    var doc = e.target.result && e.target.result.value;
+      fetchResults();
+    };
+  }
 
-    // Happens if opts does not have limit,
-    // because cursor will end normally then,
-    // when all docs are retrieved.
-    // Would not be needed, if getAll() optimization was used like in #6059
-    if (!doc) { return; }
+  async function fetchResults() {
+    // There is a risk here with getting all results into memory - if they have multiple
+    // revs, then we risk loading loads of extra data which is then discarded. This is
+    // reduced by batching. This also loads unused data when include_docs is false.
+    //
+    // Current batch size is quite arbitrary, but seems like (1) more than a typical
+    // result size, and (2) not so big it's likely to cause issues.
+    const batchSize = 100;
 
-    // Skip local docs
-    if (isLocalId(doc.id)) {
-      return e.target.result.continue();
+    let kr = keyRange;
+    do {
+      kr = await fetchNextBatch(kr);
+    } while (kr);
+    if (descending) {
+      results.reverse();
     }
+    return txn.txn.commit();
 
-    var continueCursor = allDocsInner(doc);
-    if (continueCursor) {
-      e.target.result.continue();
-    }
-  };
+    function fetchNextBatch(kr) {
+      return new Promise((resolve) => {
+        dbIndex.getAll(kr, batchSize).onsuccess = (e) => {
+          const batch = e.target.result;
+          for (let i=0; i<batch.length; ++i) {
+            const doc = batch[i];
+            const row = { id:doc.id, key:doc.id, value:{ rev:doc.rev } };
+            if (opts.include_docs) {
+              include_doc(row, doc);
+            }
+            results.push(row);
+          }
+
+          if (batch.length >= batchSize) {
+            const lastSeenKey = [ 0, batch[batch.length-1].id ];
+            const startKey = descending ? kr.upper : lastSeenKey;
+            const endKey = descending ? lastSeenKey : kr.upper;
+            if (startKey[1] !== endKey[1]) {
+              const incEnd = descending ? false : inclusiveEnd;
+              const incStart = descending ? true : false;
+              return resolve(createKeyRange(startKey, endKey, incStart, incEnd, key, descending));
+            }
+          }
+          return resolve();
+        };
+      });
+    }
+  }
 }
diff --git a/packages/node_modules/pouchdb-adapter-indexeddb/src/bulkDocs.js b/packages/node_modules/pouchdb-adapter-indexeddb/src/bulkDocs.js
index 30a7f13d2d..ee2ee1a865 100644
--- a/packages/node_modules/pouchdb-adapter-indexeddb/src/bulkDocs.js
+++ b/packages/node_modules/pouchdb-adapter-indexeddb/src/bulkDocs.js
@@ -19,7 +19,7 @@ import { isLocalId, parseDoc } from 'pouchdb-adapter-utils';
 import { binaryMd5 as md5 } from 'pouchdb-md5';
 import { winningRev as calculateWinningRev, merge, compactTree } from 'pouchdb-merge';
 
-import { DOC_STORE, META_STORE, idbError } from './util';
+import { DOC_STORE, META_LOCAL_STORE, idbError } from './util';
 
 import { rewrite } from './rewrite';
 
@@ -62,7 +62,8 @@ export default function (api, req, opts, metadata, dbOpts, idbChanges, callback)
     }
 
     docs.forEach(function (doc) {
-      txn.objectStore(DOC_STORE).get(doc.id).onsuccess = readDone;
+      const docStore = isLocalId(doc.id) ? META_LOCAL_STORE : DOC_STORE;
+      txn.objectStore(docStore).get(doc.id).onsuccess = readDone;
     });
   }
 
@@ -212,8 +213,6 @@
       doc.deleted = doc.revs[winningRev].deleted ? 1 : 0;
 
       // Bump the seq for every new (non local) revision written
-      // TODO: index expects a unique seq, not sure if ignoring local will
-      // work
       if (!isLocal) {
         doc.seq = ++metadata.seq;
 
@@ -285,7 +284,7 @@
 
     // Local documents have different revision handling
     if (isLocal && doc.deleted) {
-      txn.objectStore(DOC_STORE).delete(doc.id).onsuccess = function () {
+      txn.objectStore(META_LOCAL_STORE).delete(doc.id).onsuccess = function () {
        results[i] = {
          ok: true,
          id: doc.id,
@@ -296,7 +295,8 @@
      return;
    }
 
-    txn.objectStore(DOC_STORE).put(doc).onsuccess = function () {
+    const docStore = isLocal ? META_LOCAL_STORE : DOC_STORE;
+    txn.objectStore(docStore).put(doc).onsuccess = function () {
      results[i] = {
        ok: true,
        id: doc.id,
@@ -308,7 +308,7 @@
 
   function updateSeq(i) {
     if (i === lastWriteIndex) {
-      txn.objectStore(META_STORE).put(metadata);
+      txn.objectStore(META_LOCAL_STORE).put(metadata);
     }
   }
 
@@ -396,7 +396,10 @@
   }
 
   preProcessAttachments().then(function () {
-    api._openTransactionSafely([DOC_STORE, META_STORE], 'readwrite', function (err, _txn) {
+    // We _could_ check doc ids here, and skip opening DOC_STORE if all docs are local.
+    // This may marginally slow things down for local docs. It seems pragmatic to keep
+    // the code simple and optimise for calls to bulkDocs() which include non-local docs.
+    api._openTransactionSafely([DOC_STORE, META_LOCAL_STORE], 'readwrite', function (err, _txn) {
       if (err) {
         return callback(err);
       }
diff --git a/packages/node_modules/pouchdb-adapter-indexeddb/src/getAttachment.js b/packages/node_modules/pouchdb-adapter-indexeddb/src/getAttachment.js
index b3d07789e1..73cde6d0a2 100644
--- a/packages/node_modules/pouchdb-adapter-indexeddb/src/getAttachment.js
+++ b/packages/node_modules/pouchdb-adapter-indexeddb/src/getAttachment.js
@@ -1,5 +1,7 @@
 'use strict';
 
+import { isLocalId } from 'pouchdb-adapter-utils';
+import { createError, MISSING_DOC } from 'pouchdb-errors';
 import {
   base64StringToBlobOrBuffer as b64StringToBluffer,
   btoa,
@@ -7,6 +9,11 @@
 } from 'pouchdb-binary-utils';
 
 export default function getAttachment(docId, attachId, attachment, opts, cb) {
+  if (isLocalId(docId)) {
+    cb(createError(MISSING_DOC, 'missing'));
+    return;
+  }
+
   const doc = opts.metadata;
   const data = doc.attachments[attachment.digest].data;
 
diff --git a/packages/node_modules/pouchdb-adapter-indexeddb/src/getLocal.js b/packages/node_modules/pouchdb-adapter-indexeddb/src/getLocal.js
new file mode 100644
index 0000000000..0f9f2a02cd
--- /dev/null
+++ b/packages/node_modules/pouchdb-adapter-indexeddb/src/getLocal.js
@@ -0,0 +1,37 @@
+import { createError, MISSING_DOC } from 'pouchdb-errors';
+
+import { META_LOCAL_STORE, processAttachment } from './util';
+
+// _getLocal() doesn't know if opts.binary is set or not, so assume it's not.
+const BINARY_ATTACHMENTS = false;
+
+export default function (txn, id, api, callback) {
+  if (txn.error) {
+    return callback(txn.error);
+  }
+
+  txn.txn.objectStore(META_LOCAL_STORE).get(id).onsuccess = function (e) {
+    const doc = e.target.result;
+
+    if (!doc) {
+      callback(createError(MISSING_DOC, 'missing'));
+      return;
+    }
+
+    const result = doc.revs[doc.rev].data;
+    result._id = doc.id;
+    result._rev = doc.rev;
+
+    if (result._attachments) {
+      const processing = [];
+      for (const name in result._attachments) {
+        processing.push(processAttachment(name, doc, result, BINARY_ATTACHMENTS, api.blobSupport));
+      }
+      Promise.all(processing)
+        .then(() => callback(null, result))
+        .catch(callback);
+    } else {
+      callback(null, result);
+    }
+  };
+}
diff --git a/packages/node_modules/pouchdb-adapter-indexeddb/src/index.js b/packages/node_modules/pouchdb-adapter-indexeddb/src/index.js
index ebc4d00e81..8aecbf2083 100644
--- a/packages/node_modules/pouchdb-adapter-indexeddb/src/index.js
+++ b/packages/node_modules/pouchdb-adapter-indexeddb/src/index.js
@@ -6,6 +6,7 @@ import setup from './setup';
 // API implementations
 import info from './info';
 import get from './get';
+import getLocal from './getLocal';
 import getAttachment from './getAttachment';
 import bulkDocs from './bulkDocs';
 import allDocs from './allDocs';
@@ -16,7 +17,7 @@ import destroy from './destroy';
 import {query, viewCleanup} from './find';
 import purge from './purge';
 
-import { DOC_STORE } from './util';
+import { DOC_STORE, META_LOCAL_STORE } from './util';
 
 var ADAPTER_NAME = 'indexeddb';
 
@@ -71,7 +72,6 @@ function IndexeddbPouch(dbOpts, callback) {
   // because in the time between getting the db handle and opening the
   // transaction it may have been invalidated by index changes.
   var $t = function (fun, stores, mode) {
-    stores = stores || [DOC_STORE];
     mode = mode || 'readonly';
 
     return function () {
@@ -108,7 +108,10 @@
     return info(metadata, cb);
   });
 
-  api._get = $t(get);
+  api._get = $t(get, [DOC_STORE]);
+  api._getLocal = $t(function (txn, id, callback) {
+    return getLocal(txn, id, api, callback);
+  }, [META_LOCAL_STORE]);
 
   api._bulkDocs = $(function (_, req, opts, callback) {
     bulkDocs(api, req, opts, metadata, dbOpts, idbChanges, callback);
@@ -116,15 +119,15 @@
   api._allDocs = $t(function (txn, opts, cb) {
     allDocs(txn, metadata, opts, cb);
-  });
+  }, [DOC_STORE]);
 
   api._getAttachment = getAttachment;
 
   api._changes = $t(function (txn, opts) {
     changes(txn, idbChanges, api, dbOpts, opts);
-  });
+  }, [DOC_STORE]);
 
-  api._getRevisionTree = $t(getRevisionTree);
+  api._getRevisionTree = $t(getRevisionTree, [DOC_STORE]);
 
   api._doCompaction = $t(doCompaction, [DOC_STORE], 'readwrite');
 
   api._customFindAbstractMapper = {
diff --git a/packages/node_modules/pouchdb-adapter-indexeddb/src/setup.js b/packages/node_modules/pouchdb-adapter-indexeddb/src/setup.js
index c7cfad4b09..413b4d6e14 100644
--- a/packages/node_modules/pouchdb-adapter-indexeddb/src/setup.js
+++ b/packages/node_modules/pouchdb-adapter-indexeddb/src/setup.js
@@ -1,16 +1,17 @@
 'use strict';
 
 import { uuid } from 'pouchdb-utils';
+import { isLocalId } from 'pouchdb-adapter-utils';
 import { checkBlobSupport } from 'pouchdb-adapter-utils';
 
-import { META_STORE, DOC_STORE, rawIndexFields, naturalIndexName, correctIndexFields } from './util';
+import { META_LOCAL_STORE, DOC_STORE, rawIndexFields, naturalIndexName, correctIndexFields } from './util';
 
 //
 // Core PouchDB schema version. Increment this if we, as a library, want to make
 // schema changes in indexeddb. See upgradePouchDbSchema()
 //
-var POUCHDB_IDB_VERSION = 1;
+var POUCHDB_IDB_VERSION = 2;
 
 //
 // Functions that manage a combinate indexeddb version, by combining the current
@@ -71,7 +72,7 @@ function maintainNativeIndexes(openReq, reject) {
     var expectedIndexNames = Object.keys(expectedIndexes);
 
     // Delete any indexes that aren't system indexes or expected
-    var systemIndexNames = ['seq'];
+    var systemIndexNames = ['seq', 'deleted,id'];
     existingIndexNames.forEach(function (index) {
       if (systemIndexNames.indexOf(index) === -1 && expectedIndexNames.indexOf(index) === -1) {
        docStore.deleteIndex(index);
@@ -93,16 +94,24 @@
   };
 }
 
-function upgradePouchDbSchema(db, pouchdbVersion) {
+function upgradePouchDbSchema(dbName, db, tx, pouchdbVersion) {
   if (pouchdbVersion < 1) {
     var docStore = db.createObjectStore(DOC_STORE, {keyPath : 'id'});
     docStore.createIndex('seq', 'seq', {unique: true});
 
-    db.createObjectStore(META_STORE, {keyPath: 'id'});
+    db.createObjectStore(META_LOCAL_STORE, {keyPath: 'id'});
+  }
+
+  if (pouchdbVersion < 2) {
+    const docStore = tx.objectStore(DOC_STORE);
+    docStore.createIndex('deleted,id', [ 'deleted', 'id' ], {unique: true});
+    if (dbName.includes('-mrview-')) {
+      docStore.deleteIndex('seq');
+    }
   }
 
   // Declare more PouchDB schema changes here
-  // if (pouchdbVersion < 2) { .. }
+  // if (pouchdbVersion < 3) { .. }
 }
 
 function openDatabase(openDatabases, api, opts, resolve, reject) {
@@ -124,11 +133,38 @@
       throw new Error('Database was deleted while open');
     }
 
+    const tx = e.target.transaction;
     var db = e.target.result;
     var pouchdbVersion = getPouchDbVersion(e.oldVersion);
-    upgradePouchDbSchema(db, pouchdbVersion);
+    upgradePouchDbSchema(opts.name, db, tx, pouchdbVersion);
     maintainNativeIndexes(openReq, reject);
+
+    if (pouchdbVersion < 2) {
+      const docStore = openReq.transaction.objectStore(DOC_STORE);
+      const metaStore = openReq.transaction.objectStore(META_LOCAL_STORE);
+
+      const allDocsReq = docStore.openCursor();
+      allDocsReq.onsuccess = event => {
+        const cursor = event.target.result;
+        if (!cursor) {
+          return;
+        }
+
+        const doc = cursor.value;
+
+        if (!isLocalId(doc.id)) {
+          return cursor.continue();
+        }
+
+        // Move _local/ docs to the META_LOCAL_STORE
+        metaStore.put(doc).onsuccess = () => {
+          cursor.delete(doc).onsuccess = () => {
+            cursor.continue();
+          };
+        };
+      };
+    }
   };
 
   openReq.onblocked = function (e) {
@@ -168,15 +204,15 @@
    }
  };
 
-  var metadata = {id: META_STORE};
-  var txn = idb.transaction([META_STORE], 'readwrite');
+  var metadata = {id: META_LOCAL_STORE};
+  var txn = idb.transaction([META_LOCAL_STORE], 'readwrite');
 
   txn.oncomplete = function () {
     resolve({idb, metadata});
   };
 
-  var metaStore = txn.objectStore(META_STORE);
-  metaStore.get(META_STORE).onsuccess = function (e) {
+  var metaStore = txn.objectStore(META_LOCAL_STORE);
+  metaStore.get(META_LOCAL_STORE).onsuccess = function (e) {
    metadata = e.target.result || metadata;
    var changed = false;
 
@@ -200,7 +236,7 @@
    const createBlobDoc = blob => ({ id:'blob-support', blob });
 
-    checkBlobSupport(txn, META_STORE, createBlobDoc).then(blobSupport => {
+    checkBlobSupport(txn, META_LOCAL_STORE, createBlobDoc).then(blobSupport => {
      // Unfortunate that we have to track this in both the metadata and on
      // api, but sometimes we have access to one, sometimes the other (and
      // sometimes both). We could change function signatures in index.js
diff --git a/packages/node_modules/pouchdb-adapter-indexeddb/src/util.js b/packages/node_modules/pouchdb-adapter-indexeddb/src/util.js
index 213743dd26..3363a37f5e 100644
--- a/packages/node_modules/pouchdb-adapter-indexeddb/src/util.js
+++ b/packages/node_modules/pouchdb-adapter-indexeddb/src/util.js
@@ -9,7 +9,7 @@ import {
 import { sanitise } from './rewrite';
 
 var DOC_STORE = 'docs';
-var META_STORE = 'meta';
+var META_LOCAL_STORE = 'meta';
 
 function idbError(callback) {
   return function (evt) {
@@ -110,7 +110,7 @@ function correctIndexFields(fields) {
 
 export {
   DOC_STORE,
-  META_STORE,
+  META_LOCAL_STORE,
   idbError,
   processAttachment,
   rawIndexFields,
diff --git a/tests/integration/test.all_docs.js b/tests/integration/test.all_docs.js
index a04431b90c..469751afa0 100644
--- a/tests/integration/test.all_docs.js
+++ b/tests/integration/test.all_docs.js
@@ -460,6 +460,147 @@ adapters.forEach(function (adapter) {
     });
 
+    it('test total_rows with a variety of criteria * 100', function (done) {
+      var db = new PouchDB(dbs.name);
+
+      const docs = [];
+      for (let i=0; i<1000; ++i) {
+        docs.push({ _id:i.toString().padStart(5, '0') });
+      }
+
+      db.bulkDocs({docs}).then(function (res) {
+        const deletes = [];
+        for (let i=300; i<400; ++i) {
+          docs[i]._deleted = true;
+          docs[i]._rev = res[i].rev;
+          deletes.push(docs[i]);
+        }
+        for (let i=700; i<800; ++i) {
+          docs[i]._deleted = true;
+          docs[i]._rev = res[i].rev;
+          deletes.push(docs[i]);
+        }
+        if (adapter === 'http') {
+          return testUtils.getServerType().then(serverType => {
+            if (serverType === 'pouchdb-express-router') {
+              // Workaround for https://github.com/pouchdb/pouchdb-express-router/issues/18
+              return deletes.reduce(
+                (chain, doc) => chain.then(() => db.remove(doc)),
+                Promise.resolve(),
+              );
+            }
+            return Promise.all(deletes.map(doc => db.remove(doc)))
+              .then(function (deleted) {
+                deleted.should.have.length(200);
+              });
+          });
+        } else {
+          return Promise.all(deletes.map(doc => db.remove(doc)))
+            .then(function (deleted) {
+              deleted.should.have.length(200);
+            });
+        }
+      }).then(function () {
+        return db.allDocs();
+      }).then(function (res) {
+        res.rows.should.have.length(800, 'correctly return rows');
+        res.total_rows.should.equal(800, 'correctly return total_rows');
+        return db.allDocs({startkey : '00500'});
+      }).then(function (res) {
+        res.rows.should.have.length(400, 'correctly return rows');
+        res.total_rows.should.equal(800, 'correctly return total_rows');
+        return db.allDocs({startkey : '00500', skip : 200, limit : 1000});
+      }).then(function (res) {
+        res.rows.should.have.length(200, 'correctly return rows');
+        res.total_rows.should.equal(800, 'correctly return total_rows');
+        return db.allDocs({startkey : '00500', limit : 0});
+      }).then(function (res) {
+        res.rows.should.have
+          .length(0, 'correctly return rows, startkey w/ limit=0');
+        res.total_rows.should.equal(800, 'correctly return total_rows');
+        return db.allDocs({keys : ['00500'], limit : 0});
+      }).then(function (res) {
+        res.rows.should.have
+          .length(0, 'correctly return rows, keys w/ limit=0');
+        res.total_rows.should.equal(800, 'correctly return total_rows');
+        return db.allDocs({limit : 0});
+      }).then(function (res) {
+        res.rows.should.have.length(0, 'correctly return rows, limit=0');
+        res.total_rows.should.equal(800, 'correctly return total_rows');
+        return db.allDocs({startkey : '00500', descending : true, skip : 1});
+      }).then(function (res) {
+        res.rows.should.have.length(400, 'correctly return rows');
+        res.total_rows.should.equal(800, 'correctly return total_rows');
+        return db.allDocs({startkey : '00500', endkey : 'z'});
+      }).then(function (res) {
+        res.rows.should.have.length(400, 'correctly return rows');
+        res.total_rows.should.equal(800, 'correctly return total_rows');
+        return db.allDocs({startkey : '00500', endkey : '00500'});
+      }).then(function (res) {
+        res.rows.should.have.length(1, 'correctly return rows');
+        res.total_rows.should.equal(800, 'correctly return total_rows');
+        return db.allDocs({startkey : '00500', endkey : '00400'});
+      }).then(function (res) {
+        res.rows.should.have.length(0, 'correctly return rows');
+        res.total_rows.should.equal(800, 'correctly return total_rows');
+        return db.allDocs({startkey : '00599', endkey : '00400', descending : true});
+      }).then(function (res) {
+        res.rows.should.have.length(200, 'correctly return rows');
+        res.total_rows.should.equal(800, 'correctly return total_rows');
+        return db.allDocs({startkey:'00599', endkey:'00400', descending:true, inclusive_end:false });
+      }).then(function (res) {
+        res.rows.should.have.length(199, 'correctly return rows');
+        res.total_rows.should.equal(800, 'correctly return total_rows');
+        return db.allDocs({startkey : '00300', endkey : '00799', descending : false});
+      }).then(function (res) {
+        res.rows.should.have.length(300, 'correctly return rows');
+        res.total_rows.should.equal(800, 'correctly return total_rows');
+        return db.allDocs({startkey:'00300', endkey:'00799', descending:false, inclusive_end:false });
+      }).then(function (res) {
+        res.rows.should.have.length(300, 'correctly return rows');
+        res.total_rows.should.equal(800, 'correctly return total_rows');
+        return db.allDocs({startkey : '00799', endkey : '00300', descending : true});
+      }).then(function (res) {
+        res.rows.should.have.length(300, 'correctly return rows');
+        res.total_rows.should.equal(800, 'correctly return total_rows');
+        return db.allDocs({startkey : '', endkey : '00000'});
+      }).then(function (res) {
+        res.rows.should.have.length(1, 'correctly return rows');
+        res.total_rows.should.equal(800, 'correctly return total_rows');
+        return db.allDocs({keys : ['00000', '00100', '00300']});
+      }).then(function (res) {
+        res.rows.should.have.length(3, 'correctly return rows');
+        res.total_rows.should.equal(800, 'correctly return total_rows');
+        return db.allDocs({keys : ['00000', '00100', '00000', '00200', '00100', '00100']});
+      }).then(function (res) {
+        res.rows.should.have.length(6, 'correctly return rows');
+        res.rows.map(function (row) { return row.key; }).should.deep.equal(
+          ['00000', '00100', '00000', '00200', '00100', '00100']);
+        res.total_rows.should.equal(800, 'correctly return total_rows');
+        return db.allDocs({keys : []});
+      }).then(function (res) {
+        res.rows.should.have.length(0, 'correctly return rows');
+        res.total_rows.should.equal(800, 'correctly return total_rows');
+        return db.allDocs({keys : ['00700']});
+      }).then(function (res) {
+        res.rows.should.have.length(1, 'correctly return rows');
+        res.total_rows.should.equal(800, 'correctly return total_rows');
+        return db.allDocs({key : '00300'});
+      }).then(function (res) {
+        res.rows.should.have.length(0, 'correctly return rows');
+        res.total_rows.should.equal(800, 'correctly return total_rows');
+        return db.allDocs({key : '00200'});
+      }).then(function (res) {
+        res.rows.should.have.length(1, 'correctly return rows');
+        res.total_rows.should.equal(800, 'correctly return total_rows');
+        return db.allDocs({key : 'z'});
+      }).then(function (res) {
+        res.rows.should.have.length(0, 'correctly return rows');
+        res.total_rows.should.equal(800, 'correctly return total_rows');
+        done();
+      }, done);
+    });
+
     it('test total_rows with both skip and limit', function (done) {
       var db = new PouchDB(dbs.name);
       var docs = {
diff --git a/tests/integration/test.attachments.js b/tests/integration/test.attachments.js
index e87efbc2df..981ef6e284 100644
--- a/tests/integration/test.attachments.js
+++ b/tests/integration/test.attachments.js
@@ -1,5 +1,7 @@
 'use strict';
 
+const should = require('chai').should();
+
 var adapters = ['local', 'http'];
 var repl_adapters = [
   ['local', 'http'],
@@ -2050,12 +2052,84 @@ adapters.forEach(function (adapter) {
      } catch (caughtErr) {
        err = caughtErr;
      }
+      should.not.exist(res);
+
+      if (adapter === 'local') {
+        err.message.should.equal('missing');
+        // TODO indexeddb errors should probably have .reason set
+        if (db.adapter !== 'indexeddb') {
+          err.reason.should.equal('missing');
+        }
+      } else if (adapter === 'http') {
+        const serverType = await testUtils.getServerType();
+        if (serverType === 'couchdb') {
+          err.status.should.equal(400);
+          const body = await err.json();
+          body.reason.should.equal('_local documents do not accept attachments.');
+        } else if (serverType === 'pouchdb-express-router' || serverType === 'express-pouchdb') {
+          err.status.should.equal(404);
+          const body = await err.json();
+          body.reason.should.equal('missing');
+        } else {
+          throw new Error(`No handling for server type: '${serverType}'`);
+        }
+      } else {
+        throw new Error(`No handling for adapter: '${adapter}'`);
+      }
+    });
+
+    it('Test getAttachment for _local doc - should not return non-existent attachment', async () => {
+      const db = new PouchDB(dbs.name);
+      await db.put(binAttDocLocal);
+
+      let res, err;
+      try {
+        res = await db.getAttachment('_local/bin_doc', 'not-real.txt');
+      } catch (caughtErr) {
+        err = caughtErr;
+      }
+      should.not.exist(res);
 
      if (adapter === 'local') {
-        if (db.adapter === 'indexeddb') {
-          const data = await testUtils.readBlobPromise(res);
-          data.should.equal('This is a base64 encoded text', 'correct data');
+        err.message.should.equal('missing');
+        // TODO indexeddb errors should probably have .reason set
+        if (db.adapter !== 'indexeddb') {
+          err.reason.should.equal('missing');
+        }
+      } else if (adapter === 'http') {
+        const serverType = await testUtils.getServerType();
+        if (serverType === 'couchdb') {
+          err.status.should.equal(400);
+          const body = await err.json();
+          body.reason.should.equal('_local documents do not accept attachments.');
+        } else if (serverType === 'pouchdb-express-router' || serverType === 'express-pouchdb') {
+          err.status.should.equal(404);
+          const body = await err.json();
+          body.reason.should.equal('missing');
        } else {
+          throw new Error(`No handling for server type: '${serverType}'`);
+        }
+      } else {
+        throw new Error(`No handling for adapter: '${adapter}'`);
+      }
+    });
+
+    it('Test getAttachment for _local doc - should not return attachment on non-existent doc', async () => {
+      const db = new PouchDB(dbs.name);
+      await db.put(binAttDocLocal);
+
+      let res, err;
+      try {
+        res = await db.getAttachment('_local/not_a_doc', 'not-real.txt');
+      } catch (caughtErr) {
+        err = caughtErr;
+      }
+      should.not.exist(res);
+
+      if (adapter === 'local') {
+        err.message.should.equal('missing');
+        // TODO indexeddb errors should probably have .reason set
+        if (db.adapter !== 'indexeddb') {
          err.reason.should.equal('missing');
        }
      } else if (adapter === 'http') {