diff --git a/bin/build-site.js b/bin/build-site.js index 87e57ffa2c..4468072bc3 100755 --- a/bin/build-site.js +++ b/bin/build-site.js @@ -30,7 +30,7 @@ function buildCSS() { } function buildJekyll(path) { - // Dont rebuild on website artifacts being written + // Don't rebuild on website artifacts being written if (path && /^_site/.test(path.relative)) { return; } diff --git a/bin/test-browser.js b/bin/test-browser.js index f9e1561f01..f0f91e2826 100755 --- a/bin/test-browser.js +++ b/bin/test-browser.js @@ -57,10 +57,10 @@ testUrl += '?'; testUrl += new URLSearchParams(pickBy(qs, identity)); class RemoteRunner { - constructor() { + constructor(browser) { + this.browser = browser; this.handlers = {}; - this.completed = false; - this.failed = false; + this.handleEvent = this.handleEvent.bind(this); } on(name, handler) { @@ -72,46 +72,47 @@ class RemoteRunner { handlers[name].push(handler); } - handleEvents(events) { - var handlers = this.handlers; - - events.forEach((event) => { - this.completed = this.completed || event.name === 'end'; - this.failed = this.failed || event.name === 'fail'; - + async handleEvent(event) { + try { var additionalProps = ['pass', 'fail', 'pending'].indexOf(event.name) === -1 ? {} : { slow: event.obj.slow ? function () { return event.obj.slow; } : function () { return 60; }, fullTitle: event.obj.fullTitle ? 
function () { return event.obj.fullTitle; } : undefined }; var obj = Object.assign({}, event.obj, additionalProps); - handlers[event.name].forEach(function (handler) { + this.handlers[event.name].forEach(function (handler) { handler(obj, event.err); }); - if (event.logs && event.logs.length > 0) { - event.logs.forEach(function (line) { - if (line.type === 'log') { - console.log(line.content); - } else if (line.type === 'error') { - console.error(line.content); - } else { - console.error('Invalid log line', line); - } - }); - console.log(); + switch (event.name) { + case 'fail': this.handleFailed(); break; + case 'end': this.handleEnd(); break; } - }); - } + } catch (e) { + console.error('Tests failed:', e); - bail() { - var handlers = this.handlers; + await this.browser.close(); + process.exit(3); + } + } - handlers['end'].forEach(function (handler) { - handler(); - }); + async handleEnd(failed) { + await this.browser.close(); + process.exit(!process.env.PERF && failed ? 1 : 0); + } - this.completed = true; + handleFailed() { + if (bail) { + try { + this.handlers['end'].forEach(function (handler) { + handler(); + }); + } catch (e) { + console.log('An error occurred while bailing:', e); + } finally { + this.handleEnd(true); + } + } } } @@ -142,7 +143,13 @@ async function startTest() { console.log('Starting', browserName, 'on', testUrl); - const runner = new RemoteRunner(); + const options = { + headless: true, + }; + const browser = await playwright[browserName].launch(options); + const page = await browser.newPage(); + + const runner = new RemoteRunner(browser); new MochaSpecReporter(runner); new BenchmarkConsoleReporter(runner); @@ -154,11 +161,14 @@ async function startTest() { new BenchmarkJsonReporter(runner); } - const options = { - headless: true, - }; - const browser = await playwright[browserName].launch(options); - const page = await browser.newPage(); + page.exposeFunction('handleMochaEvent', runner.handleEvent); + page.addInitScript(() => { + 
window.addEventListener('message', (e) => { + if (e.data.type === 'mocha') { + window.handleMochaEvent(e.data.details); + } + }); + }); page.on('pageerror', err => { if (browserName === 'webkit' && err.toString() @@ -174,46 +184,14 @@ async function startTest() { process.exit(1); }); - if (process.env.BROWSER_CONSOLE) { - page.on('console', message => { - const { url, lineNumber } = message.location(); - console.log('BROWSER', message.type().toUpperCase(), `${url}:${lineNumber}`, message.text()); - }); - } + page.on('console', message => { + console.log(message.text()); + }); await page.goto(testUrl); const userAgent = await page.evaluate('navigator.userAgent'); console.log('Testing on:', userAgent); - - const interval = setInterval(async () => { - try { - const events = await page.evaluate('window.testEvents()'); - runner.handleEvents(events); - - if (runner.completed || (runner.failed && bail)) { - if (!runner.completed && runner.failed) { - try { - runner.bail(); - } catch (e) { - // Temporary debugging of bailing failure - console.log('An error occurred while bailing:'); - console.log(e); - } - } - - clearInterval(interval); - await browser.close(); - process.exit(!process.env.PERF && runner.failed ? 
1 : 0); - } - } catch (e) { - console.error('Tests failed:', e); - - clearInterval(interval); - await browser.close(); - process.exit(3); - } - }, 1000); } devserver.start(function () { diff --git a/docs/_posts/2014-04-01-pouchdb-2.1.0.md b/docs/_posts/2014-04-01-pouchdb-2.1.0.md index 92b7afbc93..2071b8072b 100644 --- a/docs/_posts/2014-04-01-pouchdb-2.1.0.md +++ b/docs/_posts/2014-04-01-pouchdb-2.1.0.md @@ -26,6 +26,6 @@ This release contains a database migration, which means that once you upgrade yo * Improve error reporting from replicator ([#1714](http://github.com/daleharvey/pouchdb/issues/1714)) * Switch to readAsArrayBuffer for IE binary support ([#1667](http://github.com/daleharvey/pouchdb/issues/1667)) * Add `npm run shell` for quick shell experimenting ([#1610](http://github.com/daleharvey/pouchdb/issues/1610)) - * Ensure we dont lose data when browsers upgrade from WebSQL to IDB ([#1289](http://github.com/daleharvey/pouchdb/issues/1289)) + * Ensure we don't lose data when browsers upgrade from WebSQL to IDB ([#1289](http://github.com/daleharvey/pouchdb/issues/1289)) ###### [Complete list of changes](https://github.com/daleharvey/pouchdb/compare/2.0.1...2.1.0) diff --git a/docs/_posts/2014-09-07-pouchdb-3.0.5.md b/docs/_posts/2014-09-07-pouchdb-3.0.5.md index 3134ac518a..b8319a76ad 100644 --- a/docs/_posts/2014-09-07-pouchdb-3.0.5.md +++ b/docs/_posts/2014-09-07-pouchdb-3.0.5.md @@ -25,7 +25,7 @@ We literally wrote [a custom JSON parser](https://github.com/nolanlawson/vuvuzel The ["infinite recursion" bug](https://github.com/pouchdb/pouchdb/issues/2543) shows up when you have many (> 1000) revisions to the same document. For instance, imagine a text editor that `put()`s a new version of the document for every keystroke. This is fine in traditional databases, but it can be a big problem in PouchDB/CouchDB, because we store the entire history of the database. -Make no mistake: inifinitely-large revision histories will now work in PouchDB 3.0.5. 
However, it's a lot like riding a bicycle without a helmet: you _can_ do it, but you probably shouldn't. +Make no mistake: infinitely-large revision histories will now work in PouchDB 3.0.5. However, it's a lot like riding a bicycle without a helmet: you _can_ do it, but you probably shouldn't. Your mental model for this can be something like Git. Imagine a new `git commit` for every keystroke, and you'll see why it might be a bit problematic. @@ -45,4 +45,4 @@ Your mental model for this can be something like Git. Imagine a new `git commit` Of course, you can always `compact()` to remove old versions, but this just removes the document data. PouchDB still needs to maintain a permanent tree of the `_rev` hashes in order for the replication algorithm to work properly. If this tree grows excessively, it can bloat your database, whether or not you do compaction. -What this means is that you should be very careful about updating your documents. Try to update your documents in larger batches, or use a pattern like [delta-pouch](https://github.com/redgeoff/delta-pouch) to avoid creating new revisions altogether. \ No newline at end of file +What this means is that you should be very careful about updating your documents. Try to update your documents in larger batches, or use a pattern like [delta-pouch](https://github.com/redgeoff/delta-pouch) to avoid creating new revisions altogether. diff --git a/docs/_posts/2014-10-26-10-things-i-learned-from-reading-and-writing-the-pouchdb-source.md b/docs/_posts/2014-10-26-10-things-i-learned-from-reading-and-writing-the-pouchdb-source.md index e9e35cf76b..b17172de48 100644 --- a/docs/_posts/2014-10-26-10-things-i-learned-from-reading-and-writing-the-pouchdb-source.md +++ b/docs/_posts/2014-10-26-10-things-i-learned-from-reading-and-writing-the-pouchdb-source.md @@ -43,7 +43,7 @@ User agent sniffing! Yes, we should be ashamed of ourselves. 
But here's why we d * Additionally, if you specify anywhere between 0 and 5000000, Safari and iOS will use that size as a hint for when, precisely, to show the popup. And in the case of PouchDB, we need to avoid the popup in our automated tests, because Selenium doesn't give us a way to press the "OK" button, meaning our tests would just fail if we request too much. So the ideal size to request is 0. * However, in **PhantomJS** and older WebKit (Safari ~5), if you request 0, then it will blow up. -For the recored, here's what the dreaded Safari popup looks like: +For the record, here's what the dreaded Safari popup looks like: {% include img.html src="safari_popup.png" alt="annoying Safari popup" %} diff --git a/docs/_posts/2014-11-10-3.1.0.md b/docs/_posts/2014-11-10-3.1.0.md index 9fdf1695c1..23bb46c88a 100644 --- a/docs/_posts/2014-11-10-3.1.0.md +++ b/docs/_posts/2014-11-10-3.1.0.md @@ -31,7 +31,7 @@ We keep a list of [plugins and related external projects]({{ site.baseurl }}/ext * PouchDB [Dump](https://github.com/nolanlawson/pouchdb-dump-cli) and [Load](https://github.com/nolanlawson/pouchdb-load) are designed to help you load data into your newly started PouchDB app faster than replication will do currently. * If you want to use PouchDB with Angular, you are in luck, there are now 4! adapters to help you integrate them: [angular-pouchdb](https://github.com/wspringer/angular-pouchdb), [Factoryng](https://github.com/redgeoff/factoryng), [ngPouch](https://github.com/jrhicks/ngPouch), [ng-pouchdb](https://github.com/danielzen/ng-pouchdb). - * Ember users arent left in the wild: [ember-pouchdb](https://github.com/taras/ember-pouchdb) should get you going with PouchDB and Ember. + * Ember users aren't left in the wild: [ember-pouchdb](https://github.com/taras/ember-pouchdb) should get you going with PouchDB and Ember. * If you are using Blobs with PouchDB, you probably want to take a look at [blob-util](https://github.com/nolanlawson/blob-util). 
### Bug fixes Galore diff --git a/docs/_posts/2014-11-27-testing-pouchdb.md b/docs/_posts/2014-11-27-testing-pouchdb.md index d69f783e6f..b0a2b2db91 100644 --- a/docs/_posts/2014-11-27-testing-pouchdb.md +++ b/docs/_posts/2014-11-27-testing-pouchdb.md @@ -45,7 +45,7 @@ We use [Selenium](http://www.seleniumhq.org/) to drive tests in the browser, and We are also finding some platforms (particularly iPhone and Internet Explorer) will become unstable on Saucelabs. The errors almost always come from our code, but it is hard to maintain a reliable test suite when the underlying platform changes and you have little control over it. -Before settling on Selenium, I had previously tried out [Testling](https://ci.testling.com/), and similiarly to Tape it was very easy to get started but opinionated. It was also broken on OSX with pull requests containing fixes that hadn't been touched for months. Selenium had the advantage that it was very widely used, and new frameworks or platforms are likely to have WebDriver support early on. +Before settling on Selenium, I had previously tried out [Testling](https://ci.testling.com/), and similarly to Tape it was very easy to get started but opinionated. It was also broken on OSX with pull requests containing fixes that hadn't been touched for months. Selenium had the advantage that it was very widely used, and new frameworks or platforms are likely to have WebDriver support early on. I have however found Selenium as a project frustrating to use, from the first time visiting [http://www.seleniumhq.org/](http://www.seleniumhq.org/) to understanding what I needed to download and write a first test that started a browser, it was an unclear and confusing process. Even today getting ChromeDriver started correctly gets me confused, also the download cost of 30MB for what is mostly a proxy server is an annoyance. 
diff --git a/docs/_posts/2015-03-05-taming-the-async-beast-with-es7.md b/docs/_posts/2015-03-05-taming-the-async-beast-with-es7.md index c817962893..7a9b111ffc 100644 --- a/docs/_posts/2015-03-05-taming-the-async-beast-with-es7.md +++ b/docs/_posts/2015-03-05-taming-the-async-beast-with-es7.md @@ -324,7 +324,7 @@ So if you want to play with it yourself, I've put together a [small demo library ### Conclusion -Async functions are an empowering new concept in ES7. They give us back our lost `return`s and `try`/`catch`es, and they reward the knowledge we've already gained from writing synchronous code with new idiioms that look a lot like the old ones, but are much more performant. +Async functions are an empowering new concept in ES7. They give us back our lost `return`s and `try`/`catch`es, and they reward the knowledge we've already gained from writing synchronous code with new idioms that look a lot like the old ones, but are much more performant. Most importantly, async functions make APIs like PouchDB's a lot easier to work with. So hopefully this will lead to fewer user errors and confusion, as well as more elegant and readable code. 
diff --git a/docs/_posts/2015-08-03-pouchdb-4.0.0-ballast-overboard.md b/docs/_posts/2015-08-03-pouchdb-4.0.0-ballast-overboard.md index dae55da226..e251383df7 100644 --- a/docs/_posts/2015-08-03-pouchdb-4.0.0-ballast-overboard.md +++ b/docs/_posts/2015-08-03-pouchdb-4.0.0-ballast-overboard.md @@ -72,7 +72,7 @@ replication.on('paused', doneFun); * Fix attachment length for stubs ([#3963](https://github.com/pouchdb/pouchdb/issues/3963)) * Reduce the number of checkpoint mismatches ([#4009](https://github.com/pouchdb/pouchdb/issues/4009)) * Fallback from checkpoint mismatch correctly (less replication start from the beginning) ([#3999](https://github.com/pouchdb/pouchdb/issues/3999)) -* Dont fail replication when attachment request fails ([#4021](https://github.com/pouchdb/pouchdb/issues/4021)) +* Don't fail replication when attachment request fails ([#4021](https://github.com/pouchdb/pouchdb/issues/4021)) * Fix check for `localStorage` in Safari private mode ([#4070](https://github.com/pouchdb/pouchdb/issues/4070)) ### Coverage improvements diff --git a/docs/_posts/2015-11-03-cover-all.md b/docs/_posts/2015-11-03-cover-all.md index 83a1d55543..669dc2c066 100644 --- a/docs/_posts/2015-11-03-cover-all.md +++ b/docs/_posts/2015-11-03-cover-all.md @@ -24,7 +24,7 @@ We have had issues in the past with how to manage npm dependencies. 
Version rang ### Bugfixes * Explain http error caused by checking for `_bulk_get` ([#4435](http://github.com/pouchdb/pouchdb/issues/4435)) -* Dont create a database just to delete it ([#4467](http://github.com/pouchdb/pouchdb/issues/4467)) +* Don't create a database just to delete it ([#4467](http://github.com/pouchdb/pouchdb/issues/4467)) * Add friendly err msg to common leveldown err ([#4447](http://github.com/pouchdb/pouchdb/issues/4447)) * Avoid empty bulkGet() ([#4474](http://github.com/pouchdb/pouchdb/issues/4474)) * Fix http url with port 80 ([#247](https://github.com/pouchdb/express-pouchdb/issues/247)) diff --git a/docs/_posts/2016-06-06-pouchdb-5.4.0.md b/docs/_posts/2016-06-06-pouchdb-5.4.0.md index 7d240edd38..094a7dc034 100644 --- a/docs/_posts/2016-06-06-pouchdb-5.4.0.md +++ b/docs/_posts/2016-06-06-pouchdb-5.4.0.md @@ -92,7 +92,7 @@ This API was not removed, but will log a warning if you try to use it. * ([#3923](https://github.com/pouchdb/pouchdb/issues/3923)) - Add ability to set url as prefix * ([#4922](https://github.com/pouchdb/pouchdb/issues/4922)) - Do not call 'destroyed' twice for http instances * ([#3609](https://github.com/pouchdb/pouchdb/issues/3609)) - opts.view implies filter=_view -* ([#4444](https://github.com/pouchdb/pouchdb/issues/4444)) - Dont leak event emitters +* ([#4444](https://github.com/pouchdb/pouchdb/issues/4444)) - Don't leak event emitters * ([#5197](https://github.com/pouchdb/pouchdb/issues/5197)) - detect WebSQL invalid on WKWebView * ([#5200](https://github.com/pouchdb/pouchdb/issues/5200)) - Don't clone special objects like Workers * ([#5196](https://github.com/pouchdb/pouchdb/issues/5196)) - fix excessively long replication loop @@ -123,7 +123,7 @@ This API was not removed, but will log a warning if you try to use it. 
* ([#4575](https://github.com/pouchdb/pouchdb/issues/4575)) - Fix tests against CouchDB master * ([#5260](https://github.com/pouchdb/pouchdb/issues/5260)) - Catches unhandled rejections in tests -* ([#2426](https://github.com/pouchdb/pouchdb/issues/2426)) - Add test to ensure doc_ids dont prevent replication +* ([#2426](https://github.com/pouchdb/pouchdb/issues/2426)) - Add test to ensure doc_ids don't prevent replication * ([#4767](https://github.com/pouchdb/pouchdb/issues/4767)) - 100% code coverage * ([#5186](https://github.com/pouchdb/pouchdb/issues/5186)) - build in Node 5, test in other Nodes diff --git a/docs/_posts/2018-01-23-pouchdb-6.4.2.md b/docs/_posts/2018-01-23-pouchdb-6.4.2.md index be58534067..f8942e991b 100644 --- a/docs/_posts/2018-01-23-pouchdb-6.4.2.md +++ b/docs/_posts/2018-01-23-pouchdb-6.4.2.md @@ -52,7 +52,7 @@ Currently it looks likely that we will remove our [Promise Polyfill](https://git ## Other Changes -- [#7019](https://github.com/pouchdb/pouchdb/issues/7019) - Dont crash on empty HTTP requests +- [#7019](https://github.com/pouchdb/pouchdb/issues/7019) - Don't crash on empty HTTP requests - [#7008](https://github.com/pouchdb/pouchdb/issues/7008) - Add Glitch bug helper - [#6954](https://github.com/pouchdb/pouchdb/issues/6954) - Fix hang when storage quota is exceeded - [#6958](https://github.com/pouchdb/pouchdb/issues/6958) - Fix PouchDB find with zero values diff --git a/docs/_posts/2022-11-11-pouchdb-7.3.1.md b/docs/_posts/2022-11-11-pouchdb-7.3.1.md index 3d63433f62..a0bbb87f23 100644 --- a/docs/_posts/2022-11-11-pouchdb-7.3.1.md +++ b/docs/_posts/2022-11-11-pouchdb-7.3.1.md @@ -12,9 +12,9 @@ We're happy to announce the release of PouchDB 7.3.1, bringing in some fixes and ### Bugfixes -* [7db104c5](https://github.com/pouchdb/pouchdb/commit/7db104c56248738598feb7ca1f22bc3b7bbd69be) ([#8463](https://github.com/pouchdb/pouchdb/issues/8463)) - check if docFieldValue is also null value. typeof null returns object in javasctipy. 
Fixes a bug when querying against and array with a null value, $elemMatch and $allMatch. +* [7db104c5](https://github.com/pouchdb/pouchdb/commit/7db104c56248738598feb7ca1f22bc3b7bbd69be) ([#8463](https://github.com/pouchdb/pouchdb/issues/8463)) - check if docFieldValue is also null value. typeof null returns object in javascript. Fixes a bug when querying against an array with a null value, $elemMatch and $allMatch. * [719502dc](https://github.com/pouchdb/pouchdb/commit/719502dc6a75851dce9d0a35e44929b25943588c) ([#8225](https://github.com/pouchdb/pouchdb/issues/8225)) - Added check for blob.type setter -* [d5bb7bd6](https://github.com/pouchdb/pouchdb/commit/d5bb7bd6330339448f5918592bd69df78166bad2) ([#8389](https://github.com/pouchdb/pouchdb/pull/8389)) - `_id` was mappped to `data._id` instead of id +* [d5bb7bd6](https://github.com/pouchdb/pouchdb/commit/d5bb7bd6330339448f5918592bd69df78166bad2) ([#8389](https://github.com/pouchdb/pouchdb/pull/8389)) - `_id` was mapped to `data._id` instead of id * [bbeb7e55](https://github.com/pouchdb/pouchdb/commit/bbeb7e557658fb405b5cf91dec50a79faf5278a3) ([#8531](https://github.com/pouchdb/pouchdb/pull/8531)) - fix sorted find including deleted docs ### Testing diff --git a/docs/_posts/2022-12-14-pouchdb-8.0.0.md b/docs/_posts/2022-12-14-pouchdb-8.0.0.md index 87ed000058..c64a6212b2 100644 --- a/docs/_posts/2022-12-14-pouchdb-8.0.0.md +++ b/docs/_posts/2022-12-14-pouchdb-8.0.0.md @@ -96,7 +96,7 @@ Example result: * [774976a0](https://github.com/pouchdb/pouchdb/commit/774976a02781c3953727244706adfb0d5cce420a) feat: on-update view purging * [94ec8932](https://github.com/pouchdb/pouchdb/commit/94ec8932fd79a2993c82a5381ae34d64c286375f) feat(core): `purged_infos_limit` support * [ce8f8b30](https://github.com/pouchdb/pouchdb/commit/ce8f8b308f4509def96272693f797deac05b55e0) [wip] Purge leaves from multiple roots -* [4252a0f0](https://github.com/pouchdb/pouchdb/commit/4252a0f0ba71ab63f493709fe81e5e63baeb5dee) docs(purge): show result 
object seperately +* [4252a0f0](https://github.com/pouchdb/pouchdb/commit/4252a0f0ba71ab63f493709fe81e5e63baeb5dee) docs(purge): show result object separately * [b856173b](https://github.com/pouchdb/pouchdb/commit/b856173b436c201d86c0c63319eaf0dc8396a772) chore(purge): add code comments * [d5f4250a](https://github.com/pouchdb/pouchdb/commit/d5f4250a3539b62c430203441bcb02fa41e00ae2) chore(purge): remove unneeded logs and comments * [2c0ddbb0](https://github.com/pouchdb/pouchdb/commit/2c0ddbb01e9d01ba2b4ccc754ff855a15173208c) feat(purge): simplify implementation of removeLeafFromTree() diff --git a/docs/external.md b/docs/external.md index 74537514e1..35cad124bd 100644 --- a/docs/external.md +++ b/docs/external.md @@ -215,7 +215,7 @@ AngularJS binding for PouchDB. #### [ampersand-collection-pouchdb-mixin](https://github.com/svnlto/ampersand-collection-pouchdb-mixin) -A mixin for extending ampersand-collection with pouchdb persistance. +A mixin for extending ampersand-collection with pouchdb persistence. ### Backbone diff --git a/docs/getting-started.md b/docs/getting-started.md index 6898366c50..d7d7d7bf3b 100644 --- a/docs/getting-started.md +++ b/docs/getting-started.md @@ -53,7 +53,7 @@ PouchDB is now installed in your app and ready to use! (In production, you shoul The rest of the work will be done inside `app.js`. We will start by creating a database to enter your todos. To create a database simply instantiate a new PouchDB object with the name of the database: {% highlight js %} -// EDITING STARTS HERE (you dont need to edit anything above this line) +// EDITING STARTS HERE (you don't need to edit anything above this line) var db = new PouchDB('todos'); var remoteCouch = false; @@ -100,7 +100,7 @@ Once you have included this code, you should be able to refresh the page to see {% include anchor.html class="h3" title="Update the UI" hash="update_the_ui" %} -We dont want to refresh the page to see new items. 
More typically you would update the UI manually when you write data to it, however, in PouchDB you may be syncing data remotely, so you want to make sure you update whenever the remote data changes. To do this we will call `db.changes` which subscribes to updates to the database, wherever they come from. You can enter this code between the `remoteCouch` and `addTodo` declaration: +We don't want to refresh the page to see new items. More typically you would update the UI manually when you write data to it, however, in PouchDB you may be syncing data remotely, so you want to make sure you update whenever the remote data changes. To do this we will call `db.changes` which subscribes to updates to the database, wherever they come from. You can enter this code between the `remoteCouch` and `addTodo` declaration: {% highlight js %} var remoteCouch = false; @@ -127,7 +127,7 @@ function checkboxChanged(todo, event) { } {% endhighlight %} -This is similar to creating a document, however the document must also contain a `_rev` field (in addition to `_id`), otherwise the write will be rejected. This ensures that you dont accidently overwrite changes to a document. +This is similar to creating a document, however the document must also contain a `_rev` field (in addition to `_id`), otherwise the write will be rejected. This ensures that you don't accidentally overwrite changes to a document. You can test that this works by checking a todo item and refreshing the page. It should stay checked. @@ -141,7 +141,7 @@ function deleteButtonPressed(todo) { } {% endhighlight %} -Similiar to editing a document, both the `_id` and `_rev` properties are required. You may notice that we are passing around the full object that we previously read from the database. You can of course manually construct the object, like: `{_id: todo._id, _rev: todo._rev}`, but passing around the existing object is usually more convenient and less error prone. 
+Similar to editing a document, both the `_id` and `_rev` properties are required. You may notice that we are passing around the full object that we previously read from the database. You can of course manually construct the object, like: `{_id: todo._id, _rev: todo._rev}`, but passing around the existing object is usually more convenient and less error prone. {% include anchor.html class="h3" title="Complete rest of the Todo UI" hash="complete_todo_ui" %} @@ -189,7 +189,7 @@ You can check that CORS is now enabled by visiting [http://localhost:5984/_utils Now we will have the todo list sync. Back in `app.js` we need to specify the address of the remote database. Remember to replace `user`, `pass` and `myname.example.com` with the credentials of your own CouchDB instance: {% highlight js %} -// EDITING STARTS HERE (you dont need to edit anything above this line) +// EDITING STARTS HERE (you don't need to edit anything above this line) var db = new PouchDB('todos'); var remoteCouch = 'http://user:pass@myname.example.com/todos'; diff --git a/docs/gql.md b/docs/gql.md index d6d3b1d47e..19fdd346d4 100644 --- a/docs/gql.md +++ b/docs/gql.md @@ -169,7 +169,7 @@ The above query will return ## Functions -GQL contains a number of operators and functions that can operate on retrived documents. +GQL contains a number of operators and functions that can operate on retrieved documents. ### Aggregation Functions @@ -203,7 +203,7 @@ identifier as their input. 
Scalar functions may only appear in the select and l With these documents in the database - {name!: "pencil", price: 2, discount: 0.7, vender: "store1"}, + {name!: "pencil", price: 2, discount: 0.7, vendor: "store1"}, {name!: "pen", price:3, discount: 2, vendor: "store2"} The above query will return diff --git a/packages/node_modules/pouchdb-adapter-idb/src/index.js b/packages/node_modules/pouchdb-adapter-idb/src/index.js index ea6d13acdc..586ca50f03 100644 --- a/packages/node_modules/pouchdb-adapter-idb/src/index.js +++ b/packages/node_modules/pouchdb-adapter-idb/src/index.js @@ -15,7 +15,7 @@ import { import idbBulkDocs from './bulkDocs'; import idbAllDocs from './allDocs'; -import checkBlobSupport from './blobSupport'; +import { checkBlobSupport } from 'pouchdb-adapter-utils'; import countDocs from './countDocs'; import { MISSING_DOC, @@ -783,7 +783,7 @@ function init(api, opts, callback) { // if (!blobSupportPromise) { // make sure blob support is only checked once - blobSupportPromise = checkBlobSupport(txn); + blobSupportPromise = checkBlobSupport(txn, DETECT_BLOB_SUPPORT_STORE, 'key'); } blobSupportPromise.then(function (val) { diff --git a/packages/node_modules/pouchdb-adapter-indexeddb/README.md b/packages/node_modules/pouchdb-adapter-indexeddb/README.md index 96cca95847..1c77694091 100644 --- a/packages/node_modules/pouchdb-adapter-indexeddb/README.md +++ b/packages/node_modules/pouchdb-adapter-indexeddb/README.md @@ -53,8 +53,8 @@ This is highly unlikely to be fixed in the future by PouchDB, as it would requir ### Mango Queries with `partial_filter_selector` -Mango indexes with a `partial_filter_selector` are using map-reduce views. Simually to the `idb`-adabter. All performance gains of native IndexedDB indexes will be lost. +Mango indexes with a `partial_filter_selector` are using map-reduce views. All performance gains of native IndexedDB indexes will be lost. -Also ordering is CouchDB combatible. +Also ordering is CouchDB compatible. 
This fallback is required because IndexedDB indexes only work on all db entries. And not on a subsection. diff --git a/packages/node_modules/pouchdb-adapter-indexeddb/src/allDocs.js b/packages/node_modules/pouchdb-adapter-indexeddb/src/allDocs.js index 7efd629686..26d95d79ca 100644 --- a/packages/node_modules/pouchdb-adapter-indexeddb/src/allDocs.js +++ b/packages/node_modules/pouchdb-adapter-indexeddb/src/allDocs.js @@ -77,7 +77,7 @@ export default function (txn, metadata, opts, callback) { return callback(txn.error); } - // TODO: Weird hack, I dont like it + // TODO: Weird hack, I don't like it if (opts.limit === 0) { var returnVal = { total_rows: metadata.doc_count, @@ -134,7 +134,8 @@ export default function (txn, metadata, opts, callback) { } if (opts.attachments && docData._attachments) { for (var name in docData._attachments) { - processing.push(processAttachment(name, doc, row.doc, opts.binary)); + processing.push(processAttachment(name, doc, row.doc, opts.binary, + metadata.idb_attachment_format)); } } } diff --git a/packages/node_modules/pouchdb-adapter-indexeddb/src/bulkDocs.js b/packages/node_modules/pouchdb-adapter-indexeddb/src/bulkDocs.js index 81ece5fcc8..3cfd8057c9 100644 --- a/packages/node_modules/pouchdb-adapter-indexeddb/src/bulkDocs.js +++ b/packages/node_modules/pouchdb-adapter-indexeddb/src/bulkDocs.js @@ -10,7 +10,9 @@ import { } from 'pouchdb-errors'; import { - binaryStringToBlobOrBuffer as binStringToBlobOrBuffer + atob, + binaryStringToBlobOrBuffer as binStringToBlobOrBuffer, + blobOrBufferToBase64 as blufferToBase64, } from 'pouchdb-binary-utils'; import { parseDoc } from 'pouchdb-adapter-utils'; @@ -322,9 +324,24 @@ export default function (api, req, opts, metadata, dbOpts, idbChanges, callback) } catch (e) { return Promise.reject(createError(BAD_ARG, 'Attachment is not a valid base64 string')); } - attachment.data = binStringToBlobOrBuffer(binData, attachment.content_type); + if (metadata.idb_attachment_format === 'binary') { + 
attachment.data = binStringToBlobOrBuffer(binData, attachment.content_type); + } } else { binData = attachment.data; + if (metadata.idb_attachment_format === 'base64') { + // TODO could run these in parallel, if we cared + return new Promise(resolve => { + blufferToBase64(attachment.data, function (b64) { + attachment.data = b64; + md5(binData, function (result) { + attachment.digest = 'md5-' + result; + attachment.length = binData.size || binData.length || 0; + resolve(attachment); + }); + }); + }); + } } return new Promise(function (resolve) { diff --git a/packages/node_modules/pouchdb-adapter-indexeddb/src/changes.js b/packages/node_modules/pouchdb-adapter-indexeddb/src/changes.js index 523b36ee29..db6d9a87c5 100644 --- a/packages/node_modules/pouchdb-adapter-indexeddb/src/changes.js +++ b/packages/node_modules/pouchdb-adapter-indexeddb/src/changes.js @@ -72,7 +72,7 @@ export default function (txn, idbChanges, api, dbOpts, opts) { if (opts.include_docs && opts.attachments && doc.data._attachments) { var promises = []; for (var name in doc.data._attachments) { - var p = processAttachment(name, doc, change.doc, opts.binary); + var p = processAttachment(name, doc, change.doc, opts.binary, api.blobSupport); // We add the processing promise to 2 arrays, one tracks all // the promises needed before we fire onChange, the other // ensure we process all attachments before onComplete diff --git a/packages/node_modules/pouchdb-adapter-indexeddb/src/getAttachment.js b/packages/node_modules/pouchdb-adapter-indexeddb/src/getAttachment.js index 6c31942d88..b3d07789e1 100644 --- a/packages/node_modules/pouchdb-adapter-indexeddb/src/getAttachment.js +++ b/packages/node_modules/pouchdb-adapter-indexeddb/src/getAttachment.js @@ -1,11 +1,24 @@ 'use strict'; -import { btoa, readAsBinaryString } from 'pouchdb-binary-utils'; +import { + base64StringToBlobOrBuffer as b64StringToBluffer, + btoa, + readAsBinaryString, +} from 'pouchdb-binary-utils'; export default function 
getAttachment(docId, attachId, attachment, opts, cb) { const doc = opts.metadata; const data = doc.attachments[attachment.digest].data; + if (typeof data === 'string') { + if (opts.binary) { + cb(null, b64StringToBluffer(data, attachment.content_type)); + } else { + cb(null, data); + } + return; + } + if (opts.binary) { return cb(null, data); } else { diff --git a/packages/node_modules/pouchdb-adapter-indexeddb/src/setup.js b/packages/node_modules/pouchdb-adapter-indexeddb/src/setup.js index 103f2a5c37..c7cfad4b09 100644 --- a/packages/node_modules/pouchdb-adapter-indexeddb/src/setup.js +++ b/packages/node_modules/pouchdb-adapter-indexeddb/src/setup.js @@ -2,6 +2,8 @@ import { uuid } from 'pouchdb-utils'; +import { checkBlobSupport } from 'pouchdb-adapter-utils'; + import { META_STORE, DOC_STORE, rawIndexFields, naturalIndexName, correctIndexFields } from './util'; // @@ -193,7 +195,21 @@ function openDatabase(openDatabases, api, opts, resolve, reject) { metadata.db_uuid = uuid(); } - if (changed) { + if (!('idb_attachment_format' in metadata)) { + // There will be trouble if any browser _stops_ supporting blobs. + + const createBlobDoc = blob => ({ id:'blob-support', blob }); + + checkBlobSupport(txn, META_STORE, createBlobDoc).then(blobSupport => { + // Unfortunate that we have to track this in both the metadata and on + // api, but sometimes we have access to one, sometimes the other (and + // sometimes both). We could change function signatures in index.js + // to make this consistent. + api.blobSupport = metadata.idb_attachment_format = blobSupport ? 
'binary' : 'base64'; + metaStore.put(metadata); + }); + } else if (changed) { + api.blobSupport = metadata.idb_attachment_format; metaStore.put(metadata); } }; diff --git a/packages/node_modules/pouchdb-adapter-indexeddb/src/util.js b/packages/node_modules/pouchdb-adapter-indexeddb/src/util.js index 91e923297b..213743dd26 100644 --- a/packages/node_modules/pouchdb-adapter-indexeddb/src/util.js +++ b/packages/node_modules/pouchdb-adapter-indexeddb/src/util.js @@ -1,7 +1,11 @@ 'use strict'; import { createError, IDB_ERROR } from 'pouchdb-errors'; -import { btoa, readAsBinaryString } from 'pouchdb-binary-utils'; +import { + base64StringToBlobOrBuffer as b64StringToBluffer, + btoa, + readAsBinaryString, +} from 'pouchdb-binary-utils'; import { sanitise } from './rewrite'; var DOC_STORE = 'docs'; @@ -17,10 +21,22 @@ function idbError(callback) { }; } -function processAttachment(name, src, doc, isBinary) { +function processAttachment(name, src, doc, isBinary, attachmentFormat) { delete doc._attachments[name].stub; + if (attachmentFormat === 'base64') { + if (isBinary) { + const att = src.attachments[doc._attachments[name].digest]; + doc._attachments[name].data = b64StringToBluffer(att.data, att.content_type); + } else { + doc._attachments[name].data = + src.attachments[doc._attachments[name].digest].data; + } + delete doc._attachments[name].length; + return Promise.resolve(); + } + if (isBinary) { doc._attachments[name].data = src.attachments[doc._attachments[name].digest].data; diff --git a/packages/node_modules/pouchdb-adapter-idb/src/blobSupport.js b/packages/node_modules/pouchdb-adapter-utils/src/checkBlobSupport.js similarity index 66% rename from packages/node_modules/pouchdb-adapter-idb/src/blobSupport.js rename to packages/node_modules/pouchdb-adapter-utils/src/checkBlobSupport.js index a9f3c3e213..233fd2877b 100644 --- a/packages/node_modules/pouchdb-adapter-idb/src/blobSupport.js +++ b/packages/node_modules/pouchdb-adapter-utils/src/checkBlobSupport.js @@ -1,9 
+1,12 @@ import { blob as createBlob } from 'pouchdb-binary-utils'; -import { DETECT_BLOB_SUPPORT_STORE } from './constants'; // // Blobs are not supported in all versions of IndexedDB, notably -// Chrome <37 and Android <5. In those versions, storing a blob will throw. +// Chrome <37, Android <5 and (some?) webkit-based browsers. +// In those versions, storing a blob will throw. +// +// Example Webkit error: +// > DataCloneError: Failed to store record in an IDBObjectStore: BlobURLs are not yet supported. // // Various other blob bugs exist in Chrome v37-42 (inclusive). // Detecting them is expensive and confusing to users, and Chrome 37-42 @@ -13,10 +16,21 @@ import { DETECT_BLOB_SUPPORT_STORE } from './constants'; // 404 bug: https://code.google.com/p/chromium/issues/detail?id=447916 // FileReader bug: https://code.google.com/p/chromium/issues/detail?id=447836 // -function checkBlobSupport(txn) { +function checkBlobSupport(txn, store, docIdOrCreateDoc) { return new Promise(function (resolve) { var blob = createBlob(['']); - var req = txn.objectStore(DETECT_BLOB_SUPPORT_STORE).put(blob, 'key'); + + let req; + if (typeof docIdOrCreateDoc === 'function') { + // Store may require a specific key path, in which case we can't store the + // blob directly in the store. 
+ const createDoc = docIdOrCreateDoc; + const doc = createDoc(blob); + req = txn.objectStore(store).put(doc); + } else { + const docId = docIdOrCreateDoc; + req = txn.objectStore(store).put(blob, docId); + } req.onsuccess = function () { var matchedChrome = navigator.userAgent.match(/Chrome\/(\d+)/); diff --git a/packages/node_modules/pouchdb-adapter-utils/src/index.js b/packages/node_modules/pouchdb-adapter-utils/src/index.js index 64ae6212f1..97deba41db 100644 --- a/packages/node_modules/pouchdb-adapter-utils/src/index.js +++ b/packages/node_modules/pouchdb-adapter-utils/src/index.js @@ -1,4 +1,5 @@ import allDocsKeysQuery from './allDocsKeysQuery'; +import checkBlobSupport from './checkBlobSupport'; import parseDoc from './parseDoc'; import { invalidIdError, @@ -12,6 +13,7 @@ import updateDoc from './updateDoc'; export { allDocsKeysQuery, + checkBlobSupport, invalidIdError, isDeleted, isLocalId, diff --git a/packages/node_modules/pouchdb-core/src/adapter.js b/packages/node_modules/pouchdb-core/src/adapter.js index 87c19dd5c7..72bf580e7f 100644 --- a/packages/node_modules/pouchdb-core/src/adapter.js +++ b/packages/node_modules/pouchdb-core/src/adapter.js @@ -674,7 +674,7 @@ class AbstractPouchDB extends EventEmitter { }); }).bind(this); - // TODO: I dont like this, it forces an extra read for every + // TODO: I don't like this, it forces an extra read for every // attachment read and enforces a confusing api between // adapter.js and the adapter implementation this.getAttachment = adapterFun('getAttachment', function (docId, attachmentId, opts, callback) { diff --git a/packages/node_modules/pouchdb-merge/src/merge.js b/packages/node_modules/pouchdb-merge/src/merge.js index bceda239f2..e9e3c6e7b4 100644 --- a/packages/node_modules/pouchdb-merge/src/merge.js +++ b/packages/node_modules/pouchdb-merge/src/merge.js @@ -124,7 +124,7 @@ function doMerge(tree, path, dontExpand) { // The paths start at a different position, take the earliest path and // traverse up until 
it as at the same point from root as the path we // want to merge. If the keys match we return the longer path with the - // other merged After stemming we dont want to expand the trees + // other merged After stemming we don't want to expand the trees var t1 = branch.pos < path.pos ? branch : path; var t2 = branch.pos < path.pos ? path : branch; @@ -182,7 +182,7 @@ function doMerge(tree, path, dontExpand) { }; } -// To ensure we dont grow the revision tree infinitely, we stem old revisions +// To ensure we don't grow the revision tree infinitely, we stem old revisions function stem(tree, depth) { // First we break out the tree into a complete list of root to leaf paths var paths = rootToLeaf(tree); @@ -217,7 +217,7 @@ function stem(tree, depth) { }; } - // Then we remerge all those flat trees together, ensuring that we dont + // Then we remerge all those flat trees together, ensuring that we don't // connect trees that would go beyond the depth limit if (result) { result = doMerge(result, node, true).tree; diff --git a/packages/node_modules/pouchdb-utils/src/functionName.js b/packages/node_modules/pouchdb-utils/src/functionName.js index f03213e245..f457452828 100644 --- a/packages/node_modules/pouchdb-utils/src/functionName.js +++ b/packages/node_modules/pouchdb-utils/src/functionName.js @@ -7,7 +7,7 @@ function f() {} var hasName = f.name; var res; -// We dont run coverage in IE +// We don't run coverage in IE /* istanbul ignore else */ if (hasName) { res = function (fun) { diff --git a/tests/integration/utils.js b/tests/integration/utils.js index 0ed9487cb8..a2e84f5878 100644 --- a/tests/integration/utils.js +++ b/tests/integration/utils.js @@ -81,7 +81,7 @@ testUtils.base64Blob = function (blob, callback) { testUtils.adapterUrl = function (adapter, name) { // CouchDB master has problems with cycling databases rapidly - // so give tests seperate names + // so give tests separate names name += '_' + Date.now(); if (adapter === 'http') { diff --git 
a/tests/integration/webrunner.js b/tests/integration/webrunner.js index d86c57671b..4720b5e4b2 100644 --- a/tests/integration/webrunner.js +++ b/tests/integration/webrunner.js @@ -8,72 +8,31 @@ window.removeEventListener("load", startTests); if (remote) { - // Capture logs for test runner output - var logs = []; - - (function () { - - function serializeLogItem(obj, filter, space) { - if (typeof obj === 'string') { - return obj; - } else if (obj instanceof Error) { - return obj.stack; - } else { - return JSON.stringify(obj, filter, space); - } - } - - function wrappedLog(oldLog, type) { - return function () { - var args = Array.prototype.slice.call(arguments); - logs.push({ - type, - content: args.map(function (arg) { - return serializeLogItem(arg); - }).join(' ') - }); - oldLog.apply(console, arguments); - }; - } - - console.log = wrappedLog(console.log, 'log'); - console.error = wrappedLog(console.error, 'error'); - - })(); - - // Capture test events for test runner output - var testEventsBuffer = []; - - window.testEvents = function () { - var events = testEventsBuffer; - testEventsBuffer = []; - return events; - }; - mocha.reporter(function (runner) { var eventNames = ['start', 'end', 'suite', 'suite end', 'pass', 'pending', 'fail']; eventNames.forEach(function (name) { runner.on(name, function (obj, err) { - testEventsBuffer.push({ - name, - obj: obj && { - root: obj.root, - title: obj.title, - duration: obj.duration, - slow: typeof obj.slow === 'function' ? obj.slow() : undefined, - fullTitle: typeof obj.fullTitle === 'function' ? obj.fullTitle() : undefined - }, - err: err && { - actual: err.actual, - expected: err.expected, - showDiff: err.showDiff, - message: err.message, - stack: err.stack, - uncaught: err.uncaught + window.postMessage({ + type: 'mocha', + details: { + name, + obj: obj && { + root: obj.root, + title: obj.title, + duration: obj.duration, + slow: typeof obj.slow === 'function' ? 
obj.slow() : undefined, + fullTitle: typeof obj.fullTitle === 'function' ? obj.fullTitle() : undefined + }, + err: err && { + actual: err.actual, + expected: err.expected, + showDiff: err.showDiff, + message: err.message, + stack: err.stack, + uncaught: err.uncaught + }, }, - logs }); - logs = []; }); }); }); diff --git a/tests/performance/perf.reporter.js b/tests/performance/perf.reporter.js index 3f3c6aee91..365efb99e4 100644 --- a/tests/performance/perf.reporter.js +++ b/tests/performance/perf.reporter.js @@ -9,14 +9,12 @@ var results = { tests: {} }; -// Capture test events for test runner output -var testEventsBuffer = []; - -global.testEvents = function () { - var events = testEventsBuffer; - testEventsBuffer = []; - return events; -}; +function emitMochaEvent(details) { + // NodeJS perf testing just reports with console.log(). + if (typeof window !== 'undefined') { + window.postMessage({ type: 'mocha', details }); + } +} // fix for Firefox max timing entries capped to 150: // https://bugzilla.mozilla.org/show_bug.cgi?id=1331135 @@ -38,11 +36,11 @@ exports.log = log; exports.startSuite = function (suiteName) { log('Starting suite: ' + suiteName + '\n\n'); - testEventsBuffer.push({ name: 'suite', obj: { title: suiteName } }); + emitMochaEvent({ name: 'suite', obj: { title: suiteName } }); }; exports.endSuite = function (suiteName) { - testEventsBuffer.push({ name: 'suite end', obj: { title: suiteName } }); + emitMochaEvent({ name: 'suite end', obj: { title: suiteName } }); }; exports.start = function (testCase, iter) { @@ -61,8 +59,8 @@ exports.end = function (testCase) { obj.numIterations = obj.iterations.length; delete obj.iterations; // keep it simple when reporting log('median: ' + obj.median + ' ms\n'); - testEventsBuffer.push({ name: 'pass', obj: { title: testCase.name } }); - testEventsBuffer.push({ name: 'benchmark:result', obj }); + emitMochaEvent({ name: 'pass', obj: { title: testCase.name } }); + emitMochaEvent({ name: 'benchmark:result', obj }); }; 
exports.startIteration = function (testCase) { @@ -75,7 +73,7 @@ exports.endIteration = function (testCase) { }; exports.startAll = function () { - testEventsBuffer.push({ name: 'start' }); + emitMochaEvent({ name: 'start' }); }; exports.complete = function (adapter) { @@ -97,6 +95,6 @@ exports.complete = function (adapter) { results.adapter = adapter; console.log('=>', JSON.stringify(results, null, ' '), '<='); log('\nTests Complete!\n\n'); - testEventsBuffer.push({ name: 'end', obj: results }); + emitMochaEvent({ name: 'end', obj: results }); };