Merge branch 'master' into shellcheck
alxndrsn authored Nov 22, 2023
2 parents 4da06b7 + 15cd6a0 commit 2c2ed9d
Showing 49 changed files with 226 additions and 212 deletions.
2 changes: 1 addition & 1 deletion bin/build-site.js
@@ -30,7 +30,7 @@ function buildCSS() {
}

function buildJekyll(path) {
// Dont rebuild on website artifacts being written
// Don't rebuild on website artifacts being written
if (path && /^_site/.test(path.relative)) {
return;
}
120 changes: 49 additions & 71 deletions bin/test-browser.js
@@ -57,10 +57,10 @@ testUrl += '?';
testUrl += new URLSearchParams(pickBy(qs, identity));

class RemoteRunner {
constructor() {
constructor(browser) {
this.browser = browser;
this.handlers = {};
this.completed = false;
this.failed = false;
this.handleEvent = this.handleEvent.bind(this);
}

on(name, handler) {
@@ -72,46 +72,47 @@ class RemoteRunner {
handlers[name].push(handler);
}

handleEvents(events) {
var handlers = this.handlers;

events.forEach((event) => {
this.completed = this.completed || event.name === 'end';
this.failed = this.failed || event.name === 'fail';

async handleEvent(event) {
try {
var additionalProps = ['pass', 'fail', 'pending'].indexOf(event.name) === -1 ? {} : {
slow: event.obj.slow ? function () { return event.obj.slow; } : function () { return 60; },
fullTitle: event.obj.fullTitle ? function () { return event.obj.fullTitle; } : undefined
};
var obj = Object.assign({}, event.obj, additionalProps);

handlers[event.name].forEach(function (handler) {
this.handlers[event.name].forEach(function (handler) {
handler(obj, event.err);
});

if (event.logs && event.logs.length > 0) {
event.logs.forEach(function (line) {
if (line.type === 'log') {
console.log(line.content);
} else if (line.type === 'error') {
console.error(line.content);
} else {
console.error('Invalid log line', line);
}
});
console.log();
switch (event.name) {
case 'fail': this.handleFailed(); break;
case 'end': this.handleEnd(); break;
}
});
}
} catch (e) {
console.error('Tests failed:', e);

bail() {
var handlers = this.handlers;
await this.browser.close();
process.exit(3);
}
}

handlers['end'].forEach(function (handler) {
handler();
});
async handleEnd(failed) {
await this.browser.close();
process.exit(!process.env.PERF && failed ? 1 : 0);
}

this.completed = true;
handleFailed() {
if (bail) {
try {
this.handlers['end'].forEach(function (handler) {
handler();
});
} catch (e) {
console.log('An error occurred while bailing:', e);
} finally {
this.handleEnd(true);
}
}
}
}

@@ -142,7 +143,13 @@ async function startTest() {

console.log('Starting', browserName, 'on', testUrl);

const runner = new RemoteRunner();
const options = {
headless: true,
};
const browser = await playwright[browserName].launch(options);
const page = await browser.newPage();

const runner = new RemoteRunner(browser);
new MochaSpecReporter(runner);
new BenchmarkConsoleReporter(runner);

@@ -154,11 +161,14 @@ async function startTest() {
new BenchmarkJsonReporter(runner);
}

const options = {
headless: true,
};
const browser = await playwright[browserName].launch(options);
const page = await browser.newPage();
page.exposeFunction('handleMochaEvent', runner.handleEvent);
page.addInitScript(() => {
window.addEventListener('message', (e) => {
if (e.data.type === 'mocha') {
window.handleMochaEvent(e.data.details);
}
});
});

page.on('pageerror', err => {
if (browserName === 'webkit' && err.toString()
@@ -174,46 +184,14 @@ async function startTest() {
process.exit(1);
});

if (process.env.BROWSER_CONSOLE) {
page.on('console', message => {
const { url, lineNumber } = message.location();
console.log('BROWSER', message.type().toUpperCase(), `${url}:${lineNumber}`, message.text());
});
}
page.on('console', message => {
console.log(message.text());
});

await page.goto(testUrl);

const userAgent = await page.evaluate('navigator.userAgent');
console.log('Testing on:', userAgent);

const interval = setInterval(async () => {
try {
const events = await page.evaluate('window.testEvents()');
runner.handleEvents(events);

if (runner.completed || (runner.failed && bail)) {
if (!runner.completed && runner.failed) {
try {
runner.bail();
} catch (e) {
// Temporary debugging of bailing failure
console.log('An error occurred while bailing:');
console.log(e);
}
}

clearInterval(interval);
await browser.close();
process.exit(!process.env.PERF && runner.failed ? 1 : 0);
}
} catch (e) {
console.error('Tests failed:', e);

clearInterval(interval);
await browser.close();
process.exit(3);
}
}, 1000);
}

devserver.start(function () {
2 changes: 1 addition & 1 deletion docs/_includes/api/replication.html
@@ -29,7 +29,7 @@
* `options.heartbeat`: Configure the heartbeat supported by CouchDB which keeps the change connection alive.
* `options.timeout`: Request timeout (in milliseconds).
* `options.batch_size`: Number of change feed items to process at a time. Defaults to 100. This affects the number of docs and attachments held in memory and the number sent at a time to the target server. You may need to adjust downward if targeting devices with low amounts of memory (e.g. phones) or if the documents and/or attachments are large in size or if there are many conflicted revisions. If your documents are small in size, then increasing this number will probably speed replication up.
* `options.batches_limit`: Number of batches to process at a time. Defaults to 10. This (along wtih `batch_size`) controls how many docs are kept in memory at a time, so the maximum docs in memory at once would equal `batch_size` × `batches_limit`.
* `options.batches_limit`: Number of batches to process at a time. Defaults to 10. This (along with `batch_size`) controls how many docs are kept in memory at a time, so the maximum docs in memory at once would equal `batch_size` × `batches_limit`.
* `options.back_off_function`: backoff function to be used in `retry` replication. This is a function that takes the current backoff as input (or 0 the first time) and returns a new backoff in milliseconds. You can use this to tweak when and how replication will try to reconnect to a remote database when the user goes offline. Defaults to a function that chooses a random backoff between 0 and 2 seconds and doubles every time it fails to connect. The default delay will never exceed 10 minutes. (See [Customizing retry replication](#customizing-retry-replication) below.)
* `options.checkpoint`: Can be used if you want to disable checkpoints on the source, target, or both. Setting this option to `false` will prevent writing checkpoints on both source and target. Setting it to `source` will only write checkpoints on the source. Setting it to `target` will only write checkpoints on the target.
* `options.style`: Specifies whether all revisions of a document including conflicts and deleted former conflicts (`all_docs`) or only the winning revision (`main_only`) should be replicated. This option is passed to the `changes` endpoint of the replication source. Defaults to `all_docs`.
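
As an illustrative aside (not part of this diff), the memory and retry options documented above combine roughly as in the sketch below; the database names and numeric values are assumed example values.

```js
// Sketch only: tuning replication memory use and retry behaviour.
const PouchDB = require('pouchdb');

const local = new PouchDB('local-db');

const replication = local.replicate.from('https://example.com/db/remote-db', {
  live: true,
  retry: true,
  batch_size: 50,    // change feed items processed (and held in memory) per batch
  batches_limit: 5,  // so at most 50 × 5 = 250 docs in memory at once
  // Backoff for `retry`: start at 1s, double on each failure, cap at 60s.
  back_off_function: function (delay) {
    return delay === 0 ? 1000 : Math.min(delay * 2, 60000);
  }
});

replication.on('error', function (err) {
  console.error('replication error', err);
});
```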
2 changes: 1 addition & 1 deletion docs/_posts/2014-04-01-pouchdb-2.1.0.md
@@ -26,6 +26,6 @@ This release contains a database migration, which means that once you upgrade yo
* Improve error reporting from replicator ([#1714](http://github.com/daleharvey/pouchdb/issues/1714))
* Switch to readAsArrayBuffer for IE binary support ([#1667](http://github.com/daleharvey/pouchdb/issues/1667))
* Add `npm run shell` for quick shell experimenting ([#1610](http://github.com/daleharvey/pouchdb/issues/1610))
* Ensure we dont lose data when browsers upgrade from WebSQL to IDB ([#1289](http://github.com/daleharvey/pouchdb/issues/1289))
* Ensure we don't lose data when browsers upgrade from WebSQL to IDB ([#1289](http://github.com/daleharvey/pouchdb/issues/1289))

###### [Complete list of changes](https://github.com/daleharvey/pouchdb/compare/2.0.1...2.1.0)
4 changes: 2 additions & 2 deletions docs/_posts/2014-09-07-pouchdb-3.0.5.md
@@ -25,7 +25,7 @@ We literally wrote [a custom JSON parser](https://github.com/nolanlawson/vuvuzel

The ["infinite recursion" bug](https://github.com/pouchdb/pouchdb/issues/2543) shows up when you have many (> 1000) revisions to the same document. For instance, imagine a text editor that `put()`s a new version of the document for every keystroke. This is fine in traditional databases, but it can be a big problem in PouchDB/CouchDB, because we store the entire history of the database.

Make no mistake: inifinitely-large revision histories will now work in PouchDB 3.0.5. However, it's a lot like riding a bicycle without a helmet: you _can_ do it, but you probably shouldn't.
Make no mistake: infinitely-large revision histories will now work in PouchDB 3.0.5. However, it's a lot like riding a bicycle without a helmet: you _can_ do it, but you probably shouldn't.

Your mental model for this can be something like Git. Imagine a new `git commit` for every keystroke, and you'll see why it might be a bit problematic.

@@ -45,4 +45,4 @@ Your mental model for this can be something like Git. Imagine a new `git commit`

Of course, you can always `compact()` to remove old versions, but this just removes the document data. PouchDB still needs to maintain a permanent tree of the `_rev` hashes in order for the replication algorithm to work properly. If this tree grows excessively, it can bloat your database, whether or not you do compaction.
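
As a hedged aside (not part of this diff), compaction itself is a one-liner; the sketch below assumes the standard `compact()` call and the `auto_compaction` constructor option, with example database names.

```js
// Sketch only: compaction removes the bodies of old, non-leaf revisions,
// but the tree of _rev hashes described above is kept for replication.
const PouchDB = require('pouchdb');

const db = new PouchDB('example-db');

db.compact().then(function (info) {
  console.log('compaction finished', info);
});

// Or opt in to compaction after every write:
const autoCompacted = new PouchDB('example-db-auto', { auto_compaction: true });
```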

What this means is that you should be very careful about updating your documents. Try to update your documents in larger batches, or use a pattern like [delta-pouch](https://github.com/redgeoff/delta-pouch) to avoid creating new revisions altogether.
What this means is that you should be very careful about updating your documents. Try to update your documents in larger batches, or use a pattern like [delta-pouch](https://github.com/redgeoff/delta-pouch) to avoid creating new revisions altogether.
@@ -43,7 +43,7 @@ User agent sniffing! Yes, we should be ashamed of ourselves. But here's why we d
* Additionally, if you specify anywhere between 0 and 5000000, Safari and iOS will use that size as a hint for when, precisely, to show the popup. And in the case of PouchDB, we need to avoid the popup in our automated tests, because Selenium doesn't give us a way to press the "OK" button, meaning our tests would just fail if we request too much. So the ideal size to request is 0.
* However, in **PhantomJS** and older WebKit (Safari ~5), if you request 0, then it will blow up.
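
As an illustration only (not part of this diff), the sniffing described above boils down to picking the estimated-size argument of the legacy `openDatabase` call; the user-agent check and the numbers below are assumptions for the sketch, not PouchDB's actual code.

```js
// Sketch only: Safari/iOS – ask for 0 to avoid the quota popup;
// PhantomJS and older WebKit choke on 0, so give them a real number.
var needsNonZeroSize = /PhantomJS/.test(navigator.userAgent); // crude sniff, illustrative
var estimatedSize = needsNonZeroSize ? 5000000 : 0;

if (typeof openDatabase === 'function') {
  // openDatabase(name, version, displayName, estimatedSize)
  var db = openDatabase('example-db', '1', 'example-db', estimatedSize);
}
```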

For the recored, here's what the dreaded Safari popup looks like:
For the record, here's what the dreaded Safari popup looks like:

{% include img.html src="safari_popup.png" alt="annoying Safari popup" %}

2 changes: 1 addition & 1 deletion docs/_posts/2014-11-10-3.1.0.md
@@ -31,7 +31,7 @@ We keep a list of [plugins and related external projects]({{ site.baseurl }}/ext

* PouchDB [Dump](https://github.com/nolanlawson/pouchdb-dump-cli) and [Load](https://github.com/nolanlawson/pouchdb-load) are designed to help you load data into your newly started PouchDB app faster than replication will do currently.
* If you want to use PouchDB with Angular, you are in luck, there are now 4! adapters to help you integrate them: [angular-pouchdb](https://github.com/wspringer/angular-pouchdb), [Factoryng](https://github.com/redgeoff/factoryng), [ngPouch](https://github.com/jrhicks/ngPouch), [ng-pouchdb](https://github.com/danielzen/ng-pouchdb).
* Ember users arent left in the wild: [ember-pouchdb](https://github.com/taras/ember-pouchdb) should get you going with PouchDB and Ember.
* Ember users aren't left in the wild: [ember-pouchdb](https://github.com/taras/ember-pouchdb) should get you going with PouchDB and Ember.
* If you are using Blobs with PouchDB, you probably want to take a look at [blob-util](https://github.com/nolanlawson/blob-util).

### Bug fixes Galore
6 changes: 3 additions & 3 deletions docs/_posts/2014-11-27-testing-pouchdb.md
@@ -35,7 +35,7 @@ If I had one complaint about Mocha, it would be I often find myself seeing `does

Our tests run on every check-in and PR on [Travis CI](travis-ci.org). Travis is an amazing platform; it was easy to get up and running and has been incredibly stable. PouchDB would be nowhere near the state it is today without it.

However, we are beginning to outgrow Travis's free open-source capacity, and the paid options are prohibitively expensive. So we are beginning to look around for self-hosted alternatives, albiet without a lot of options.
However, we are beginning to outgrow Travis's free open-source capacity, and the paid options are prohibitively expensive. So we are beginning to look around for self-hosted alternatives, albeit without a lot of options.

[Travis is open-source](https://github.com/travis-ci/travis-ci), however it is not a well-setup project for self-hosting – there isn't so much as a README to get started with. We previously used [Jenkins](http://jenkins-ci.org/), however I found it a huge maintenance burden. It's very possible someone with more experience in Jenkins could do a better job, though, and I am also keeping an eye on [Strider](http://stridercd.com/) as a possible alternative.

@@ -45,9 +45,9 @@ We use [Selenium](http://www.seleniumhq.org/) to drive tests in the browser, and

We are also finding some platforms (particularly iPhone and Internet Explorer) will become unstable on Saucelabs. The errors almost always come from our code, but it is hard to maintain a reliable test suite when the underlying platform changes and you have little control over it.

Before settling on Selenium, I had previously tried out [Testling](https://ci.testling.com/), and similiarly to Tape it was very easy to get started but opinionated. It was also broken on OSX with pull requests containing fixes that hadn't been touched for months. Selenium had the advantage that it was very widely used, and new frameworks or platforms are likely to have WebDriver support early on.
Before settling on Selenium, I had previously tried out [Testling](https://ci.testling.com/), and similarly to Tape it was very easy to get started but opinionated. It was also broken on OSX with pull requests containing fixes that hadn't been touched for months. Selenium had the advantage that it was very widely used, and new frameworks or platforms are likely to have WebDriver support early on.

I have however found Selenium as a project fustrating to use, from the first time visiting [http://www.seleniumhq.org/](http://www.seleniumhq.org/) to understanding what I needed to download and write a first test that started a browser, it was an unclear and confusing process. Even today getting ChromeDriver started correctly gets me confused, also the download cost of 30MB for what is mostly a proxy server is an annoyance.
I have however found Selenium as a project frustrating to use, from the first time visiting [http://www.seleniumhq.org/](http://www.seleniumhq.org/) to understanding what I needed to download and write a first test that started a browser, it was an unclear and confusing process. Even today getting ChromeDriver started correctly gets me confused, also the download cost of 30MB for what is mostly a proxy server is an annoyance.

I would love to see a project wrap up Selenium / ChromeDriver and possibly Cordova / Appium into a nice, well-documented module that installs and boots your browser(ish) platform of choice, ready to be driven by Selenium tests.

@@ -95,7 +95,7 @@ changes.on('change', function(change) {
db.put({_id: 'doc', foo: 'bar'});
```

This test looks fine however inside `changes.cancel()` we may be doing some processing that happens asynchronously, specifically we may be aborting a HTTP request that we haven't processed the reply of yet and as we process the reply the next test may have started. This type of issue is extremely problematic as it can lead to unexpected behavour in tests that are not the cause of the problem.
This test looks fine however inside `changes.cancel()` we may be doing some processing that happens asynchronously, specifically we may be aborting a HTTP request that we haven't processed the reply of yet and as we process the reply the next test may have started. This type of issue is extremely problematic as it can lead to unexpected behaviour in tests that are not the cause of the problem.
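
One way to guard against this race — a minimal sketch, assuming the changes feed emits a `'complete'` event once cancellation has settled (as PouchDB's live changes feed does) — is to end the test only after that event fires:

```js
// Sketch only: don't call done() until the cancelled feed reports 'complete',
// so in-flight HTTP work can't leak into the next test.
// Assumes PouchDB and a Mocha-style runner are in scope.
it('cancels the changes feed cleanly', function (done) {
  const db = new PouchDB('test-db');
  const changes = db.changes({ live: true });

  changes.on('change', function () {
    changes.cancel();
  });

  // For live changes, 'complete' fires once cancel() has wound things down.
  changes.on('complete', function () {
    done();
  });

  db.put({ _id: 'doc', foo: 'bar' });
});
```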

In PouchDB we fix this with:

2 changes: 1 addition & 1 deletion docs/_posts/2015-02-03-fix-up-look-sharp.md
@@ -37,7 +37,7 @@ Calvin did the base work for this a long time ago, however we now officially sup

### Docs property in change events - [#3358](https://github.com/pouchdb/pouchdb/pull/3358)

A lot of developers wanted to know what documents were being referred to when they recieved a change event. Gregor from hood.ie implemented a new `docs` property in the change event, so you know exactly what documents have changed.
A lot of developers wanted to know what documents were being referred to when they received a change event. Gregor from hood.ie implemented a new `docs` property in the change event, so you know exactly what documents have changed.

### local_seq now deprecated - [#3367](https://github.com/pouchdb/pouchdb/issues/3367)

2 changes: 1 addition & 1 deletion docs/_posts/2015-03-05-taming-the-async-beast-with-es7.md
@@ -324,7 +324,7 @@ So if you want to play with it yourself, I've put together a [small demo library

### Conclusion

Async functions are an empowering new concept in ES7. They give us back our lost `return`s and `try`/`catch`es, and they reward the knowledge we've already gained from writing synchronous code with new idiioms that look a lot like the old ones, but are much more performant.
Async functions are an empowering new concept in ES7. They give us back our lost `return`s and `try`/`catch`es, and they reward the knowledge we've already gained from writing synchronous code with new idioms that look a lot like the old ones, but are much more performant.

Most importantly, async functions make APIs like PouchDB's a lot easier to work with. So hopefully this will lead to fewer user errors and confusion, as well as more elegant and readable code.

2 changes: 1 addition & 1 deletion docs/_posts/2015-05-07-pouchdb-3.5.0-vote-for-pouchdb.md
@@ -16,7 +16,7 @@ Do you support better documentation for PouchDB users? Do you value faster CORS
### Bugfixes

* Fallback to the [fetch API](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API) where XMLHTTPRequest does not exist (e.g. in Service Workers) ([#3678](https://github.com/pouchdb/pouchdb/pull/3678))
* Stop triggering unneccesary OPTIONS requests during CORS replication ([#3573](https://github.com/pouchdb/pouchdb/issues/3573))
* Stop triggering unnecessary OPTIONS requests during CORS replication ([#3573](https://github.com/pouchdb/pouchdb/issues/3573))
* Multitudes of documentation fixes
* Fix es3ify behavior with dependencies ([#3688](https://github.com/pouchdb/pouchdb/issues/3688))
* Add source maps to dev environment ([#3731](https://github.com/pouchdb/pouchdb/pull/3731))
2 changes: 1 addition & 1 deletion docs/_posts/2015-08-03-pouchdb-4.0.0-ballast-overboard.md
@@ -72,7 +72,7 @@ replication.on('paused', doneFun);
* Fix attachment length for stubs ([#3963](https://github.com/pouchdb/pouchdb/issues/3963))
* Reduce the number of checkpoint mismatches ([#4009](https://github.com/pouchdb/pouchdb/issues/4009))
* Fallback from checkpoint mismatch correctly (less replication start from the beginning) ([#3999](https://github.com/pouchdb/pouchdb/issues/3999))
* Dont fail replication when attachment request fails ([#4021](https://github.com/pouchdb/pouchdb/issues/4021))
* Don't fail replication when attachment request fails ([#4021](https://github.com/pouchdb/pouchdb/issues/4021))
* Fix check for `localStorage` in Safari private mode ([#4070](https://github.com/pouchdb/pouchdb/issues/4070))
### Coverage improvements