Commit
adapter-idb: replace var with let/const
alxndrsn committed Apr 10, 2024
1 parent 08649d9 commit 0108c24
Showing 10 changed files with 291 additions and 291 deletions.
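
The rule applied across all ten files is the usual modernisation: a declaration that is never reassigned becomes const, and one that is reassigned or only assigned later becomes let. A minimal sketch of the pattern, using illustrative code rather than anything from the PouchDB source:

function countMatches(rows, predicate) {
  // Before this change the equivalent code used function-scoped, hoisted
  // declarations: var count = 0; for (var i = 0; ...) { ... }
  let count = 0;                     // reassigned below, so let
  const total = rows.length;         // never reassigned, so const
  for (let i = 0; i < total; i++) {  // loop counter changes, so let
    if (predicate(rows[i])) {
      count++;
    }
  }
  return count;
}

The matching addition and deletion counts (291 of each) reflect how mechanical the rewrite is: each changed line keeps its logic and only swaps the declaration keyword, plus the occasional whitespace cleanup such as the keyRange line below.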
66 changes: 33 additions & 33 deletions packages/node_modules/pouchdb-adapter-idb/src/allDocs.js
@@ -19,8 +19,8 @@ import getAll from './getAll';
 
 function allDocsKeys(keys, docStore, onBatch) {
   // It's not guaranteed to be returned in right order
-  var valuesBatch = new Array(keys.length);
-  var count = 0;
+  const valuesBatch = new Array(keys.length);
+  let count = 0;
   keys.forEach(function (key, index) {
     docStore.get(key).onsuccess = function (event) {
       if (event.target.result) {
@@ -66,16 +66,16 @@ function createKeyRange(start, end, inclusiveEnd, key, descending) {
 }
 
 function idbAllDocs(opts, idb, callback) {
-  var start = 'startkey' in opts ? opts.startkey : false;
-  var end = 'endkey' in opts ? opts.endkey : false;
-  var key = 'key' in opts ? opts.key : false;
-  var keys = 'keys' in opts ? opts.keys : false;
-  var skip = opts.skip || 0;
-  var limit = typeof opts.limit === 'number' ? opts.limit : -1;
-  var inclusiveEnd = opts.inclusive_end !== false;
-
-  var keyRange ;
-  var keyRangeError;
+  const start = 'startkey' in opts ? opts.startkey : false;
+  const end = 'endkey' in opts ? opts.endkey : false;
+  const key = 'key' in opts ? opts.key : false;
+  const keys = 'keys' in opts ? opts.keys : false;
+  let skip = opts.skip || 0;
+  const limit = typeof opts.limit === 'number' ? opts.limit : -1;
+  const inclusiveEnd = opts.inclusive_end !== false;
+
+  let keyRange;
+  let keyRangeError;
   if (!keys) {
     keyRange = createKeyRange(start, end, inclusiveEnd, key, opts.descending);
     keyRangeError = keyRange && keyRange.error;
@@ -88,25 +88,25 @@ function idbAllDocs(opts, idb, callback) {
     }
   }
 
-  var stores = [DOC_STORE, BY_SEQ_STORE, META_STORE];
+  const stores = [DOC_STORE, BY_SEQ_STORE, META_STORE];
 
   if (opts.attachments) {
     stores.push(ATTACH_STORE);
   }
-  var txnResult = openTransactionSafely(idb, stores, 'readonly');
+  const txnResult = openTransactionSafely(idb, stores, 'readonly');
   if (txnResult.error) {
     return callback(txnResult.error);
   }
-  var txn = txnResult.txn;
+  const txn = txnResult.txn;
   txn.oncomplete = onTxnComplete;
   txn.onabort = idbError(callback);
-  var docStore = txn.objectStore(DOC_STORE);
-  var seqStore = txn.objectStore(BY_SEQ_STORE);
-  var metaStore = txn.objectStore(META_STORE);
-  var docIdRevIndex = seqStore.index('_doc_id_rev');
-  var results = [];
-  var docCount;
-  var updateSeq;
+  const docStore = txn.objectStore(DOC_STORE);
+  const seqStore = txn.objectStore(BY_SEQ_STORE);
+  const metaStore = txn.objectStore(META_STORE);
+  const docIdRevIndex = seqStore.index('_doc_id_rev');
+  const results = [];
+  let docCount;
+  let updateSeq;
 
   metaStore.get(META_STORE).onsuccess = function (e) {
     docCount = e.target.result.docCount;
@@ -116,7 +116,7 @@ function idbAllDocs(opts, idb, callback) {
   if (opts.update_seq) {
     // get max updateSeq
     seqStore.openKeyCursor(null, 'prev').onsuccess = e => {
-      var cursor = e.target.result;
+      const cursor = e.target.result;
       if (cursor && cursor.key) {
         updateSeq = cursor.key;
       }
@@ -126,11 +126,11 @@ function idbAllDocs(opts, idb, callback) {
   // if the user specifies include_docs=true, then we don't
   // want to block the main cursor while we're fetching the doc
   function fetchDocAsynchronously(metadata, row, winningRev) {
-    var key = metadata.id + "::" + winningRev;
+    const key = metadata.id + "::" + winningRev;
     docIdRevIndex.get(key).onsuccess = function onGetDoc(e) {
       row.doc = decodeDoc(e.target.result) || {};
       if (opts.conflicts) {
-        var conflicts = collectConflicts(metadata);
+        const conflicts = collectConflicts(metadata);
         if (conflicts.length) {
           row.doc._conflicts = conflicts;
         }
@@ -140,14 +140,14 @@ function idbAllDocs(opts, idb, callback) {
   }
 
   function allDocsInner(winningRev, metadata) {
-    var row = {
+    const row = {
       id: metadata.id,
       key: metadata.id,
       value: {
         rev: winningRev
       }
     };
-    var deleted = metadata.deleted;
+    const deleted = metadata.deleted;
     if (deleted) {
       if (keys) {
         results.push(row);
@@ -164,18 +164,18 @@ function idbAllDocs(opts, idb, callback) {
   }
 
   function processBatch(batchValues) {
-    for (var i = 0, len = batchValues.length; i < len; i++) {
+    for (let i = 0, len = batchValues.length; i < len; i++) {
       if (results.length === limit) {
         break;
       }
-      var batchValue = batchValues[i];
+      const batchValue = batchValues[i];
       if (batchValue.error && keys) {
         // key was not found with "keys" requests
         results.push(batchValue);
         continue;
       }
-      var metadata = decodeMetadata(batchValue);
-      var winningRev = metadata.winningRev;
+      const metadata = decodeMetadata(batchValue);
+      const winningRev = metadata.winningRev;
       allDocsInner(winningRev, metadata);
     }
   }
@@ -191,15 +191,15 @@ function idbAllDocs(opts, idb, callback) {
   }
 
   function onGetAll(e) {
-    var values = e.target.result;
+    let values = e.target.result;
     if (opts.descending) {
       values = values.reverse();
     }
     processBatch(values);
   }
 
   function onResultsReady() {
-    var returnVal = {
+    const returnVal = {
       total_rows: docCount,
       offset: opts.skip,
       rows: results
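
One detail worth noting in the hunks above: bindings that are assigned later or reassigned (for example keyRange, keyRangeError, docCount, updateSeq and values) become let, while everything initialised exactly once becomes const. A minimal illustrative sketch of that distinction, with hypothetical names (store, key, onDone) rather than code from the adapter:

// const requires an initialiser and forbids reassignment;
// let allows the value to arrive later, e.g. from an IndexedDB callback.
function readDocCount(store, key, onDone) {
  let docCount;                    // filled in asynchronously, so let
  const request = store.get(key);  // the request object never changes, so const
  request.onsuccess = function (e) {
    docCount = e.target.result.docCount;
    onDone(docCount);
  };
}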