Skip to content
This repository has been archived by the owner on Mar 23, 2023. It is now read-only.

Commit

Permalink
fix: benchmarks and perf tweaks
Browse files Browse the repository at this point in the history
  • Loading branch information
hugomrdias committed Apr 1, 2020
1 parent e929b4a commit 71d6804
Show file tree
Hide file tree
Showing 3 changed files with 181 additions and 28 deletions.
149 changes: 149 additions & 0 deletions bench.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,149 @@
/* eslint-disable no-console */
'use strict'
const Benchmark = require('benchmark')
const randomBytes = require('iso-random-stream/src/random')
const IDBStore = require('./src')
const LevelStore = require('datastore-level')
const { Key } = require('interface-datastore')

// add tests
// new Benchmark.Suite('simple')
// .add('simple put idb', {
// defer: true,
// fn: async (d) => {
// const store = new IDBStore('hello1')
// await store.open()
// await store.put(new Key('/z/one'), Buffer.from('one'))
// await store.close()
// d.resolve()
// }
// })
// .add('simple put level', {
// defer: true,
// fn: async (d) => {
// const store = new LevelStore('hello2')
// await store.open()
// await store.put(new Key('/z/one'), Buffer.from('one'))
// await store.close()
// d.resolve()
// }
// })
// // add listeners
// .on('cycle', function (event) {
// console.log(String(event.target))
// })
// .on('complete', function () {
// console.log('Fastest is ' + this.filter('fastest').map('name'))
// })
// // run async
// .run({ async: true })

// new Benchmark.Suite('parallel')
// .add('parallel idb', {
// defer: true,
// fn: async (d) => {
// const store = new IDBStore('parallel idb')
// await store.open()
// const data = []
// for (let i = 0; i < 100; i++) {
// data.push([new Key(`/z/key${i}`), Buffer.from(`data${i}`)])
// }

// await Promise.all(data.map(d => store.put(d[0], d[1])))
// await Promise.all(data.map(d => store.get(d[0])))
// await store.close()
// d.resolve()
// }
// })
// .add('parallel level', {
// defer: true,
// fn: async (d) => {
// const store = new LevelStore('parallel level')
// await store.open()
// const data = []
// for (let i = 0; i < 100; i++) {
// data.push([new Key(`/z/key${i}`), Buffer.from(`data${i}`)])
// }

// await Promise.all(data.map(d => store.put(d[0], d[1])))
// await Promise.all(data.map(d => store.get(d[0])))
// await store.close()
// d.resolve()
// }
// })
// // add listeners
// .on('cycle', function (event) {
// console.log(String(event.target))
// })
// .on('complete', function () {
// console.log('Fastest is ' + this.filter('fastest').map('name'))
// })
// // run async
// .run({ async: true })

/**
 * Shared body for the deferred "batch" benchmarks: open the store, write
 * 3 × `count` random entries in a single batch, count the results of three
 * prefix queries, then close the store.
 *
 * The deferred handle is always resolved (even on error) so a failing store
 * cannot hang the whole suite waiting on an unresolved deferred.
 *
 * @param {Object} store - datastore instance (IDBStore or LevelStore).
 * @param {Object} deferred - Benchmark.js deferred handle.
 */
const runBatchBench = async (store, deferred) => {
  try {
    await store.open()

    const batch = store.batch()
    const count = 400
    for (let i = 0; i < count; i++) {
      batch.put(new Key(`/a/hello${i}`), randomBytes(32))
      batch.put(new Key(`/q/hello${i}`), randomBytes(64))
      batch.put(new Key(`/z/hello${i}`), randomBytes(128))
    }
    await batch.commit()

    // Drain an async iterable and return how many items it produced.
    const total = async iterable => {
      let n = 0
      for await (const _ of iterable) n++ // eslint-disable-line
      return n
    }

    await total(store.query({ prefix: '/a' }))
    await total(store.query({ prefix: '/z' }))
    await total(store.query({ prefix: '/q' }))
    await store.close()
  } finally {
    // Resolve unconditionally — Benchmark.js never times out a deferred test.
    deferred.resolve()
  }
}

new Benchmark.Suite('batch')
  .add('batch idb', {
    defer: true,
    fn: (d) => runBatchBench(new IDBStore('batch idb'), d)
  })
  .add('batch level', {
    defer: true,
    fn: (d) => runBatchBench(new LevelStore('batch level'), d)
  })
  // add listeners
  .on('cycle', function (event) {
    console.log(String(event.target))
  })
  .on('complete', function () {
    console.log('Fastest is ' + this.filter('fastest').map('name'))
  })
  // run async
  .run({ async: true })
4 changes: 3 additions & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,8 @@
"aegir": "^21.4.5",
"chai": "^4.2.0",
"datastore-core": "ipfs/js-datastore-core#fix/add-buffer",
"dirty-chai": "^2.0.1"
"datastore-level": "^0.14.1",
"dirty-chai": "^2.0.1",
"iso-random-stream": "^1.1.1"
}
}
56 changes: 29 additions & 27 deletions src/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,34 @@ function typedarrayToBuffer (arr) {
}
}

/**
 * Stream query results from an IndexedDB-backed store.
 *
 * Yields `{ key, value }` pairs (or `{ key }` when `q.keysOnly` is set) for
 * every record whose key starts with `q.prefix` (if given), honouring
 * `q.offset` and `q.limit` over the *matching* results — not over raw
 * cursor rows, which would silently drop results on prefixed queries.
 *
 * @param {Object} q - interface-datastore query (`prefix`, `offset`,
 *   `limit`, `keysOnly`).
 * @param {Object} store - open idb database handle (exposes `.transaction`).
 * @param {string} location - object-store name to read from.
 */
const queryIt = async function * (q, store, location) {
  let cursor = await store.transaction(location).store.openCursor()

  // Fast path: with no prefix every record matches, so the raw cursor
  // offset equals the result offset and rows can be skipped natively.
  if (cursor && !q.prefix && q.offset && q.offset > 0) {
    cursor = await cursor.advance(q.offset)
  }

  let skipped = 0 // matching records dropped to honour q.offset
  let yielded = 0 // matching records emitted so far (for q.limit)

  while (cursor) {
    if (q.limit !== undefined && yielded === q.limit) {
      return
    }

    const key = new Key(Buffer.from(cursor.key))
    if (!q.prefix || key.toString().startsWith(q.prefix)) {
      // offset/limit count matches only, not scanned rows.
      if (q.prefix && q.offset && skipped < q.offset) {
        skipped++
      } else {
        yielded++
        if (q.keysOnly) {
          yield { key }
        } else {
          // Copy the value lazily — keysOnly queries never touch it.
          yield { key, value: Buffer.from(cursor.value) }
        }
      }
    }
    cursor = await cursor.continue()
  }
}

class IdbDatastore {
constructor (location) {
this.location = location
Expand Down Expand Up @@ -135,34 +163,8 @@ class IdbDatastore {
if (this.store === null) {
throw new Error('Datastore needs to be opened.')
}
let limit = 0

let it = (async function * (store, location) {
let cursor = await store.transaction(location).store.openCursor()

if (cursor && q.offset && q.offset > 0) {
cursor = await cursor.advance(q.offset)
}

while (cursor) {
// limit
if (q.limit !== undefined && q.limit === limit) {
return
}
limit++

const key = new Key(Buffer.from(cursor.key))
const value = Buffer.from(cursor.value)
if (!q.prefix || (q.prefix && key.toString().startsWith(q.prefix))) {
if (q.keysOnly) {
yield { key }
} else {
yield { key, value }
}
}
cursor = await cursor.continue()
}
})(this.store, this.location)
let it = queryIt(q, this.store, this.location)

if (Array.isArray(q.filters)) {
it = q.filters.reduce((it, f) => filter(it, f), it)
Expand Down

0 comments on commit 71d6804

Please sign in to comment.