chore: run migration tests against s3-datastore (#87)
Refactors tests to have one list of different repo configs, and to run all tests against all configs.  This lets us test migrations against level, fs, s3, etc just by adding a new config to the list in `test/browser.js` and/or `test/node.js`
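As a hedged illustration of that workflow, the entry below shows the shape such a config could take (the `name`/`cleanup`/`repoOptions` fields mirror the `CONFIGURATIONS` entries added in `test/browser.js` further down; `datastore-fs`, the `lock: 'fs'` value and the rimraf-based cleanup are assumptions, not part of this commit):

```js
// Hypothetical extra entry for the CONFIGURATIONS array in test/node.js.
// datastore-fs, lock: 'fs' and the rimraf-based cleanup are illustrative
// assumptions, not part of this commit.
const rimraf = require('rimraf')
const DatastoreFS = require('datastore-fs')

const fsConfiguration = {
  name: 'with fs',
  cleanup: (dir) => new Promise((resolve) => rimraf(dir, resolve)),
  repoOptions: {
    lock: 'fs',
    storageBackends: {
      root: DatastoreFS,
      blocks: DatastoreFS,
      keys: DatastoreFS,
      datastore: DatastoreFS,
      pins: DatastoreFS
    },
    storageBackendOptions: {
      root: { extension: '' },
      blocks: { sharding: true, extension: '.data' },
      keys: {},
      datastore: {},
      pins: {}
    }
  }
}
```

Appending an object like this to the `CONFIGURATIONS` array would run every existing lock, version, migration, init and integration suite against the new backend.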
achingbrain authored Apr 15, 2021
1 parent b8433ab commit 5e93b62
Showing 11 changed files with 340 additions and 190 deletions.
5 changes: 2 additions & 3 deletions migrations/migration-8/index.js
@@ -54,9 +54,8 @@ async function process (repoPath, repoOptions, onProgress, keyFunction) {

let blockCount

blockCount = await length(blockstore.query({
keysOnly: true,
filters: [({ key }) => {
blockCount = await length(blockstore.queryKeys({
filters: [(key) => {
const newKey = keyFunction(key)

return newKey.toString() !== key.toString()
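For context, a minimal sketch of the interface-datastore 4.x pattern this hunk switches to: `queryKeys()` yields keys directly, so the filter receives a `Key` rather than a `{ key, value }` pair. The helper name below is illustrative; `it-length` is already a dependency of this package.

```js
// Illustrative helper (not part of the commit): count the blocks whose key
// would change under keyFunction, using queryKeys() as in the hunk above.
const length = require('it-length')

async function countBlocksToRename (blockstore, keyFunction) {
  return length(blockstore.queryKeys({
    filters: [(key) => keyFunction(key).toString() !== key.toString()]
  }))
}
```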
3 changes: 1 addition & 2 deletions migrations/migration-9/index.js
@@ -2,7 +2,6 @@

const CID = require('cids')
const dagpb = require('ipld-dag-pb')
// @ts-ignore https://github.com/rvagg/cborg/pull/5
const cbor = require('cborg')
const multicodec = require('multicodec')
const multibase = require('multibase')
@@ -94,7 +93,7 @@ async function pinsToDAG (blockstore, datastore, pinstore, onProgress) {
let recursivePins = []
let directPins = []
let counter = 0
const pinCount = await length(pinstore.query({ keysOnly: true }))
const pinCount = await length(pinstore.queryKeys({}))

for await (const { key, value } of pinstore.query({})) {
counter++
15 changes: 9 additions & 6 deletions package.json
@@ -48,10 +48,10 @@
"dependencies": {
"cborg": "^1.0.4",
"cids": "^1.0.0",
"datastore-core": "^3.0.0",
"datastore-core": "^4.0.0",
"debug": "^4.1.0",
"fnv1a": "^1.0.1",
"interface-datastore": "^3.0.3",
"interface-datastore": "^4.0.0",
"ipld-dag-pb": "^0.22.1",
"it-length": "^1.0.1",
"multibase": "^4.0.1",
@@ -66,10 +66,12 @@
"@ipld/car": "^0.1.3",
"@types/debug": "^4.1.5",
"@types/varint": "^6.0.0",
"aegir": "^32.1.0",
"aegir": "^33.0.0",
"assert": "^2.0.0",
"datastore-fs": "^3.0.0",
"datastore-level": "^4.0.0",
"aws-sdk": "^2.884.0",
"datastore-fs": "^4.0.0",
"datastore-level": "^5.0.0",
"datastore-s3": "^5.0.0",
"events": "^3.2.0",
"it-all": "^1.0.2",
"just-safe-set": "^2.1.0",
@@ -79,7 +81,8 @@
"npm-run-all": "^4.1.5",
"readable-stream": "^3.6.0",
"rimraf": "^3.0.0",
"sinon": "^9.0.2",
"sinon": "^10.0.0",
"stand-in": "^4.2.0",
"util": "^0.12.3"
},
"engines": {
6 changes: 4 additions & 2 deletions src/index.js
@@ -85,7 +85,7 @@ async function migrate (path, repoOptions, toVersion, { ignoreLock = false, onPr
let lock

if (!isDryRun && !ignoreLock) {
lock = await repoLock.lock(currentVersion, path)
lock = await repoLock.lock(currentVersion, path, repoOptions)
}

try {
@@ -185,7 +185,9 @@ async function revert (path, repoOptions, toVersion, { ignoreLock = false, onPro
verifyAvailableMigrations(migrations, toVersion, currentVersion, true)

let lock
if (!isDryRun && !ignoreLock) lock = await repoLock.lock(currentVersion, path)
if (!isDryRun && !ignoreLock) {
lock = await repoLock.lock(currentVersion, path, repoOptions)
}

log(`Reverting from version ${currentVersion} to ${toVersion}`)

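A hedged usage sketch of why `repoOptions` is now forwarded to the lock: a repo with no real filesystem, such as one backed by S3 or running in a browser, can ask for the in-memory lock. The `(path, repoOptions, toVersion, options)` signature is visible in the hunks above; the package name, the target version and the bare `{ ignoreLock: false }` options object are assumptions for illustration.

```js
// Sketch only: migrating a repo that cannot take a filesystem lock.
const { migrate } = require('ipfs-repo-migrations') // package name assumed

async function run () {
  await migrate('/path/to/repo', {
    lock: 'memory', // picked up by the updated src/repo/lock.js
    storageBackends: { /* root, blocks, keys, datastore, pins classes */ },
    storageBackendOptions: { /* matching per-store options */ }
  }, 10, { ignoreLock: false })
}

run().catch(console.error)
```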
9 changes: 8 additions & 1 deletion src/repo/lock.js
@@ -3,6 +3,7 @@
const debug = require('debug')
// @ts-ignore
const { lock: properLock } = require('proper-lockfile')
const { lock: memoryLock } = require('./lock-memory')

const log = debug('ipfs:repo:migrator:repo_fs_lock')
const lockFile = 'repo.lock'
@@ -12,8 +13,14 @@ const lockFile = 'repo.lock'
*
* @param {number} version
* @param {string} dir
* @param {object} [repoOptions]
* @param {string} [repoOptions.lock]
*/
async function lock (version, dir) {
async function lock (version, dir, repoOptions) {
if (repoOptions && repoOptions.lock === 'memory') {
return memoryLock(version, dir)
}

const file = `${dir}/${lockFile}`
log('locking %s', file)
const release = await properLock(dir, { lockfilePath: file })
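A minimal sketch of the updated `lock()` contract, assuming the release object exposes a `close()` method as the in-memory lock in this repo does: when `repoOptions.lock` is `'memory'`, no `repo.lock` file is ever written, which is what the S3 and browser configurations below rely on.

```js
// Sketch only: the require path and release.close() are assumptions.
const { lock } = require('./src/repo/lock')

async function withLock () {
  const release = await lock(2, '/path/to/repo', { lock: 'memory' })

  try {
    // ... run migrations while the repo is locked ...
  } finally {
    await release.close()
  }
}
```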
137 changes: 97 additions & 40 deletions test/browser.js
@@ -2,40 +2,16 @@
'use strict'

const DatastoreLevel = require('datastore-level')
const { createRepo, createAndLoadRepo } = require('./fixtures/repo')

const repoOptions = {
lock: 'memory',
storageBackends: {
root: DatastoreLevel,
blocks: DatastoreLevel,
keys: DatastoreLevel,
datastore: DatastoreLevel,
pins: DatastoreLevel
},
storageBackendOptions: {
root: {
extension: '',
prefix: '',
version: 2
},
blocks: {
sharding: false,
prefix: '',
version: 2
},
keys: {
sharding: false,
prefix: '',
version: 2
},
datastore: {
sharding: false,
prefix: '',
version: 2
}
const DatastoreS3 = require('datastore-s3')
const mockS3 = require('./fixtures/mock-s3')
const S3 = require('aws-sdk').S3
const s3Instance = new S3({
params: {
Bucket: 'test'
}
}
})
mockS3(s3Instance)
const { createRepo } = require('./fixtures/repo')

async function deleteDb (dir) {
return new Promise((resolve) => {
@@ -50,7 +26,7 @@ async function deleteDb (dir) {
})
}

async function repoCleanup (dir) {
async function cleanup (dir) {
await deleteDb(dir)
await deleteDb('level-js-' + dir)

@@ -60,26 +36,107 @@ async function repoCleanup (dir) {
}
}

describe('Browser specific tests', () => {
const CONFIGURATIONS = [{
name: 'local',
cleanup,
repoOptions: {
lock: 'memory',
storageBackends: {
root: DatastoreLevel,
blocks: DatastoreLevel,
keys: DatastoreLevel,
datastore: DatastoreLevel,
pins: DatastoreLevel
},
storageBackendOptions: {
root: {
extension: '',
prefix: '',
version: 2
},
blocks: {
sharding: false,
prefix: '',
version: 2
},
keys: {
sharding: false,
prefix: '',
version: 2
},
datastore: {
sharding: false,
prefix: '',
version: 2
}
}
}
}, {
name: 'with s3',
cleanup: () => {},
repoOptions: {
lock: 'memory',
storageBackends: {
root: DatastoreS3,
blocks: DatastoreS3,
datastore: DatastoreS3,
keys: DatastoreS3,
pins: DatastoreS3
},
storageBackendOptions: {
root: {
sharding: true,
extension: '',
s3: s3Instance,
createIfMissing: false
},
blocks: {
sharding: true,
extension: '.data',
s3: s3Instance,
createIfMissing: false
},
datastore: {
sharding: true,
s3: s3Instance,
createIfMissing: false
},
keys: {
sharding: true,
s3: s3Instance,
createIfMissing: false
},
pins: {
sharding: true,
s3: s3Instance,
createIfMissing: false
}
}
}
}]

CONFIGURATIONS.forEach(({ name, repoOptions, cleanup }) => {
const setup = () => createRepo(repoOptions)

describe('lock.js tests', () => {
describe('mem-lock tests', () => {
require('./lock-test')(require('../src/repo/lock-memory'), () => createRepo(repoOptions), repoCleanup, repoOptions)
require('./lock-test')(require('../src/repo/lock-memory'), setup, cleanup, repoOptions)
})
})

describe('version tests', () => {
require('./version-test')(() => createRepo(repoOptions), repoCleanup, repoOptions)
require('./version-test')(setup, cleanup, repoOptions)
})

describe('migrations tests', () => {
require('./migrations')(() => createRepo(repoOptions), repoCleanup)
require('./migrations')(setup, cleanup, repoOptions)
})

describe('init tests', () => {
require('./init-test')(() => createRepo(repoOptions), repoCleanup, repoOptions)
require('./init-test')(setup, cleanup, repoOptions)
})

describe('integration tests', () => {
require('./integration-test')(() => createAndLoadRepo(repoOptions), repoCleanup, repoOptions)
require('./integration-test')(setup, cleanup, repoOptions)
})
})
82 changes: 82 additions & 0 deletions test/fixtures/mock-s3.js
@@ -0,0 +1,82 @@
/* eslint-env mocha */
'use strict'

const { expect } = require('aegir/utils/chai')
const standin = require('stand-in')
const { Buffer } = require('buffer')

class S3Error extends Error {
constructor (message, code) {
super(message)
this.code = message
this.statusCode = code
}
}

const s3Resolve = (res) => ({ promise: () => Promise.resolve(res) })
const s3Reject = (err) => ({ promise: () => Promise.reject(err) })

/**
* Mocks out the s3 calls made by datastore-s3
*
* @param {S3Instance} s3
* @returns {void}
*/
module.exports = function (s3) {
const mocks = {}
const storage = {}

mocks.deleteObject = standin.replace(s3, 'deleteObject', (stand, params) => {
expect(params.Key).to.be.a('string')
if (storage[params.Key]) {
delete storage[params.Key]
return s3Resolve({})
}
return s3Reject(new S3Error('NotFound', 404))
})

mocks.getObject = standin.replace(s3, 'getObject', (stand, params) => {
expect(params.Key).to.be.a('string')
if (storage[params.Key]) {
return s3Resolve({ Body: storage[params.Key] })
}
return s3Reject(new S3Error('NotFound', 404))
})

mocks.headBucket = standin.replace(s3, 'headBucket', (stand, params) => {
expect(params.Bucket).to.be.a('string')
return s3Resolve()
})

mocks.headObject = standin.replace(s3, 'headObject', (stand, params) => {
expect(params.Key).to.be.a('string')
if (storage[params.Key]) {
return s3Resolve({})
}
return s3Reject(new S3Error('NotFound', 404))
})

mocks.listObjectV2 = standin.replace(s3, 'listObjectsV2', (stand, params) => {
expect(params.Prefix).to.be.a('string')
const results = {
Contents: []
}

for (const k in storage) {
if (k.startsWith(params.Prefix)) {
results.Contents.push({
Key: k
})
}
}

return s3Resolve(results)
})

mocks.upload = standin.replace(s3, 'upload', (stand, params) => {
expect(params.Key).to.be.a('string')
expect(params.Body).to.be.instanceof(Buffer)
storage[params.Key] = params.Body
return s3Resolve({})
})
}
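A short usage sketch tying this mock to the S3-backed configuration in `test/browser.js` above; the `DatastoreS3` constructor arguments are assumptions based on the `storageBackendOptions` forwarded in that file.

```js
// Sketch: route every aws-sdk S3 call into the in-memory storage map above,
// then hand the mocked client to datastore-s3. The 'blocks' prefix and the
// DatastoreS3 constructor shape are assumptions for illustration.
const S3 = require('aws-sdk').S3
const DatastoreS3 = require('datastore-s3')
const mockS3 = require('./test/fixtures/mock-s3')

const s3 = new S3({ params: { Bucket: 'test' } })
mockS3(s3)

const blocks = new DatastoreS3('blocks', { s3, createIfMissing: false })
```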