From 4e89d3bfeef0b64ccb7ccc09185a9d682ab376e3 Mon Sep 17 00:00:00 2001 From: Alex Potsides Date: Mon, 24 Apr 2023 14:47:46 +0100 Subject: [PATCH 1/2] feat!: make peerstore atomic (#75) Instead of having separate books for addresses, protocols etc, just have simple save/merge/patch methods for updating peer data. This means we can update peer data in one call instead of needing to make multiple async calls. --- .gitignore | 2 + README.md | 170 ------ package.json | 16 +- src/address-book.ts | 367 ------------ src/errors.ts | 3 +- src/index.ts | 200 ++++--- src/key-book.ts | 140 ----- src/metadata-book.ts | 244 -------- src/pb/peer.proto | 17 +- src/pb/peer.ts | 224 ++++++-- src/pb/tags.proto | 11 - src/pb/tags.ts | 145 ----- src/proto-book.ts | 234 -------- src/store.ts | 291 ++++------ src/utils/bytes-to-peer.ts | 41 ++ src/utils/dedupe-addresses.ts | 51 ++ src/utils/peer-data-to-datastore-peer.ts | 116 ++++ src/utils/peer-id-to-datastore-key.ts | 15 + src/utils/to-peer-pb.ts | 237 ++++++++ test/address-book.spec.ts | 689 ----------------------- test/index.spec.ts | 165 ++++++ test/key-book.spec.ts | 129 ----- test/merge.spec.ts | 210 +++++++ test/metadata-book.spec.ts | 358 ------------ test/patch.spec.ts | 195 +++++++ test/peer-store.spec.ts | 324 ----------- test/proto-book.spec.ts | 388 ------------- test/save.spec.ts | 252 +++++++++ test/utils/dedupe-addresses.spec.ts | 79 +++ 29 files changed, 1767 insertions(+), 3546 deletions(-) delete mode 100644 src/address-book.ts delete mode 100644 src/key-book.ts delete mode 100644 src/metadata-book.ts delete mode 100644 src/pb/tags.proto delete mode 100644 src/pb/tags.ts delete mode 100644 src/proto-book.ts create mode 100644 src/utils/bytes-to-peer.ts create mode 100644 src/utils/dedupe-addresses.ts create mode 100644 src/utils/peer-data-to-datastore-peer.ts create mode 100644 src/utils/peer-id-to-datastore-key.ts create mode 100644 src/utils/to-peer-pb.ts delete mode 100644 test/address-book.spec.ts create mode 100644 test/index.spec.ts delete mode 100644 test/key-book.spec.ts create mode 100644 test/merge.spec.ts delete mode 100644 test/metadata-book.spec.ts create mode 100644 test/patch.spec.ts delete mode 100644 test/peer-store.spec.ts delete mode 100644 test/proto-book.spec.ts create mode 100644 test/save.spec.ts create mode 100644 test/utils/dedupe-addresses.spec.ts diff --git a/.gitignore b/.gitignore index 9a3cb9f..7ad9e67 100644 --- a/.gitignore +++ b/.gitignore @@ -1,7 +1,9 @@ node_modules +build dist .docs .coverage node_modules package-lock.json yarn.lock +.vscode diff --git a/README.md b/README.md index b55bf4f..6d7dffe 100644 --- a/README.md +++ b/README.md @@ -11,26 +11,6 @@ - [Install](#install) - [Browser ` ``` -## Description - -Libp2p's PeerStore is responsible for keeping an updated register with the relevant information of the known peers. It should be the single source of truth for all peer data, where a subsystem can learn about peers' data and where someone can listen for updates. The PeerStore comprises four main components: `addressBook`, `keyBook`, `protocolBook` and `metadataBook`. - -The PeerStore manages the high level operations on its inner books. Moreover, the PeerStore should be responsible for notifying interested parties of relevant events, through its Event Emitter. - -### Submitting records to the PeerStore - -Several libp2p subsystems will perform operations that might gather relevant information about peers. 
- -#### Identify - -- The Identify protocol automatically runs on every connection when multiplexing is enabled. The protocol will put the multiaddrs and protocols provided by the peer to the PeerStore. -- In the background, the Identify Service is also waiting for protocol change notifications of peers via the IdentifyPush protocol. Peers may leverage the `identify-push` message to communicate protocol changes to all connected peers, so that their PeerStore can be updated with the updated protocols. -- While it is currently not supported in js-libp2p, future iterations may also support the [IdentifyDelta protocol](https://github.com/libp2p/specs/pull/176). -- Taking into account that the Identify protocol records are directly from the peer, they should be considered the source of truth and weighted accordingly. - -#### Peer Discovery - -- Libp2p discovery protocols aim to discover new peers in the network. In a typical discovery protocol, addresses of the peer are discovered along with its peer id. Once this happens, a libp2p discovery protocol should emit a `peer` event with the information of the discovered peer and this information will be added to the PeerStore by libp2p. - -#### Dialer - -- Libp2p API supports dialing a peer given a `multiaddr`, and no prior knowledge of the peer. If the node is able to establish a connection with the peer, it and its multiaddr is added to the PeerStore. -- When a connection is being upgraded, more precisely after its encryption, or even in a discovery protocol, a libp2p node can get to know other parties public keys. In this scenario, libp2p will add the peer's public key to its `KeyBook`. - -#### DHT - -- On some DHT operations, such as finding providers for a given CID, nodes may exchange peer data as part of the query. This passive peer discovery should result in the DHT emitting the `peer` event in the same way [Peer Discovery](#peerdiscovery) does. - -### Retrieving records from the PeerStore - -When data in the PeerStore is updated the PeerStore will emit events based on the changes, to allow applications and other subsystems to take action on those changes. Any subsystem interested in these notifications should subscribe the [`PeerStore events`][peer-store-events]. - -#### Peer - -- Each time a new peer is discovered, the PeerStore should emit a [`peer` event][peer-store-events], so that interested parties can leverage this peer and establish a connection with it. - -#### Protocols - -- When the known protocols of a peer change, the PeerStore emits a [`change:protocols` event][peer-store-events]. - -#### Multiaddrs - -- When the known listening `multiaddrs` of a peer change, the PeerStore emits a [`change:multiaddrs` event][peer-store-events]. - -### PeerStore implementation - -The PeerStore wraps four main components: `addressBook`, `keyBook`, `protocolBook` and `metadataBook`. Moreover, it provides a high level API for those components, as well as data events. - -### Components - -#### Address Book - -The `addressBook` keeps the known multiaddrs of a peer. The multiaddrs of each peer may change over time and the Address Book must account for this. - -`Map` - -A `peerId.toString()` identifier mapping to a `Address` object, which should have the following structure: - -```js -{ - multiaddr: -} -``` - -#### Key Book - -The `keyBook` tracks the public keys of the peers by keeping their [`PeerId`][peer-id]. - -`Map>` - -A `peerId.toString()` identifier mapping to a `Set` of protocol identifier strings. 
- -#### Metadata Book - -The `metadataBook` keeps track of the known metadata of a peer. Its metadata is stored in a key value fashion, where a key identifier (`string`) represents a metadata value (`Uint8Array`). - -`Map>` - -A `peerId.toString()` identifier mapping to the peer metadata Map. - -### API - -For the complete API documentation, you should check the [API.md](https://libp2p.github.io/js-libp2p-peer-store). - -Access to its underlying books: - -- `peerStore.addressBook.*` -- `peerStore.keyBook.*` -- `peerStore.metadataBook.*` -- `peerStore.protoBook.*` - -### Events - -- `peer` - emitted when a new peer is added. -- `change:multiaddrs` - emitted when a known peer has a different set of multiaddrs. -- `change:protocols` - emitted when a known peer supports a different set of protocols. -- `change:pubkey` - emitted when a peer's public key is known. -- `change:metadata` - emitted when known metadata of a peer changes. - -## Data Persistence - -The data stored in the PeerStore can be persisted if configured appropriately. Keeping a record of the peers already discovered by the peer, as well as their known data aims to improve the efficiency of peers joining the network after being offline. - -The libp2p node will need to receive a [datastore](https://github.com/ipfs/interface-datastore), in order to persist this data across restarts. A [datastore](https://github.com/ipfs/interface-datastore) stores its data in a key-value fashion. As a result, we need coherent keys so that we do not overwrite data. - -The PeerStore should not continuously update the datastore whenever data is changed. Instead, it should only store new data after reaching a certain threshold of "dirty" peers, as well as when the node is stopped, in order to batch writes to the datastore. - -The peer id will be appended to the datastore key for each data namespace. The namespaces were defined as follows: - -**AddressBook** - -All the known peer addresses are stored with a key pattern as follows: - -`/peers/addrs/` - -**ProtoBook** - -All the known peer protocols are stored with a key pattern as follows: - -`/peers/protos/` - -**KeyBook** - -All public keys are stored under the following pattern: - -` /peers/keys/` - -**MetadataBook** - -Metadata is stored under the following key pattern: - -`/peers/metadata//` - -## Future Considerations - -- If multiaddr TTLs are added, the PeerStore may schedule jobs to delete all addresses that exceed the TTL to prevent AddressBook bloating -- Further API methods will probably need to be added in the context of multiaddr validity and confidence. -- When improving libp2p configuration for specific runtimes, we should take into account the PeerStore recommended datastore. -- When improving libp2p configuration, we should think about a possible way of allowing the configuration of Bootstrap to be influenced by the persisted peers, as a way to decrease the load on Bootstrap nodes. 
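The README sections removed above documented the retired book-based API (`addressBook`, `keyBook`, `protoBook`, `metadataBook`). For contrast with the consolidated `save`/`patch`/`merge` methods this patch introduces in `src/index.ts`, a minimal usage sketch follows. It assumes the published package name `@libp2p/peer-store` and `PeerData` field names (`multiaddrs`, `protocols`, `metadata`) as exposed by `@libp2p/interface-peer-store@2.x`; treat those names as assumptions rather than verbatim API.

```ts
import { PersistentPeerStore } from '@libp2p/peer-store'
import { EventEmitter } from '@libp2p/interfaces/events'
import type { Libp2pEvents } from '@libp2p/interface-libp2p'
import { createEd25519PeerId } from '@libp2p/peer-id-factory'
import { multiaddr } from '@multiformats/multiaddr'
import { MemoryDatastore } from 'datastore-core'

// components mirror PersistentPeerStoreComponents from src/index.ts below
const peerStore = new PersistentPeerStore({
  peerId: await createEd25519PeerId(),
  datastore: new MemoryDatastore(),
  events: new EventEmitter<Libp2pEvents>()
})

const remotePeer = await createEd25519PeerId()

// merge: union the supplied fields with whatever is already stored
// (roughly what addressBook.add / protoBook.add did before this patch)
await peerStore.merge(remotePeer, {
  multiaddrs: [multiaddr('/ip4/127.0.0.1/tcp/4001')], // assumed PeerData field name
  protocols: ['/ipfs/id/1.0.0']
})

// patch: overwrite only the supplied fields, leaving the rest untouched
await peerStore.patch(remotePeer, {
  metadata: new Map([['last-seen', new TextEncoder().encode(`${Date.now()}`)]])
})

const peer = await peerStore.get(remotePeer)
console.info(peer.addresses, peer.protocols, peer.metadata)
```

Each of these calls takes and releases the store's write lock once, so a peer's record is updated atomically instead of through several separate book calls.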
- ## API Docs - diff --git a/package.json b/package.json index 31c2337..4bb0cc2 100644 --- a/package.json +++ b/package.json @@ -131,7 +131,7 @@ "clean": "aegir clean", "lint": "aegir lint", "dep-check": "aegir dep-check -i protons", - "generate": "protons src/pb/peer.proto src/pb/tags.proto", + "generate": "protons src/pb/*.proto", "build": "aegir build", "test": "aegir test", "test:chrome": "aegir test -t browser --cov", @@ -144,17 +144,16 @@ "docs": "aegir docs" }, "dependencies": { + "@libp2p/crypto": "^1.0.15", + "@libp2p/interface-libp2p": "^2.0.0", "@libp2p/interface-peer-id": "^2.0.0", - "@libp2p/interface-peer-info": "^1.0.3", - "@libp2p/interface-peer-store": "^1.2.2", - "@libp2p/interface-record": "^2.0.1", + "@libp2p/interface-peer-store": "^2.0.1", "@libp2p/interfaces": "^3.2.0", - "@libp2p/logger": "^2.0.0", + "@libp2p/logger": "^2.0.7", "@libp2p/peer-id": "^2.0.0", - "@libp2p/peer-record": "^5.0.0", "@multiformats/multiaddr": "^12.0.0", "interface-datastore": "^8.0.0", - "mortice": "^3.0.0", + "mortice": "^3.0.1", "multiformats": "^11.0.0", "protons-runtime": "^5.0.0", "uint8arraylist": "^2.1.1", @@ -162,12 +161,11 @@ }, "devDependencies": { "@libp2p/peer-id-factory": "^2.0.0", - "@libp2p/utils": "^3.0.2", "aegir": "^38.1.6", "datastore-core": "^9.0.1", "delay": "^5.0.0", "p-defer": "^4.0.0", - "p-wait-for": "^5.0.0", + "p-event": "^5.0.1", "protons": "^7.0.2", "sinon": "^15.0.1" } diff --git a/src/address-book.ts b/src/address-book.ts deleted file mode 100644 index 0552d8c..0000000 --- a/src/address-book.ts +++ /dev/null @@ -1,367 +0,0 @@ -import { logger } from '@libp2p/logger' -import { CodeError } from '@libp2p/interfaces/errors' -import { isMultiaddr } from '@multiformats/multiaddr' -import { codes } from './errors.js' -import { PeerRecord, RecordEnvelope } from '@libp2p/peer-record' -import { peerIdFromPeerId } from '@libp2p/peer-id' -import { CustomEvent } from '@libp2p/interfaces/events' -import type { Address, AddressFilter, Peer, PeerMultiaddrsChangeData, PeerStore } from '@libp2p/interface-peer-store' -import type { Store } from './store.js' -import type { Envelope } from '@libp2p/interface-record' -import type { PeerId } from '@libp2p/interface-peer-id' -import type { PeerInfo } from '@libp2p/interface-peer-info' -import type { Multiaddr } from '@multiformats/multiaddr' - -const log = logger('libp2p:peer-store:address-book') -const EVENT_NAME = 'change:multiaddrs' - -async function allowAll (): Promise { - return true -} - -export class PeerStoreAddressBook { - private readonly dispatchEvent: PeerStore['dispatchEvent'] - private readonly store: Store - private readonly addressFilter: AddressFilter - - constructor (dispatchEvent: PeerStore['dispatchEvent'], store: Store, addressFilter?: AddressFilter) { - this.dispatchEvent = dispatchEvent - this.store = store - this.addressFilter = addressFilter ?? allowAll - } - - /** - * ConsumePeerRecord adds addresses from a signed peer record contained in a record envelope. - * This will return a boolean that indicates if the record was successfully processed and added - * into the AddressBook. 
- */ - async consumePeerRecord (envelope: Envelope): Promise { - log.trace('consumePeerRecord await write lock') - const release = await this.store.lock.writeLock() - log.trace('consumePeerRecord got write lock') - - let peerId - let peer: Peer | undefined - let updatedPeer - - try { - let peerRecord - try { - peerRecord = PeerRecord.createFromProtobuf(envelope.payload) - } catch (err: any) { - log.error('invalid peer record received') - return false - } - - peerId = peerRecord.peerId - const multiaddrs = peerRecord.multiaddrs - - // Verify peerId - if (!peerId.equals(envelope.peerId)) { - log('signing key does not match PeerId in the PeerRecord') - return false - } - - // ensure the record has multiaddrs - if (multiaddrs == null || multiaddrs.length === 0) { - return false - } - - if (await this.store.has(peerId)) { - peer = await this.store.load(peerId) - - if (peer.peerRecordEnvelope != null) { - const storedEnvelope = await RecordEnvelope.createFromProtobuf(peer.peerRecordEnvelope) - const storedRecord = PeerRecord.createFromProtobuf(storedEnvelope.payload) - - // ensure seq is greater than, or equal to, the last received - if (storedRecord.seqNumber >= peerRecord.seqNumber) { - log('sequence number was lower or equal to existing sequence number - stored: %d received: %d', storedRecord.seqNumber, peerRecord.seqNumber) - return false - } - } - } - - const addresses = await filterMultiaddrs(peerId, multiaddrs, this.addressFilter, true) - - // Replace unsigned addresses by the new ones from the record - // TODO: Once we have ttls for the addresses, we should merge these in - updatedPeer = await this.store.patchOrCreate(peerId, { - addresses, - peerRecordEnvelope: envelope.marshal().subarray() - }) - - log('stored provided peer record for %p', peerRecord.peerId) - } finally { - log.trace('consumePeerRecord release write lock') - release() - } - - this.dispatchEvent(new CustomEvent(EVENT_NAME, { - detail: { - peerId, - multiaddrs: updatedPeer.addresses.map(({ multiaddr }) => multiaddr), - oldMultiaddrs: peer == null ? [] : peer.addresses.map(({ multiaddr }) => multiaddr) - } - })) - - return true - } - - async getRawEnvelope (peerId: PeerId): Promise { - log.trace('getRawEnvelope await read lock') - const release = await this.store.lock.readLock() - log.trace('getRawEnvelope got read lock') - - try { - const peer = await this.store.load(peerId) - - return peer.peerRecordEnvelope - } catch (err: any) { - if (err.code !== codes.ERR_NOT_FOUND) { - throw err - } - } finally { - log.trace('getRawEnvelope release read lock') - release() - } - } - - /** - * Get an Envelope containing a PeerRecord for the given peer. - * Returns undefined if no record exists. 
- */ - async getPeerRecord (peerId: PeerId): Promise { - const raw = await this.getRawEnvelope(peerId) - - if (raw == null) { - return undefined - } - - return await RecordEnvelope.createFromProtobuf(raw) - } - - async get (peerId: PeerId): Promise { - peerId = peerIdFromPeerId(peerId) - - log.trace('get wait for read lock') - const release = await this.store.lock.readLock() - log.trace('get got read lock') - - try { - const peer = await this.store.load(peerId) - - return peer.addresses - } catch (err: any) { - if (err.code !== codes.ERR_NOT_FOUND) { - throw err - } - } finally { - log.trace('get release read lock') - release() - } - - return [] - } - - async set (peerId: PeerId, multiaddrs: Multiaddr[]): Promise { - peerId = peerIdFromPeerId(peerId) - - if (!Array.isArray(multiaddrs)) { - log.error('multiaddrs must be an array of Multiaddrs') - throw new CodeError('multiaddrs must be an array of Multiaddrs', codes.ERR_INVALID_PARAMETERS) - } - - log.trace('set await write lock') - const release = await this.store.lock.writeLock() - log.trace('set got write lock') - - let hasPeer = false - let peer: Peer | undefined - let updatedPeer - - try { - const addresses = await filterMultiaddrs(peerId, multiaddrs, this.addressFilter) - - // No valid addresses found - if (addresses.length === 0) { - return - } - - try { - peer = await this.store.load(peerId) - hasPeer = true - - if (new Set([ - ...addresses.map(({ multiaddr }) => multiaddr.toString()), - ...peer.addresses.map(({ multiaddr }) => multiaddr.toString()) - ]).size === peer.addresses.length && addresses.length === peer.addresses.length) { - // not changing anything, no need to update - return - } - } catch (err: any) { - if (err.code !== codes.ERR_NOT_FOUND) { - throw err - } - } - - updatedPeer = await this.store.patchOrCreate(peerId, { addresses }) - - log('set multiaddrs for %p', peerId) - } finally { - log.trace('set multiaddrs for %p', peerId) - log('set release write lock') - release() - } - - this.dispatchEvent(new CustomEvent(EVENT_NAME, { - detail: { - peerId, - multiaddrs: updatedPeer.addresses.map(addr => addr.multiaddr), - oldMultiaddrs: peer == null ? 
[] : peer.addresses.map(({ multiaddr }) => multiaddr) - } - })) - - // Notify the existence of a new peer - if (!hasPeer) { - this.dispatchEvent(new CustomEvent('peer', { - detail: { - id: peerId, - multiaddrs: updatedPeer.addresses.map(addr => addr.multiaddr), - protocols: updatedPeer.protocols - } - })) - } - } - - async add (peerId: PeerId, multiaddrs: Multiaddr[]): Promise { - peerId = peerIdFromPeerId(peerId) - - if (!Array.isArray(multiaddrs)) { - log.error('multiaddrs must be an array of Multiaddrs') - throw new CodeError('multiaddrs must be an array of Multiaddrs', codes.ERR_INVALID_PARAMETERS) - } - - log.trace('add await write lock') - const release = await this.store.lock.writeLock() - log.trace('add got write lock') - - let hasPeer = false - let peer: Peer | undefined - let updatedPeer - - try { - const addresses = await filterMultiaddrs(peerId, multiaddrs, this.addressFilter) - - // No valid addresses found - if (addresses.length === 0) { - return - } - - try { - peer = await this.store.load(peerId) - hasPeer = true - - if (new Set([ - ...addresses.map(({ multiaddr }) => multiaddr.toString()), - ...peer.addresses.map(({ multiaddr }) => multiaddr.toString()) - ]).size === peer.addresses.length) { - return - } - } catch (err: any) { - if (err.code !== codes.ERR_NOT_FOUND) { - throw err - } - } - - updatedPeer = await this.store.mergeOrCreate(peerId, { addresses }) - - log('added multiaddrs for %p', peerId) - } finally { - log.trace('set release write lock') - release() - } - - this.dispatchEvent(new CustomEvent(EVENT_NAME, { - detail: { - peerId, - multiaddrs: updatedPeer.addresses.map(addr => addr.multiaddr), - oldMultiaddrs: peer == null ? [] : peer.addresses.map(({ multiaddr }) => multiaddr) - } - })) - - // Notify the existence of a new peer - if (!hasPeer) { - this.dispatchEvent(new CustomEvent('peer', { - detail: { - id: peerId, - multiaddrs: updatedPeer.addresses.map(addr => addr.multiaddr), - protocols: updatedPeer.protocols - } - })) - } - } - - async delete (peerId: PeerId): Promise { - peerId = peerIdFromPeerId(peerId) - - log.trace('delete await write lock') - const release = await this.store.lock.writeLock() - log.trace('delete got write lock') - - let peer: Peer | undefined - - try { - try { - peer = await this.store.load(peerId) - } catch (err: any) { - if (err.code !== codes.ERR_NOT_FOUND) { - throw err - } - } - - await this.store.patchOrCreate(peerId, { - addresses: [] - }) - } finally { - log.trace('delete release write lock') - release() - } - - if (peer != null) { - this.dispatchEvent(new CustomEvent(EVENT_NAME, { - detail: { - peerId, - multiaddrs: [], - oldMultiaddrs: peer == null ? 
[] : peer.addresses.map(({ multiaddr }) => multiaddr) - } - })) - } - } -} - -async function filterMultiaddrs (peerId: PeerId, multiaddrs: Multiaddr[], addressFilter: AddressFilter, isCertified: boolean = false): Promise { - const output: Address[] = [] - - await Promise.all( - multiaddrs.map(async multiaddr => { - if (!isMultiaddr(multiaddr)) { - log.error('multiaddr must be an instance of Multiaddr') - throw new CodeError('multiaddr must be an instance of Multiaddr', codes.ERR_INVALID_PARAMETERS) - } - - const include = await addressFilter(peerId, multiaddr) - - if (!include) { - return - } - - output.push({ - multiaddr, - isCertified - }) - }) - ) - - return output -} diff --git a/src/errors.ts b/src/errors.ts index 60efb24..48c52e7 100644 --- a/src/errors.ts +++ b/src/errors.ts @@ -1,5 +1,4 @@ export const codes = { - ERR_INVALID_PARAMETERS: 'ERR_INVALID_PARAMETERS', - ERR_NOT_FOUND: 'ERR_NOT_FOUND' + ERR_INVALID_PARAMETERS: 'ERR_INVALID_PARAMETERS' } diff --git a/src/index.ts b/src/index.ts index 4970254..29355d3 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,79 +1,79 @@ -import { logger } from '@libp2p/logger' -import { EventEmitter } from '@libp2p/interfaces/events' -import { PeerStoreAddressBook } from './address-book.js' -import { PeerStoreKeyBook } from './key-book.js' -import { PeerStoreMetadataBook } from './metadata-book.js' -import { PeerStoreProtoBook } from './proto-book.js' -import { PersistentStore, Store } from './store.js' -import type { PeerStore, AddressBook, KeyBook, MetadataBook, ProtoBook, PeerStoreEvents, PeerStoreInit, Peer, TagOptions } from '@libp2p/interface-peer-store' +import type { EventEmitter } from '@libp2p/interfaces/events' +import { PersistentStore, PeerUpdate } from './store.js' +import type { PeerStore, Peer, PeerData } from '@libp2p/interface-peer-store' import type { PeerId } from '@libp2p/interface-peer-id' -import { CodeError } from '@libp2p/interfaces/errors' -import { Tag, Tags } from './pb/tags.js' import type { Datastore } from 'interface-datastore' +import type { Multiaddr } from '@multiformats/multiaddr' +import type { Libp2pEvents } from '@libp2p/interface-libp2p' +import { logger } from '@libp2p/logger' const log = logger('libp2p:peer-store') export interface PersistentPeerStoreComponents { peerId: PeerId datastore: Datastore + events: EventEmitter +} + +/** + * Return true to allow storing the passed multiaddr for the passed peer + */ +export interface AddressFilter { + (peerId: PeerId, multiaddr: Multiaddr): Promise +} + +export interface PersistentPeerStoreInit { + addressFilter?: AddressFilter } /** * An implementation of PeerStore that stores data in a Datastore */ -export class PersistentPeerStore extends EventEmitter implements PeerStore { - public addressBook: AddressBook - public keyBook: KeyBook - public metadataBook: MetadataBook - public protoBook: ProtoBook - - private readonly components: PersistentPeerStoreComponents - private readonly store: Store - - constructor (components: PersistentPeerStoreComponents, init: PeerStoreInit = {}) { - super() - - this.components = components - this.store = new PersistentStore(components) - this.addressBook = new PeerStoreAddressBook(this.dispatchEvent.bind(this), this.store, init.addressFilter) - this.keyBook = new PeerStoreKeyBook(this.dispatchEvent.bind(this), this.store) - this.metadataBook = new PeerStoreMetadataBook(this.dispatchEvent.bind(this), this.store) - this.protoBook = new PeerStoreProtoBook(this.dispatchEvent.bind(this), this.store) +export class PersistentPeerStore 
implements PeerStore { + private readonly store: PersistentStore + private readonly events: EventEmitter + private readonly peerId: PeerId + + constructor (components: PersistentPeerStoreComponents, init: PersistentPeerStoreInit = {}) { + this.events = components.events + this.peerId = components.peerId + this.store = new PersistentStore(components, init) } async forEach (fn: (peer: Peer) => void): Promise { - log.trace('getPeers await read lock') + log.trace('forEach await read lock') const release = await this.store.lock.readLock() - log.trace('getPeers got read lock') + log.trace('forEach got read lock') try { for await (const peer of this.store.all()) { - if (peer.id.equals(this.components.peerId)) { - // Skip self peer if present - continue - } - fn(peer) } } finally { - log.trace('getPeers release read lock') + log.trace('forEach release read lock') release() } } async all (): Promise { - const output: Peer[] = [] + log.trace('all await read lock') + const release = await this.store.lock.readLock() + log.trace('all got read lock') - await this.forEach(peer => { - output.push(peer) - }) + try { + const output: Peer[] = [] + + for await (const peer of this.store.all()) { + output.push(peer) + } - return output + return output + } finally { + log.trace('all release read lock') + release() + } } - /** - * Delete the information of the given peer in every book - */ async delete (peerId: PeerId): Promise { log.trace('delete await write lock') const release = await this.store.lock.writeLock() @@ -87,9 +87,19 @@ export class PersistentPeerStore extends EventEmitter implement } } - /** - * Get the stored information of a given peer - */ + async has (peerId: PeerId): Promise { + log.trace('has await read lock') + const release = await this.store.lock.readLock() + log.trace('has got read lock') + + try { + return await this.store.has(peerId) + } finally { + log.trace('has release read lock') + release() + } + } + async get (peerId: PeerId): Promise { log.trace('get await read lock') const release = await this.store.lock.readLock() @@ -103,82 +113,66 @@ export class PersistentPeerStore extends EventEmitter implement } } - /** - * Returns true if we have a record of the peer - */ - async has (peerId: PeerId): Promise { - log.trace('has await read lock') - const release = await this.store.lock.readLock() - log.trace('has got read lock') + async save (id: PeerId, data: PeerData): Promise { + log.trace('save await write lock') + const release = await this.store.lock.writeLock() + log.trace('save got write lock') try { - return await this.store.has(peerId) + const result = await this.store.save(id, data) + + this.#emitIfUpdated(id, result) + + return result.peer } finally { - log.trace('has release read lock') + log.trace('save release write lock') release() } } - async tagPeer (peerId: PeerId, tag: string, options: TagOptions = {}): Promise { - const providedValue = options.value ?? 0 - const value = Math.round(providedValue) - const ttl = options.ttl ?? 
undefined + async patch (id: PeerId, data: PeerData): Promise { + log.trace('patch await write lock') + const release = await this.store.lock.writeLock() + log.trace('patch got write lock') - if (value !== providedValue || value < 0 || value > 100) { - throw new CodeError('Tag value must be between 0-100', 'ERR_TAG_VALUE_OUT_OF_BOUNDS') - } + try { + const result = await this.store.patch(id, data) - const buf = await this.metadataBook.getValue(peerId, 'tags') - let tags: Tag[] = [] + this.#emitIfUpdated(id, result) - if (buf != null) { - tags = Tags.decode(buf).tags + return result.peer + } finally { + log.trace('patch release write lock') + release() } - - // do not allow duplicate tags - tags = tags.filter(t => t.name !== tag) - - tags.push({ - name: tag, - value, - expiry: ttl == null ? undefined : BigInt(Date.now() + ttl) - }) - - await this.metadataBook.setValue(peerId, 'tags', Tags.encode({ tags }).subarray()) } - async unTagPeer (peerId: PeerId, tag: string): Promise { - const buf = await this.metadataBook.getValue(peerId, 'tags') - let tags: Tag[] = [] + async merge (id: PeerId, data: PeerData): Promise { + log.trace('merge await write lock') + const release = await this.store.lock.writeLock() + log.trace('merge got write lock') - if (buf != null) { - tags = Tags.decode(buf).tags - } + try { + const result = await this.store.merge(id, data) - tags = tags.filter(t => t.name !== tag) + this.#emitIfUpdated(id, result) - await this.metadataBook.setValue(peerId, 'tags', Tags.encode({ tags }).subarray()) + return result.peer + } finally { + log.trace('merge release write lock') + release() + } } - async getTags (peerId: PeerId): Promise> { - const buf = await this.metadataBook.getValue(peerId, 'tags') - let tags: Tag[] = [] - - if (buf != null) { - tags = Tags.decode(buf).tags + #emitIfUpdated (id: PeerId, result: PeerUpdate): void { + if (!result.updated) { + return } - const now = BigInt(Date.now()) - const unexpiredTags = tags.filter(tag => tag.expiry == null || tag.expiry > now) - - if (unexpiredTags.length !== tags.length) { - // remove any expired tags - await this.metadataBook.setValue(peerId, 'tags', Tags.encode({ tags: unexpiredTags }).subarray()) + if (this.peerId.equals(id)) { + this.events.safeDispatchEvent('self:peer:update', { detail: result }) + } else { + this.events.safeDispatchEvent('peer:update', { detail: result }) } - - return unexpiredTags.map(t => ({ - name: t.name, - value: t.value ?? 
0 - })) } } diff --git a/src/key-book.ts b/src/key-book.ts deleted file mode 100644 index 078964b..0000000 --- a/src/key-book.ts +++ /dev/null @@ -1,140 +0,0 @@ -import { logger } from '@libp2p/logger' -import { CodeError } from '@libp2p/interfaces/errors' -import { codes } from './errors.js' -import { peerIdFromPeerId } from '@libp2p/peer-id' -import { equals as uint8arrayEquals } from 'uint8arrays/equals' -import { CustomEvent } from '@libp2p/interfaces/events' -import type { Store } from './store.js' -import type { PeerStore, KeyBook, PeerPublicKeyChangeData, Peer } from '@libp2p/interface-peer-store' -import type { PeerId } from '@libp2p/interface-peer-id' - -const log = logger('libp2p:peer-store:key-book') - -const EVENT_NAME = 'change:pubkey' - -export class PeerStoreKeyBook implements KeyBook { - private readonly dispatchEvent: PeerStore['dispatchEvent'] - private readonly store: Store - - /** - * The KeyBook is responsible for keeping the known public keys of a peer - */ - constructor (dispatchEvent: PeerStore['dispatchEvent'], store: Store) { - this.dispatchEvent = dispatchEvent - this.store = store - } - - /** - * Set the Peer public key - */ - async set (peerId: PeerId, publicKey: Uint8Array): Promise { - peerId = peerIdFromPeerId(peerId) - - if (!(publicKey instanceof Uint8Array)) { - log.error('publicKey must be an instance of Uint8Array to store data') - throw new CodeError('publicKey must be an instance of PublicKey', codes.ERR_INVALID_PARAMETERS) - } - - log.trace('set await write lock') - const release = await this.store.lock.writeLock() - log.trace('set got write lock') - - let updatedKey = false - let peer: Peer | undefined - - try { - try { - peer = await this.store.load(peerId) - - if ((peer.pubKey != null) && uint8arrayEquals(peer.pubKey, publicKey)) { - return - } - } catch (err: any) { - if (err.code !== codes.ERR_NOT_FOUND) { - throw err - } - } - - await this.store.patchOrCreate(peerId, { - pubKey: publicKey - }) - updatedKey = true - } finally { - log.trace('set release write lock') - release() - } - - if (updatedKey) { - this.dispatchEvent(new CustomEvent(EVENT_NAME, { - detail: { - peerId, - publicKey, - oldPublicKey: peer == null ? undefined : peer.pubKey - } - })) - } - } - - /** - * Get Public key of the given PeerId, if stored - */ - async get (peerId: PeerId): Promise { - peerId = peerIdFromPeerId(peerId) - - log.trace('get await write lock') - const release = await this.store.lock.readLock() - log.trace('get got write lock') - - try { - const peer = await this.store.load(peerId) - - return peer.pubKey - } catch (err: any) { - if (err.code !== codes.ERR_NOT_FOUND) { - throw err - } - } finally { - log('get release write lock') - release() - } - } - - async delete (peerId: PeerId): Promise { - peerId = peerIdFromPeerId(peerId) - - log.trace('delete await write lock') - const release = await this.store.lock.writeLock() - log.trace('delete got write lock') - - let peer: Peer | undefined - - try { - try { - peer = await this.store.load(peerId) - } catch (err: any) { - if (err.code !== codes.ERR_NOT_FOUND) { - throw err - } - } - - await this.store.patchOrCreate(peerId, { - pubKey: undefined - }) - } catch (err: any) { - if (err.code !== codes.ERR_NOT_FOUND) { - throw err - } - } finally { - log.trace('delete release write lock') - release() - } - - this.dispatchEvent(new CustomEvent(EVENT_NAME, { - detail: { - peerId, - publicKey: undefined, - oldPublicKey: peer == null ? 
undefined : peer.pubKey - } - })) - } -} diff --git a/src/metadata-book.ts b/src/metadata-book.ts deleted file mode 100644 index 698a009..0000000 --- a/src/metadata-book.ts +++ /dev/null @@ -1,244 +0,0 @@ -import { logger } from '@libp2p/logger' -import { CodeError } from '@libp2p/interfaces/errors' -import { codes } from './errors.js' -import { peerIdFromPeerId } from '@libp2p/peer-id' -import { equals as uint8ArrayEquals } from 'uint8arrays/equals' -import { CustomEvent } from '@libp2p/interfaces/events' -import type { Store } from './store.js' -import type { PeerStore, MetadataBook, PeerMetadataChangeData, Peer } from '@libp2p/interface-peer-store' -import type { PeerId } from '@libp2p/interface-peer-id' - -const log = logger('libp2p:peer-store:metadata-book') - -const EVENT_NAME = 'change:metadata' - -export class PeerStoreMetadataBook implements MetadataBook { - private readonly dispatchEvent: PeerStore['dispatchEvent'] - private readonly store: Store - - /** - * The MetadataBook is responsible for keeping metadata - * about known peers - */ - constructor (dispatchEvent: PeerStore['dispatchEvent'], store: Store) { - this.dispatchEvent = dispatchEvent - this.store = store - } - - /** - * Get the known data of a provided peer - */ - async get (peerId: PeerId): Promise> { - peerId = peerIdFromPeerId(peerId) - - log.trace('get await read lock') - const release = await this.store.lock.readLock() - log.trace('get got read lock') - - try { - const peer = await this.store.load(peerId) - - return peer.metadata - } catch (err: any) { - if (err.code !== codes.ERR_NOT_FOUND) { - throw err - } - } finally { - log.trace('get release read lock') - release() - } - - return new Map() - } - - /** - * Get specific metadata value, if it exists - */ - async getValue (peerId: PeerId, key: string): Promise { - peerId = peerIdFromPeerId(peerId) - - log.trace('getValue await read lock') - const release = await this.store.lock.readLock() - log.trace('getValue got read lock') - - try { - const peer = await this.store.load(peerId) - - return peer.metadata.get(key) - } catch (err: any) { - if (err.code !== codes.ERR_NOT_FOUND) { - throw err - } - } finally { - log.trace('getValue release write lock') - release() - } - } - - async set (peerId: PeerId, metadata: Map): Promise { - peerId = peerIdFromPeerId(peerId) - - if (!(metadata instanceof Map)) { - log.error('valid metadata must be provided to store data') - throw new CodeError('valid metadata must be provided', codes.ERR_INVALID_PARAMETERS) - } - - log.trace('set await write lock') - const release = await this.store.lock.writeLock() - log.trace('set got write lock') - - let peer: Peer | undefined - - try { - try { - peer = await this.store.load(peerId) - } catch (err: any) { - if (err.code !== codes.ERR_NOT_FOUND) { - throw err - } - } - - await this.store.mergeOrCreate(peerId, { - metadata - }) - } finally { - log.trace('set release write lock') - release() - } - - this.dispatchEvent(new CustomEvent(EVENT_NAME, { - detail: { - peerId, - metadata, - oldMetadata: peer == null ? 
new Map() : peer.metadata - } - })) - } - - /** - * Set metadata key and value of a provided peer - */ - async setValue (peerId: PeerId, key: string, value: Uint8Array): Promise { - peerId = peerIdFromPeerId(peerId) - - if (typeof key !== 'string' || !(value instanceof Uint8Array)) { - log.error('valid key and value must be provided to store data') - throw new CodeError('valid key and value must be provided', codes.ERR_INVALID_PARAMETERS) - } - - log.trace('setValue await write lock') - const release = await this.store.lock.writeLock() - log.trace('setValue got write lock') - - let peer: Peer | undefined - let updatedPeer - - try { - try { - peer = await this.store.load(peerId) - const existingValue = peer.metadata.get(key) - - if (existingValue != null && uint8ArrayEquals(value, existingValue)) { - return - } - } catch (err: any) { - if (err.code !== codes.ERR_NOT_FOUND) { - throw err - } - } - - updatedPeer = await this.store.mergeOrCreate(peerId, { - metadata: new Map([[key, value]]) - }) - } finally { - log.trace('setValue release write lock') - release() - } - - this.dispatchEvent(new CustomEvent(EVENT_NAME, { - detail: { - peerId, - metadata: updatedPeer.metadata, - oldMetadata: peer == null ? new Map() : peer.metadata - } - })) - } - - async delete (peerId: PeerId): Promise { - peerId = peerIdFromPeerId(peerId) - - log.trace('delete await write lock') - const release = await this.store.lock.writeLock() - log.trace('delete got write lock') - - let peer: Peer | undefined - - try { - try { - peer = await this.store.load(peerId) - } catch (err: any) { - if (err.code !== codes.ERR_NOT_FOUND) { - throw err - } - } - - if (peer != null) { - await this.store.patch(peerId, { - metadata: new Map() - }) - } - } finally { - log.trace('delete release write lock') - release() - } - - if (peer != null) { - this.dispatchEvent(new CustomEvent(EVENT_NAME, { - detail: { - peerId, - metadata: new Map(), - oldMetadata: peer.metadata - } - })) - } - } - - async deleteValue (peerId: PeerId, key: string): Promise { - peerId = peerIdFromPeerId(peerId) - - log.trace('deleteValue await write lock') - const release = await this.store.lock.writeLock() - log.trace('deleteValue got write lock') - - let metadata - let peer: Peer | undefined - - try { - peer = await this.store.load(peerId) - metadata = peer.metadata - - metadata.delete(key) - - await this.store.patch(peerId, { - metadata - }) - } catch (err: any) { - if (err.code !== codes.ERR_NOT_FOUND) { - throw err - } - } finally { - log.trace('deleteValue release write lock') - release() - } - - if (metadata != null) { - this.dispatchEvent(new CustomEvent(EVENT_NAME, { - detail: { - peerId, - metadata, - oldMetadata: peer == null ? 
new Map() : peer.metadata - } - })) - } - } -} diff --git a/src/pb/peer.proto b/src/pb/peer.proto index 1c9cc16..01c3be1 100644 --- a/src/pb/peer.proto +++ b/src/pb/peer.proto @@ -7,14 +7,17 @@ message Peer { // The protocols the peer supports repeated string protocols = 2; - // Any peer metadata - repeated Metadata metadata = 3; - // The public key of the peer - optional bytes pub_key = 4; + optional bytes public_key = 4; // The most recently received signed PeerRecord optional bytes peer_record_envelope = 5; + + // Any peer metadata + map metadata = 6; + + // Any tags the peer has + map tags = 7; } // Address represents a single multiaddr @@ -25,7 +28,7 @@ message Address { optional bool isCertified = 2; } -message Metadata { - string key = 1; - bytes value = 2; +message Tag { + uint32 value = 1; // tag value 0-100 + optional uint64 expiry = 2; // ms timestamp after which the tag is no longer valid } diff --git a/src/pb/peer.ts b/src/pb/peer.ts index ac612d1..9ceb63f 100644 --- a/src/pb/peer.ts +++ b/src/pb/peer.ts @@ -11,12 +11,148 @@ import type { Uint8ArrayList } from 'uint8arraylist' export interface Peer { addresses: Address[] protocols: string[] - metadata: Metadata[] - pubKey?: Uint8Array + publicKey?: Uint8Array peerRecordEnvelope?: Uint8Array + metadata: Map + tags: Map } export namespace Peer { + export interface Peer$metadataEntry { + key: string + value: Uint8Array + } + + export namespace Peer$metadataEntry { + let _codec: Codec + + export const codec = (): Codec => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork() + } + + if ((obj.key != null && obj.key !== '')) { + w.uint32(10) + w.string(obj.key) + } + + if ((obj.value != null && obj.value.byteLength > 0)) { + w.uint32(18) + w.bytes(obj.value) + } + + if (opts.lengthDelimited !== false) { + w.ldelim() + } + }, (reader, length) => { + const obj: any = { + key: '', + value: new Uint8Array(0) + } + + const end = length == null ? reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.key = reader.string() + break + case 2: + obj.value = reader.bytes() + break + default: + reader.skipType(tag & 7) + break + } + } + + return obj + }) + } + + return _codec + } + + export const encode = (obj: Partial): Uint8Array => { + return encodeMessage(obj, Peer$metadataEntry.codec()) + } + + export const decode = (buf: Uint8Array | Uint8ArrayList): Peer$metadataEntry => { + return decodeMessage(buf, Peer$metadataEntry.codec()) + } + } + + export interface Peer$tagsEntry { + key: string + value?: Tag + } + + export namespace Peer$tagsEntry { + let _codec: Codec + + export const codec = (): Codec => { + if (_codec == null) { + _codec = message((obj, w, opts = {}) => { + if (opts.lengthDelimited !== false) { + w.fork() + } + + if ((obj.key != null && obj.key !== '')) { + w.uint32(10) + w.string(obj.key) + } + + if (obj.value != null) { + w.uint32(18) + Tag.codec().encode(obj.value, w) + } + + if (opts.lengthDelimited !== false) { + w.ldelim() + } + }, (reader, length) => { + const obj: any = { + key: '' + } + + const end = length == null ? 
reader.len : reader.pos + length + + while (reader.pos < end) { + const tag = reader.uint32() + + switch (tag >>> 3) { + case 1: + obj.key = reader.string() + break + case 2: + obj.value = Tag.codec().decode(reader, reader.uint32()) + break + default: + reader.skipType(tag & 7) + break + } + } + + return obj + }) + } + + return _codec + } + + export const encode = (obj: Partial): Uint8Array => { + return encodeMessage(obj, Peer$tagsEntry.codec()) + } + + export const decode = (buf: Uint8Array | Uint8ArrayList): Peer$tagsEntry => { + return decodeMessage(buf, Peer$tagsEntry.codec()) + } + } + let _codec: Codec export const codec = (): Codec => { @@ -40,16 +176,9 @@ export namespace Peer { } } - if (obj.metadata != null) { - for (const value of obj.metadata) { - w.uint32(26) - Metadata.codec().encode(value, w) - } - } - - if (obj.pubKey != null) { + if (obj.publicKey != null) { w.uint32(34) - w.bytes(obj.pubKey) + w.bytes(obj.publicKey) } if (obj.peerRecordEnvelope != null) { @@ -57,6 +186,20 @@ export namespace Peer { w.bytes(obj.peerRecordEnvelope) } + if (obj.metadata != null && obj.metadata.size !== 0) { + for (const [key, value] of obj.metadata.entries()) { + w.uint32(50) + Peer.Peer$metadataEntry.codec().encode({ key, value }, w) + } + } + + if (obj.tags != null && obj.tags.size !== 0) { + for (const [key, value] of obj.tags.entries()) { + w.uint32(58) + Peer.Peer$tagsEntry.codec().encode({ key, value }, w) + } + } + if (opts.lengthDelimited !== false) { w.ldelim() } @@ -64,7 +207,8 @@ export namespace Peer { const obj: any = { addresses: [], protocols: [], - metadata: [] + metadata: new Map(), + tags: new Map() } const end = length == null ? reader.len : reader.pos + length @@ -79,15 +223,22 @@ export namespace Peer { case 2: obj.protocols.push(reader.string()) break - case 3: - obj.metadata.push(Metadata.codec().decode(reader, reader.uint32())) - break case 4: - obj.pubKey = reader.bytes() + obj.publicKey = reader.bytes() break case 5: obj.peerRecordEnvelope = reader.bytes() break + case 6: { + const entry = Peer.Peer$metadataEntry.codec().decode(reader, reader.uint32()) + obj.metadata.set(entry.key, entry.value) + break + } + case 7: { + const entry = Peer.Peer$tagsEntry.codec().decode(reader, reader.uint32()) + obj.tags.set(entry.key, entry.value) + break + } default: reader.skipType(tag & 7) break @@ -177,29 +328,29 @@ export namespace Address { } } -export interface Metadata { - key: string - value: Uint8Array +export interface Tag { + value: number + expiry?: bigint } -export namespace Metadata { - let _codec: Codec +export namespace Tag { + let _codec: Codec - export const codec = (): Codec => { + export const codec = (): Codec => { if (_codec == null) { - _codec = message((obj, w, opts = {}) => { + _codec = message((obj, w, opts = {}) => { if (opts.lengthDelimited !== false) { w.fork() } - if ((obj.key != null && obj.key !== '')) { - w.uint32(10) - w.string(obj.key) + if ((obj.value != null && obj.value !== 0)) { + w.uint32(8) + w.uint32(obj.value) } - if ((obj.value != null && obj.value.byteLength > 0)) { - w.uint32(18) - w.bytes(obj.value) + if (obj.expiry != null) { + w.uint32(16) + w.uint64(obj.expiry) } if (opts.lengthDelimited !== false) { @@ -207,8 +358,7 @@ export namespace Metadata { } }, (reader, length) => { const obj: any = { - key: '', - value: new Uint8Array(0) + value: 0 } const end = length == null ? 
reader.len : reader.pos + length @@ -218,10 +368,10 @@ export namespace Metadata { switch (tag >>> 3) { case 1: - obj.key = reader.string() + obj.value = reader.uint32() break case 2: - obj.value = reader.bytes() + obj.expiry = reader.uint64() break default: reader.skipType(tag & 7) @@ -236,11 +386,11 @@ export namespace Metadata { return _codec } - export const encode = (obj: Partial): Uint8Array => { - return encodeMessage(obj, Metadata.codec()) + export const encode = (obj: Partial): Uint8Array => { + return encodeMessage(obj, Tag.codec()) } - export const decode = (buf: Uint8Array | Uint8ArrayList): Metadata => { - return decodeMessage(buf, Metadata.codec()) + export const decode = (buf: Uint8Array | Uint8ArrayList): Tag => { + return decodeMessage(buf, Tag.codec()) } } diff --git a/src/pb/tags.proto b/src/pb/tags.proto deleted file mode 100644 index 90e172f..0000000 --- a/src/pb/tags.proto +++ /dev/null @@ -1,11 +0,0 @@ -syntax = "proto3"; - -message Tags { - repeated Tag tags = 1; -} - -message Tag { - string name = 1; // e.g. 'priority' - optional uint32 value = 2; // tag value 0-100 - optional uint64 expiry = 3; // ms timestamp after which the tag is no longer valid -} diff --git a/src/pb/tags.ts b/src/pb/tags.ts deleted file mode 100644 index 0d68422..0000000 --- a/src/pb/tags.ts +++ /dev/null @@ -1,145 +0,0 @@ -/* eslint-disable import/export */ -/* eslint-disable complexity */ -/* eslint-disable @typescript-eslint/no-namespace */ -/* eslint-disable @typescript-eslint/no-unnecessary-boolean-literal-compare */ -/* eslint-disable @typescript-eslint/no-empty-interface */ - -import { encodeMessage, decodeMessage, message } from 'protons-runtime' -import type { Codec } from 'protons-runtime' -import type { Uint8ArrayList } from 'uint8arraylist' - -export interface Tags { - tags: Tag[] -} - -export namespace Tags { - let _codec: Codec - - export const codec = (): Codec => { - if (_codec == null) { - _codec = message((obj, w, opts = {}) => { - if (opts.lengthDelimited !== false) { - w.fork() - } - - if (obj.tags != null) { - for (const value of obj.tags) { - w.uint32(10) - Tag.codec().encode(value, w) - } - } - - if (opts.lengthDelimited !== false) { - w.ldelim() - } - }, (reader, length) => { - const obj: any = { - tags: [] - } - - const end = length == null ? reader.len : reader.pos + length - - while (reader.pos < end) { - const tag = reader.uint32() - - switch (tag >>> 3) { - case 1: - obj.tags.push(Tag.codec().decode(reader, reader.uint32())) - break - default: - reader.skipType(tag & 7) - break - } - } - - return obj - }) - } - - return _codec - } - - export const encode = (obj: Partial): Uint8Array => { - return encodeMessage(obj, Tags.codec()) - } - - export const decode = (buf: Uint8Array | Uint8ArrayList): Tags => { - return decodeMessage(buf, Tags.codec()) - } -} - -export interface Tag { - name: string - value?: number - expiry?: bigint -} - -export namespace Tag { - let _codec: Codec - - export const codec = (): Codec => { - if (_codec == null) { - _codec = message((obj, w, opts = {}) => { - if (opts.lengthDelimited !== false) { - w.fork() - } - - if ((obj.name != null && obj.name !== '')) { - w.uint32(10) - w.string(obj.name) - } - - if (obj.value != null) { - w.uint32(16) - w.uint32(obj.value) - } - - if (obj.expiry != null) { - w.uint32(24) - w.uint64(obj.expiry) - } - - if (opts.lengthDelimited !== false) { - w.ldelim() - } - }, (reader, length) => { - const obj: any = { - name: '' - } - - const end = length == null ? 
reader.len : reader.pos + length - - while (reader.pos < end) { - const tag = reader.uint32() - - switch (tag >>> 3) { - case 1: - obj.name = reader.string() - break - case 2: - obj.value = reader.uint32() - break - case 3: - obj.expiry = reader.uint64() - break - default: - reader.skipType(tag & 7) - break - } - } - - return obj - }) - } - - return _codec - } - - export const encode = (obj: Partial): Uint8Array => { - return encodeMessage(obj, Tag.codec()) - } - - export const decode = (buf: Uint8Array | Uint8ArrayList): Tag => { - return decodeMessage(buf, Tag.codec()) - } -} diff --git a/src/proto-book.ts b/src/proto-book.ts deleted file mode 100644 index 9479e7e..0000000 --- a/src/proto-book.ts +++ /dev/null @@ -1,234 +0,0 @@ -import { logger } from '@libp2p/logger' -import { CodeError } from '@libp2p/interfaces/errors' -import { codes } from './errors.js' -import { peerIdFromPeerId } from '@libp2p/peer-id' -import { CustomEvent } from '@libp2p/interfaces/events' -import type { Store } from './store.js' -import type { Peer, PeerProtocolsChangeData, PeerStore, ProtoBook } from '@libp2p/interface-peer-store' -import type { PeerId } from '@libp2p/interface-peer-id' - -const log = logger('libp2p:peer-store:proto-book') - -const EVENT_NAME = 'change:protocols' - -export class PeerStoreProtoBook implements ProtoBook { - private readonly dispatchEvent: PeerStore['dispatchEvent'] - private readonly store: Store - - /** - * The ProtoBook is responsible for keeping the known supported - * protocols of a peer - */ - constructor (dispatchEvent: PeerStore['dispatchEvent'], store: Store) { - this.dispatchEvent = dispatchEvent - this.store = store - } - - async get (peerId: PeerId): Promise { - log.trace('get wait for read lock') - const release = await this.store.lock.readLock() - log.trace('get got read lock') - - try { - const peer = await this.store.load(peerId) - - return peer.protocols - } catch (err: any) { - if (err.code !== codes.ERR_NOT_FOUND) { - throw err - } - } finally { - log.trace('get release read lock') - release() - } - - return [] - } - - async set (peerId: PeerId, protocols: string[]): Promise { - peerId = peerIdFromPeerId(peerId) - - if (!Array.isArray(protocols)) { - log.error('protocols must be provided to store data') - throw new CodeError('protocols must be provided', codes.ERR_INVALID_PARAMETERS) - } - - log.trace('set await write lock') - const release = await this.store.lock.writeLock() - log.trace('set got write lock') - - let peer - let updatedPeer - - try { - try { - peer = await this.store.load(peerId) - - if (new Set([ - ...protocols - ]).size === peer.protocols.length) { - return - } - } catch (err: any) { - if (err.code !== codes.ERR_NOT_FOUND) { - throw err - } - } - - updatedPeer = await this.store.patchOrCreate(peerId, { - protocols - }) - - log('stored provided protocols for %p', peerId) - } finally { - log.trace('set release write lock') - release() - } - - this.dispatchEvent(new CustomEvent(EVENT_NAME, { - detail: { - peerId, - protocols: updatedPeer.protocols, - oldProtocols: peer == null ? 
[] : peer.protocols - } - })) - } - - async add (peerId: PeerId, protocols: string[]): Promise { - peerId = peerIdFromPeerId(peerId) - - if (!Array.isArray(protocols)) { - log.error('protocols must be provided to store data') - throw new CodeError('protocols must be provided', codes.ERR_INVALID_PARAMETERS) - } - - log.trace('add await write lock') - const release = await this.store.lock.writeLock() - log.trace('add got write lock') - - let peer: Peer | undefined - let updatedPeer - - try { - try { - peer = await this.store.load(peerId) - - if (new Set([ - ...peer.protocols, - ...protocols - ]).size === peer.protocols.length) { - return - } - } catch (err: any) { - if (err.code !== codes.ERR_NOT_FOUND) { - throw err - } - } - - updatedPeer = await this.store.mergeOrCreate(peerId, { - protocols - }) - - log('added provided protocols for %p', peerId) - } finally { - log.trace('add release write lock') - release() - } - - this.dispatchEvent(new CustomEvent(EVENT_NAME, { - detail: { - peerId, - protocols: updatedPeer.protocols, - oldProtocols: peer == null ? [] : peer.protocols - } - })) - } - - async remove (peerId: PeerId, protocols: string[]): Promise { - peerId = peerIdFromPeerId(peerId) - - if (!Array.isArray(protocols)) { - log.error('protocols must be provided to store data') - throw new CodeError('protocols must be provided', codes.ERR_INVALID_PARAMETERS) - } - - log.trace('remove await write lock') - const release = await this.store.lock.writeLock() - log.trace('remove got write lock') - - let peer: Peer | undefined - let updatedPeer: Peer - - try { - try { - peer = await this.store.load(peerId) - const protocolSet = new Set(peer.protocols) - - for (const protocol of protocols) { - protocolSet.delete(protocol) - } - - if (peer.protocols.length === protocolSet.size) { - return - } - - protocols = Array.from(protocolSet) - } catch (err: any) { - if (err.code !== codes.ERR_NOT_FOUND) { - throw err - } - } - - updatedPeer = await this.store.patchOrCreate(peerId, { - protocols - }) - } finally { - log.trace('remove release write lock') - release() - } - - this.dispatchEvent(new CustomEvent(EVENT_NAME, { - detail: { - peerId, - protocols: updatedPeer.protocols, - oldProtocols: peer == null ? 
[] : peer.protocols - } - })) - } - - async delete (peerId: PeerId): Promise { - peerId = peerIdFromPeerId(peerId) - - log.trace('delete await write lock') - const release = await this.store.lock.writeLock() - log.trace('delete got write lock') - let peer: Peer | undefined - - try { - try { - peer = await this.store.load(peerId) - } catch (err: any) { - if (err.code !== codes.ERR_NOT_FOUND) { - throw err - } - } - - await this.store.patchOrCreate(peerId, { - protocols: [] - }) - } finally { - log.trace('delete release write lock') - release() - } - - if (peer != null) { - this.dispatchEvent(new CustomEvent(EVENT_NAME, { - detail: { - peerId, - protocols: [], - oldProtocols: peer.protocols - } - })) - } - } -} diff --git a/src/store.ts b/src/store.ts index 9108fb9..0f658e0 100644 --- a/src/store.ts +++ b/src/store.ts @@ -1,241 +1,154 @@ -import { logger } from '@libp2p/logger' import { peerIdFromBytes } from '@libp2p/peer-id' -import { CodeError } from '@libp2p/interfaces/errors' -import { codes } from './errors.js' -import { Key } from 'interface-datastore/key' import { base32 } from 'multiformats/bases/base32' -import { multiaddr } from '@multiformats/multiaddr' -import { Metadata, Peer as PeerPB } from './pb/peer.js' -import mortice from 'mortice' -import { equals as uint8arrayEquals } from 'uint8arrays/equals' -import type { Peer } from '@libp2p/interface-peer-store' +import { Peer as PeerPB } from './pb/peer.js' +import type { Peer, PeerData } from '@libp2p/interface-peer-store' import type { PeerId } from '@libp2p/interface-peer-id' -import type { PersistentPeerStoreComponents } from './index.js' - -const log = logger('libp2p:peer-store:store') - -const NAMESPACE_COMMON = '/peers/' - -export interface Store { - has: (peerId: PeerId) => Promise - save: (peer: Peer) => Promise - load: (peerId: PeerId) => Promise - delete: (peerId: PeerId) => Promise - merge: (peerId: PeerId, data: Partial) => Promise - mergeOrCreate: (peerId: PeerId, data: Partial) => Promise - patch: (peerId: PeerId, data: Partial) => Promise - patchOrCreate: (peerId: PeerId, data: Partial) => Promise - all: () => AsyncIterable - - lock: { - readLock: () => Promise<() => void> - writeLock: () => Promise<() => void> - } +import type { AddressFilter, PersistentPeerStoreComponents, PersistentPeerStoreInit } from './index.js' +import { equals as uint8ArrayEquals } from 'uint8arrays/equals' +import { NAMESPACE_COMMON, peerIdToDatastoreKey } from './utils/peer-id-to-datastore-key.js' +import { bytesToPeer } from './utils/bytes-to-peer.js' +import { CodeError } from '@libp2p/interfaces/errors' +import { codes } from './errors.js' +import type { Datastore } from 'interface-datastore' +import type { PeerUpdate as PeerUpdateExternal } from '@libp2p/interface-libp2p' +import mortice, { Mortice } from 'mortice' +import { toPeerPB } from './utils/to-peer-pb.js' + +/** + * Event detail emitted when peer data changes + */ +export interface PeerUpdate extends PeerUpdateExternal { + updated: boolean } export class PersistentStore { - private readonly components: PersistentPeerStoreComponents - public lock: any - - constructor (components: PersistentPeerStoreComponents) { - this.components = components + private readonly peerId: PeerId + private readonly datastore: Datastore + public readonly lock: Mortice + private readonly addressFilter?: AddressFilter + + constructor (components: PersistentPeerStoreComponents, init: PersistentPeerStoreInit = {}) { + this.peerId = components.peerId + this.datastore = components.datastore + 
this.addressFilter = init.addressFilter this.lock = mortice({ name: 'peer-store', singleProcess: true }) } - _peerIdToDatastoreKey (peerId: PeerId): Key { - if (peerId.type == null) { - log.error('peerId must be an instance of peer-id to store data') - throw new CodeError('peerId must be an instance of peer-id', codes.ERR_INVALID_PARAMETERS) - } - - const b32key = peerId.toCID().toString() - return new Key(`${NAMESPACE_COMMON}${b32key}`) - } - async has (peerId: PeerId): Promise { - return await this.components.datastore.has(this._peerIdToDatastoreKey(peerId)) + return await this.datastore.has(peerIdToDatastoreKey(peerId)) } async delete (peerId: PeerId): Promise { - await this.components.datastore.delete(this._peerIdToDatastoreKey(peerId)) + if (this.peerId.equals(peerId)) { + throw new CodeError('Cannot delete self peer', codes.ERR_INVALID_PARAMETERS) + } + + await this.datastore.delete(peerIdToDatastoreKey(peerId)) } async load (peerId: PeerId): Promise { - const buf = await this.components.datastore.get(this._peerIdToDatastoreKey(peerId)) - const peer = PeerPB.decode(buf) - const metadata = new Map() + const buf = await this.datastore.get(peerIdToDatastoreKey(peerId)) - for (const meta of peer.metadata) { - metadata.set(meta.key, meta.value) - } - - return { - ...peer, - id: peerId, - addresses: peer.addresses.map(({ multiaddr: ma, isCertified }) => { - return { - multiaddr: multiaddr(ma), - isCertified: isCertified ?? false - } - }), - metadata, - pubKey: peer.pubKey ?? undefined, - peerRecordEnvelope: peer.peerRecordEnvelope ?? undefined - } + return await bytesToPeer(peerId, buf) } - async save (peer: Peer): Promise { - if (peer.pubKey != null && peer.id.publicKey != null && !uint8arrayEquals(peer.pubKey, peer.id.publicKey)) { - log.error('peer publicKey bytes do not match peer id publicKey bytes') - throw new CodeError('publicKey bytes do not match peer id publicKey bytes', codes.ERR_INVALID_PARAMETERS) - } - - // dedupe addresses - const addressSet = new Set() - const addresses = peer.addresses - .filter(address => { - if (addressSet.has(address.multiaddr.toString())) { - return false - } - - addressSet.add(address.multiaddr.toString()) - return true - }) - .sort((a, b) => { - return a.multiaddr.toString().localeCompare(b.multiaddr.toString()) - }) - .map(({ multiaddr, isCertified }) => ({ - multiaddr: multiaddr.bytes, - isCertified - })) - - const metadata: Metadata[] = [] - - ;[...peer.metadata.keys()].sort().forEach(key => { - const value = peer.metadata.get(key) - - if (value != null) { - metadata.push({ key, value }) - } - }) + async save (peerId: PeerId, data: PeerData): Promise { + const { + existingBuf, + existingPeer + } = await this.#findExistingPeer(peerId) - const buf = PeerPB.encode({ - addresses, - protocols: peer.protocols.sort(), - pubKey: peer.pubKey, - metadata, - peerRecordEnvelope: peer.peerRecordEnvelope + const peerPb: PeerPB = await toPeerPB(peerId, data, 'patch', { + addressFilter: this.addressFilter }) - await this.components.datastore.put(this._peerIdToDatastoreKey(peer.id), buf.subarray()) - - return await this.load(peer.id) + return await this.#saveIfDifferent(peerId, peerPb, existingBuf, existingPeer) } - async patch (peerId: PeerId, data: Partial): Promise { - const peer = await this.load(peerId) + async patch (peerId: PeerId, data: Partial): Promise { + const { + existingBuf, + existingPeer + } = await this.#findExistingPeer(peerId) + + const peerPb: PeerPB = await toPeerPB(peerId, data, 'patch', { + addressFilter: this.addressFilter, + existingPeer 
+ }) - return await this._patch(peerId, data, peer) + return await this.#saveIfDifferent(peerId, peerPb, existingBuf, existingPeer) } - async patchOrCreate (peerId: PeerId, data: Partial): Promise { - let peer: Peer + async merge (peerId: PeerId, data: PeerData): Promise { + const { + existingBuf, + existingPeer + } = await this.#findExistingPeer(peerId) - try { - peer = await this.load(peerId) - } catch (err: any) { - if (err.code !== codes.ERR_NOT_FOUND) { - throw err - } - - peer = { id: peerId, addresses: [], protocols: [], metadata: new Map() } - } + const peerPb: PeerPB = await toPeerPB(peerId, data, 'merge', { + addressFilter: this.addressFilter, + existingPeer + }) - return await this._patch(peerId, data, peer) + return await this.#saveIfDifferent(peerId, peerPb, existingBuf, existingPeer) } - async _patch (peerId: PeerId, data: Partial, peer: Peer): Promise { - return await this.save({ - ...peer, - ...data, - id: peerId - }) - } + async * all (): AsyncGenerator { + for await (const { key, value } of this.datastore.query({ + prefix: NAMESPACE_COMMON + })) { + // /peers/${peer-id-as-libp2p-key-cid-string-in-base-32} + const base32Str = key.toString().split('/')[2] + const buf = base32.decode(base32Str) + const peerId = peerIdFromBytes(buf) - async merge (peerId: PeerId, data: Partial): Promise { - const peer = await this.load(peerId) + if (peerId.equals(this.peerId)) { + // Skip self peer if present + continue + } - return await this._merge(peerId, data, peer) + yield bytesToPeer(peerId, value) + } } - async mergeOrCreate (peerId: PeerId, data: Partial): Promise { - /** @type {Peer} */ - let peer - + async #findExistingPeer (peerId: PeerId): Promise<{ existingBuf?: Uint8Array, existingPeer?: Peer }> { try { - peer = await this.load(peerId) + const existingBuf = await this.datastore.get(peerIdToDatastoreKey(peerId)) + const existingPeer = await bytesToPeer(peerId, existingBuf) + + return { + existingBuf, + existingPeer + } } catch (err: any) { - if (err.code !== codes.ERR_NOT_FOUND) { + if (err.code !== 'ERR_NOT_FOUND') { throw err } - - peer = { id: peerId, addresses: [], protocols: [], metadata: new Map() } } - return await this._merge(peerId, data, peer) + return {} } - async _merge (peerId: PeerId, data: Partial, peer: Peer): Promise { - // if the peer has certified addresses, use those in - // favour of the supplied versions - const addresses = new Map() - - peer.addresses.forEach((addr) => { - addresses.set(addr.multiaddr.toString(), addr.isCertified) - }) - - ;(data.addresses ?? []).forEach(addr => { - const addrString = addr.multiaddr.toString() - const isAlreadyCertified = Boolean(addresses.get(addrString)) - - const isCertified = isAlreadyCertified || addr.isCertified + async #saveIfDifferent (peerId: PeerId, peer: PeerPB, existingBuf?: Uint8Array, existingPeer?: Peer): Promise { + const buf = PeerPB.encode(peer) - addresses.set(addrString, isCertified) - }) + if (existingBuf != null && uint8ArrayEquals(buf, existingBuf)) { + return { + peer: await bytesToPeer(peerId, buf), + previous: existingPeer, + updated: false + } + } - return await this.save({ - id: peerId, - addresses: Array.from(addresses.entries()).map(([addrStr, isCertified]) => { - return { - multiaddr: multiaddr(addrStr), - isCertified - } - }), - protocols: Array.from(new Set([ - ...(peer.protocols ?? []), - ...(data.protocols ?? []) - ])), - metadata: new Map([ - ...(peer.metadata?.entries() ?? []), - ...(data.metadata?.entries() ?? []) - ]), - pubKey: data.pubKey ?? (peer != null ? 
peer.pubKey : undefined), - peerRecordEnvelope: data.peerRecordEnvelope ?? (peer != null ? peer.peerRecordEnvelope : undefined) - }) - } + await this.datastore.put(peerIdToDatastoreKey(peerId), buf) - async * all (): AsyncGenerator { - for await (const key of this.components.datastore.queryKeys({ - prefix: NAMESPACE_COMMON - })) { - // /peers/${peer-id-as-libp2p-key-cid-string-in-base-32} - const base32Str = key.toString().split('/')[2] - const buf = base32.decode(base32Str) - - yield this.load(peerIdFromBytes(buf)) + return { + peer: await bytesToPeer(peerId, buf), + previous: existingPeer, + updated: true } } } diff --git a/src/utils/bytes-to-peer.ts b/src/utils/bytes-to-peer.ts new file mode 100644 index 0000000..06021a6 --- /dev/null +++ b/src/utils/bytes-to-peer.ts @@ -0,0 +1,41 @@ +import { multiaddr } from '@multiformats/multiaddr' +import { Peer as PeerPB } from '../pb/peer.js' +import type { Peer, Tag } from '@libp2p/interface-peer-store' +import { createFromPubKey } from '@libp2p/peer-id-factory' +import { unmarshalPublicKey } from '@libp2p/crypto/keys' +import type { PeerId } from '@libp2p/interface-peer-id' + +export async function bytesToPeer (peerId: PeerId, buf: Uint8Array): Promise { + const peer = PeerPB.decode(buf) + + if (peer.publicKey != null && peerId.publicKey == null) { + peerId = await createFromPubKey(unmarshalPublicKey(peer.publicKey)) + } + + const tags = new Map() + + // remove any expired tags + const now = BigInt(Date.now()) + + for (const [key, tag] of peer.tags.entries()) { + if (tag.expiry != null && tag.expiry < now) { + continue + } + + tags.set(key, tag) + } + + return { + ...peer, + id: peerId, + addresses: peer.addresses.map(({ multiaddr: ma, isCertified }) => { + return { + multiaddr: multiaddr(ma), + isCertified: isCertified ?? false + } + }), + metadata: peer.metadata, + peerRecordEnvelope: peer.peerRecordEnvelope ?? undefined, + tags + } +} diff --git a/src/utils/dedupe-addresses.ts b/src/utils/dedupe-addresses.ts new file mode 100644 index 0000000..f39cd8c --- /dev/null +++ b/src/utils/dedupe-addresses.ts @@ -0,0 +1,51 @@ +import { isMultiaddr, multiaddr } from '@multiformats/multiaddr' +import type { Address as AddressPB } from '../pb/peer.js' +import type { Address } from '@libp2p/interface-peer-store' +import type { AddressFilter } from '../index.js' +import type { PeerId } from '@libp2p/interface-peer-id' +import { CodeError } from '@libp2p/interfaces/errors' +import { codes } from '../errors.js' + +export async function dedupeFilterAndSortAddresses (peerId: PeerId, filter: AddressFilter, addresses: Array
): Promise { + const addressMap = new Map() + + for (const addr of addresses) { + if (addr == null) { + continue + } + + if (addr.multiaddr instanceof Uint8Array) { + addr.multiaddr = multiaddr(addr.multiaddr) + } + + if (!isMultiaddr(addr.multiaddr)) { + throw new CodeError('Multiaddr was invalid', codes.ERR_INVALID_PARAMETERS) + } + + if (!(await filter(peerId, addr.multiaddr))) { + continue + } + + const isCertified = addr.isCertified ?? false + const maStr = addr.multiaddr.toString() + const existingAddr = addressMap.get(maStr) + + if (existingAddr != null) { + addr.isCertified = existingAddr.isCertified || isCertified + } else { + addressMap.set(maStr, { + multiaddr: addr.multiaddr, + isCertified + }) + } + } + + return [...addressMap.values()] + .sort((a, b) => { + return a.multiaddr.toString().localeCompare(b.multiaddr.toString()) + }) + .map(({ isCertified, multiaddr }) => ({ + isCertified, + multiaddr: multiaddr.bytes + })) +} diff --git a/src/utils/peer-data-to-datastore-peer.ts b/src/utils/peer-data-to-datastore-peer.ts new file mode 100644 index 0000000..5603518 --- /dev/null +++ b/src/utils/peer-data-to-datastore-peer.ts @@ -0,0 +1,116 @@ + +import { CodeError } from '@libp2p/interfaces/errors' +import { codes } from '../errors.js' +import { isMultiaddr } from '@multiformats/multiaddr' +import type { Peer as PeerPB } from '../pb/peer.js' +import { equals as uint8arrayEquals } from 'uint8arrays/equals' +import type { PeerData } from '@libp2p/interface-peer-store' +import type { PeerId } from '@libp2p/interface-peer-id' + +export function toDatastorePeer (peerId: PeerId, data: PeerData): PeerPB { + if (data == null) { + throw new CodeError('Invalid PeerData', codes.ERR_INVALID_PARAMETERS) + } + + if (data.publicKey != null && peerId.publicKey != null && !uint8arrayEquals(data.publicKey, peerId.publicKey)) { + throw new CodeError('publicKey bytes do not match peer id publicKey bytes', codes.ERR_INVALID_PARAMETERS) + } + + // merge addresses and multiaddrs, and dedupe + const addressSet = new Set() + + const output: PeerPB = { + addresses: (data.addresses ?? []) + .concat((data.multiaddrs ?? []).map(multiaddr => ({ multiaddr, isCertified: false }))) + .filter(address => { + if (!isMultiaddr(address.multiaddr)) { + throw new CodeError('Invalid mulitaddr', codes.ERR_INVALID_PARAMETERS) + } + + if (addressSet.has(address.multiaddr.toString())) { + return false + } + + addressSet.add(address.multiaddr.toString()) + return true + }) + .sort((a, b) => { + return a.multiaddr.toString().localeCompare(b.multiaddr.toString()) + }) + .map(({ multiaddr, isCertified }) => ({ + multiaddr: multiaddr.bytes, + isCertified + })), + protocols: (data.protocols ?? []).sort(), + metadata: new Map(), + tags: new Map(), + publicKey: data.publicKey, + peerRecordEnvelope: data.peerRecordEnvelope + } + + // remove invalid metadata + if (data.metadata != null) { + const metadataEntries = data.metadata instanceof Map ? data.metadata.entries() : Object.entries(data.metadata) + + for (const [key, value] of metadataEntries) { + if (typeof key !== 'string') { + throw new CodeError('Peer metadata keys must be strings', codes.ERR_INVALID_PARAMETERS) + } + + if (value == null) { + continue + } + + if (!(value instanceof Uint8Array)) { + throw new CodeError('Peer metadata values must be Uint8Arrays', codes.ERR_INVALID_PARAMETERS) + } + + output.metadata.set(key, value) + } + } + + if (data.tags != null) { + const tagsEntries = data.tags instanceof Map ? 
data.tags.entries() : Object.entries(data.tags) + + for (const [key, value] of tagsEntries) { + if (typeof key !== 'string') { + throw new CodeError('Peer tag keys must be strings', codes.ERR_INVALID_PARAMETERS) + } + + if (value == null) { + continue + } + + const tag = { + name: key, + ttl: value.ttl, + value: value.value ?? 0 + } + + if (tag.value < 0 || tag.value > 100) { + throw new CodeError('Tag value must be between 0-100', codes.ERR_INVALID_PARAMETERS) + } + + if (parseInt(`${tag.value}`, 10) !== tag.value) { + throw new CodeError('Tag value must be an integer', codes.ERR_INVALID_PARAMETERS) + } + + if (tag.ttl != null) { + if (tag.ttl < 0) { + throw new CodeError('Tag ttl must be between greater than 0', codes.ERR_INVALID_PARAMETERS) + } + + if (parseInt(`${tag.ttl}`, 10) !== tag.ttl) { + throw new CodeError('Tag ttl must be an integer', codes.ERR_INVALID_PARAMETERS) + } + } + + output.tags.set(tag.name, { + value: tag.value, + expiry: tag.ttl == null ? undefined : BigInt(Date.now() + tag.ttl) + }) + } + } + + return output +} diff --git a/src/utils/peer-id-to-datastore-key.ts b/src/utils/peer-id-to-datastore-key.ts new file mode 100644 index 0000000..2f577dd --- /dev/null +++ b/src/utils/peer-id-to-datastore-key.ts @@ -0,0 +1,15 @@ +import { CodeError } from '@libp2p/interfaces/errors' +import { codes } from '../errors.js' +import { Key } from 'interface-datastore/key' +import { isPeerId, PeerId } from '@libp2p/interface-peer-id' + +export const NAMESPACE_COMMON = '/peers/' + +export function peerIdToDatastoreKey (peerId: PeerId): Key { + if (!isPeerId(peerId) || peerId.type == null) { + throw new CodeError('Invalid PeerId', codes.ERR_INVALID_PARAMETERS) + } + + const b32key = peerId.toCID().toString() + return new Key(`${NAMESPACE_COMMON}${b32key}`) +} diff --git a/src/utils/to-peer-pb.ts b/src/utils/to-peer-pb.ts new file mode 100644 index 0000000..b8abd9c --- /dev/null +++ b/src/utils/to-peer-pb.ts @@ -0,0 +1,237 @@ +import type { PeerId } from '@libp2p/interface-peer-id' +import type { Address, Peer, PeerData, TagOptions } from '@libp2p/interface-peer-store' +import { CodeError } from '@libp2p/interfaces/errors' +import { codes } from '../errors.js' +import { equals as uint8arrayEquals } from 'uint8arrays/equals' +import type { AddressFilter } from '../index.js' +import type { Tag, Peer as PeerPB } from '../pb/peer.js' +import { dedupeFilterAndSortAddresses } from './dedupe-addresses.js' + +export interface ToPBPeerOptions { + addressFilter?: AddressFilter + existingPeer?: Peer +} + +export async function toPeerPB (peerId: PeerId, data: Partial, strategy: 'merge' | 'patch', options: ToPBPeerOptions): Promise { + if (data == null) { + throw new CodeError('Invalid PeerData', codes.ERR_INVALID_PARAMETERS) + } + + if (data.publicKey != null && peerId.publicKey != null && !uint8arrayEquals(data.publicKey, peerId.publicKey)) { + throw new CodeError('publicKey bytes do not match peer id publicKey bytes', codes.ERR_INVALID_PARAMETERS) + } + + const existingPeer = options.existingPeer + + if (existingPeer != null && !peerId.equals(existingPeer.id)) { + throw new CodeError('peer id did not match existing peer id', codes.ERR_INVALID_PARAMETERS) + } + + let addresses: Address[] = existingPeer?.addresses ?? [] + let protocols: Set = new Set(existingPeer?.protocols ?? []) + let metadata: Map = existingPeer?.metadata ?? new Map() + let tags: Map = existingPeer?.tags ?? 
new Map() + let peerRecordEnvelope: Uint8Array | undefined = existingPeer?.peerRecordEnvelope + + // when patching, we replace the original fields with passed values + if (strategy === 'patch') { + if (data.multiaddrs != null || data.addresses != null) { + addresses = [] + + if (data.multiaddrs != null) { + addresses.push(...data.multiaddrs.map(multiaddr => ({ + isCertified: false, + multiaddr + }))) + } + + if (data.addresses != null) { + addresses.push(...data.addresses) + } + } + + if (data.protocols != null) { + protocols = new Set(data.protocols) + } + + if (data.metadata != null) { + const metadataEntries = data.metadata instanceof Map ? [...data.metadata.entries()] : Object.entries(data.metadata) + + metadata = createSortedMap(metadataEntries, { + validate: validateMetadata + }) + } + + if (data.tags != null) { + const tagsEntries = data.tags instanceof Map ? [...data.tags.entries()] : Object.entries(data.tags) + + tags = createSortedMap(tagsEntries, { + validate: validateTag, + map: mapTag + }) + } + + if (data.peerRecordEnvelope != null) { + peerRecordEnvelope = data.peerRecordEnvelope + } + } + + // when merging, we join the original fields with passed values + if (strategy === 'merge') { + if (data.multiaddrs != null) { + addresses.push(...data.multiaddrs.map(multiaddr => ({ + isCertified: false, + multiaddr + }))) + } + + if (data.addresses != null) { + addresses.push(...data.addresses) + } + + if (data.protocols != null) { + protocols = new Set([...protocols, ...data.protocols]) + } + + if (data.metadata != null) { + const metadataEntries = data.metadata instanceof Map ? [...data.metadata.entries()] : Object.entries(data.metadata) + + for (const [key, value] of metadataEntries) { + if (value == null) { + metadata.delete(key) + } else { + metadata.set(key, value) + } + } + + metadata = createSortedMap([...metadata.entries()], { + validate: validateMetadata + }) + } + + if (data.tags != null) { + const tagsEntries = data.tags instanceof Map ? [...data.tags.entries()] : Object.entries(data.tags) + const mergedTags: Map = new Map(tags) + + for (const [key, value] of tagsEntries) { + if (value == null) { + mergedTags.delete(key) + } else { + mergedTags.set(key, value) + } + } + + tags = createSortedMap([...mergedTags.entries()], { + validate: validateTag, + map: mapTag + }) + } + + if (data.peerRecordEnvelope != null) { + peerRecordEnvelope = data.peerRecordEnvelope + } + } + + const output: PeerPB = { + addresses: await dedupeFilterAndSortAddresses(peerId, options.addressFilter ?? (async () => true), addresses), + protocols: [...protocols.values()].sort((a, b) => { + return a.localeCompare(b) + }), + metadata, + tags, + + publicKey: existingPeer?.id.publicKey ?? data.publicKey ?? peerId.publicKey, + peerRecordEnvelope + } + + // Ed25519 and secp256k1 have their public key embedded in them so no need to duplicate it + if (peerId.type !== 'RSA') { + delete output.publicKey + } + + return output +} + +interface CreateSortedMapOptions { + validate: (key: string, value: V) => void + map?: (key: string, value: V) => R +} + +/** + * In JS maps are ordered by insertion order so create a new map with the + * keys inserted in alphabetical order. 
+ */ +function createSortedMap (entries: Array<[string, V | undefined]>, options: CreateSortedMapOptions): Map { + const output = new Map() + + for (const [key, value] of entries) { + if (value == null) { + continue + } + + options.validate(key, value) + } + + for (const [key, value] of entries.sort(([a], [b]) => { + return a.localeCompare(b) + })) { + if (value != null) { + output.set(key, options.map?.(key, value) ?? value) + } + } + + return output +} + +function validateMetadata (key: string, value: Uint8Array): void { + if (typeof key !== 'string') { + throw new CodeError('Metadata key must be a string', codes.ERR_INVALID_PARAMETERS) + } + + if (!(value instanceof Uint8Array)) { + throw new CodeError('Metadata value must be a Uint8Array', codes.ERR_INVALID_PARAMETERS) + } +} + +function validateTag (key: string, tag: TagOptions): void { + if (typeof key !== 'string') { + throw new CodeError('Tag name must be a string', codes.ERR_INVALID_PARAMETERS) + } + + if (tag.value != null) { + if (parseInt(`${tag.value}`, 10) !== tag.value) { + throw new CodeError('Tag value must be an integer', codes.ERR_INVALID_PARAMETERS) + } + + if (tag.value < 0 || tag.value > 100) { + throw new CodeError('Tag value must be between 0-100', codes.ERR_INVALID_PARAMETERS) + } + } + + if (tag.ttl != null) { + if (parseInt(`${tag.ttl}`, 10) !== tag.ttl) { + throw new CodeError('Tag ttl must be an integer', codes.ERR_INVALID_PARAMETERS) + } + + if (tag.ttl < 0) { + throw new CodeError('Tag ttl must be between greater than 0', codes.ERR_INVALID_PARAMETERS) + } + } +} + +function mapTag (key: string, tag: any): Tag { + let expiry: bigint | undefined + + if (tag.expiry != null) { + expiry = tag.expiry + } + + if (tag.ttl != null) { + expiry = BigInt(Date.now() + Number(tag.ttl)) + } + + return { + value: tag.value ?? 
0, + expiry + } +} diff --git a/test/address-book.spec.ts b/test/address-book.spec.ts deleted file mode 100644 index 35a7b25..0000000 --- a/test/address-book.spec.ts +++ /dev/null @@ -1,689 +0,0 @@ -/* eslint-env mocha */ -/* eslint max-nested-callbacks: ["error", 6] */ - -import { expect } from 'aegir/chai' -import { multiaddr } from '@multiformats/multiaddr' -import { arrayEquals } from '@libp2p/utils/array-equals' -import type { PeerId } from '@libp2p/interface-peer-id' -import pDefer from 'p-defer' -import { MemoryDatastore } from 'datastore-core/memory' -import { PersistentPeerStore } from '../src/index.js' -import { RecordEnvelope, PeerRecord } from '@libp2p/peer-record' -import { codes } from '../src/errors.js' -import { createEd25519PeerId } from '@libp2p/peer-id-factory' -import type { AddressBook } from '@libp2p/interface-peer-store' -import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' - -const addr1 = multiaddr('/ip4/127.0.0.1/tcp/8000') -const addr2 = multiaddr('/ip4/20.0.0.1/tcp/8001') -const addr3 = multiaddr('/ip4/127.0.0.1/tcp/8002') - -describe('addressBook', () => { - let peerId: PeerId - - before(async () => { - peerId = await createEd25519PeerId() - }) - - describe('addressBook.set', () => { - let peerStore: PersistentPeerStore - let ab: AddressBook - - beforeEach(() => { - peerStore = new PersistentPeerStore({ peerId, datastore: new MemoryDatastore() }) - ab = peerStore.addressBook - }) - - it('throws invalid parameters error if invalid PeerId is provided', async () => { - try { - // @ts-expect-error invalid input - await ab.set('invalid peerId') - } catch (err: any) { - expect(err).to.have.property('code', codes.ERR_INVALID_PARAMETERS) - return - } - throw new Error('invalid peerId should throw error') - }) - - it('throws invalid parameters error if no addresses provided', async () => { - try { - // @ts-expect-error invalid input - await ab.set(peerId) - } catch (err: any) { - expect(err).to.have.property('code', codes.ERR_INVALID_PARAMETERS) - return - } - throw new Error('no addresses should throw error') - }) - - it('throws invalid parameters error if invalid multiaddrs are provided', async () => { - try { - // @ts-expect-error invalid input - await ab.set(peerId, ['invalid multiaddr']) - } catch (err: any) { - expect(err).to.have.property('code', codes.ERR_INVALID_PARAMETERS) - return - } - throw new Error('invalid multiaddrs should throw error') - }) - - it('replaces the stored content by default and emit change event', async () => { - const defer = pDefer() - const supportedMultiaddrs = [addr1, addr2] - - peerStore.addEventListener('change:multiaddrs', (evt) => { - const { peerId, multiaddrs } = evt.detail - expect(peerId).to.exist() - expect(multiaddrs).to.eql(supportedMultiaddrs) - defer.resolve() - }, { - once: true - }) - - await ab.set(peerId, supportedMultiaddrs) - const addresses = await ab.get(peerId) - const multiaddrs = addresses.map((mi) => mi.multiaddr) - expect(multiaddrs).to.have.deep.members(supportedMultiaddrs) - - return await defer.promise - }) - - it('emits on set if not storing the exact same content', async () => { - const defer = pDefer() - - const supportedMultiaddrsA = [addr1, addr2] - const supportedMultiaddrsB = [addr2] - - let changeCounter = 0 - peerStore.addEventListener('change:multiaddrs', () => { - changeCounter++ - if (changeCounter > 1) { - defer.resolve() - } - }) - - // set 1 - await ab.set(peerId, supportedMultiaddrsA) - - // set 2 (same content) - await ab.set(peerId, supportedMultiaddrsB) - const 
addresses = await ab.get(peerId) - const multiaddrs = addresses.map((mi) => mi.multiaddr) - expect(multiaddrs).to.have.deep.members(supportedMultiaddrsB) - - await defer.promise - }) - - it('does not emit on set if it is storing the exact same content', async () => { - const defer = pDefer() - - const supportedMultiaddrs = [addr1, addr2] - - let changeCounter = 0 - peerStore.addEventListener('change:multiaddrs', () => { - changeCounter++ - if (changeCounter > 1) { - defer.reject() - } - }) - - // set 1 - await ab.set(peerId, supportedMultiaddrs) - - // set 2 (same content) - await ab.set(peerId, supportedMultiaddrs) - - // Wait 50ms for incorrect second event - setTimeout(() => { - defer.resolve() - }, 50) - - await defer.promise - }) - }) - - describe('addressBook.add', () => { - let peerStore: PersistentPeerStore - let ab: AddressBook - - beforeEach(() => { - peerStore = new PersistentPeerStore({ peerId, datastore: new MemoryDatastore() }) - ab = peerStore.addressBook - }) - - it('throws invalid parameters error if invalid PeerId is provided', async () => { - try { - // @ts-expect-error invalid input - await ab.add('invalid peerId') - } catch (err: any) { - expect(err.code).to.equal(codes.ERR_INVALID_PARAMETERS) - return - } - throw new Error('invalid peerId should throw error') - }) - - it('throws invalid parameters error if no addresses provided', async () => { - try { - // @ts-expect-error invalid input - await ab.add(peerId) - } catch (err: any) { - expect(err.code).to.equal(codes.ERR_INVALID_PARAMETERS) - return - } - throw new Error('no addresses provided should throw error') - }) - - it('throws invalid parameters error if invalid multiaddrs are provided', async () => { - try { - // @ts-expect-error invalid input - await ab.add(peerId, ['invalid multiaddr']) - } catch (err: any) { - expect(err.code).to.equal(codes.ERR_INVALID_PARAMETERS) - return - } - throw new Error('invalid multiaddr should throw error') - }) - - it('does not emit event if no addresses are added', async () => { - const defer = pDefer() - - peerStore.addEventListener('peer', () => { - defer.reject() - }) - - await ab.add(peerId, []) - - // Wait 50ms for incorrect second event - setTimeout(() => { - defer.resolve() - }, 50) - - await defer.promise - }) - - it('emits peer event if new peer has addresses added', async () => { - const defer = pDefer() - - peerStore.addEventListener('peer', () => { - defer.resolve() - }) - - await ab.add(await createEd25519PeerId(), [ - multiaddr('/ip4/42.14.53.21/tcp/3981') - ]) - await defer.promise - }) - - it('emits peer event if new peer has addresses set', async () => { - const defer = pDefer() - - peerStore.addEventListener('peer', () => { - defer.resolve() - }) - - await ab.set(await createEd25519PeerId(), [ - multiaddr('/ip4/42.14.53.21/tcp/3981') - ]) - await defer.promise - }) - - it('adds the new content and emits change event', async () => { - const defer = pDefer() - - const supportedMultiaddrsA = [addr1, addr2] - const supportedMultiaddrsB = [addr3] - const finalMultiaddrs = supportedMultiaddrsA.concat(supportedMultiaddrsB) - - let changeTrigger = 2 - peerStore.addEventListener('change:multiaddrs', (evt) => { - const { multiaddrs } = evt.detail - changeTrigger-- - if (changeTrigger === 0 && arrayEquals(multiaddrs, finalMultiaddrs)) { - defer.resolve() - } - }) - - // Replace - await ab.set(peerId, supportedMultiaddrsA) - let addresses = await ab.get(peerId) - let multiaddrs = addresses.map((mi) => mi.multiaddr) - 
expect(multiaddrs).to.have.deep.members(supportedMultiaddrsA) - - // Add - await ab.add(peerId, supportedMultiaddrsB) - addresses = await ab.get(peerId) - multiaddrs = addresses.map((mi) => mi.multiaddr) - expect(multiaddrs).to.have.deep.members(finalMultiaddrs) - - return await defer.promise - }) - - it('emits on add if the content to add not exists', async () => { - const defer = pDefer() - - const supportedMultiaddrsA = [addr1] - const supportedMultiaddrsB = [addr2] - const finalMultiaddrs = supportedMultiaddrsA.concat(supportedMultiaddrsB) - - let changeCounter = 0 - peerStore.addEventListener('change:multiaddrs', () => { - changeCounter++ - if (changeCounter > 1) { - defer.resolve() - } - }) - - // set 1 - await ab.set(peerId, supportedMultiaddrsA) - - // set 2 (content already existing) - await ab.add(peerId, supportedMultiaddrsB) - const addresses = await ab.get(peerId) - const multiaddrs = addresses.map((mi) => mi.multiaddr) - expect(multiaddrs).to.have.deep.members(finalMultiaddrs) - - await defer.promise - }) - - it('does not emit on add if the content to add already exists', async () => { - const defer = pDefer() - - const supportedMultiaddrsA = [addr1, addr2] - const supportedMultiaddrsB = [addr2] - - let changeCounter = 0 - peerStore.addEventListener('change:multiaddrs', () => { - changeCounter++ - if (changeCounter > 1) { - defer.reject() - } - }) - - // set 1 - await ab.set(peerId, supportedMultiaddrsA) - - // set 2 (content already existing) - await ab.add(peerId, supportedMultiaddrsB) - - // Wait 50ms for incorrect second event - setTimeout(() => { - defer.resolve() - }, 50) - - await defer.promise - }) - - it('does not add replicated content', async () => { - // set 1 - await ab.set(peerId, [addr1, addr1]) - - const addresses = await ab.get(peerId) - expect(addresses).to.have.lengthOf(1) - }) - }) - - describe('addressBook.get', () => { - let peerStore: PersistentPeerStore - let ab: AddressBook - - beforeEach(() => { - peerStore = new PersistentPeerStore({ peerId, datastore: new MemoryDatastore() }) - ab = peerStore.addressBook - }) - - it('throws invalid parameters error if invalid PeerId is provided', async () => { - try { - // @ts-expect-error invalid input - await ab.get('invalid peerId') - } catch (err: any) { - expect(err.code).to.equal(codes.ERR_INVALID_PARAMETERS) - return - } - throw new Error('invalid peerId should throw error') - }) - - it('returns empty if no multiaddrs are known for the provided peer', async () => { - const addresses = await ab.get(peerId) - - expect(addresses).to.be.empty() - }) - - it('returns the multiaddrs stored', async () => { - const supportedMultiaddrs = [addr1, addr2] - - await ab.set(peerId, supportedMultiaddrs) - - const addresses = await ab.get(peerId) - const multiaddrs = addresses.map((mi) => mi.multiaddr) - expect(multiaddrs).to.have.deep.members(supportedMultiaddrs) - }) - }) - - describe('addressBook.delete', () => { - let peerStore: PersistentPeerStore - let ab: AddressBook - - beforeEach(() => { - peerStore = new PersistentPeerStore({ peerId, datastore: new MemoryDatastore() }) - ab = peerStore.addressBook - }) - - it('throws invalid parameters error if invalid PeerId is provided', async () => { - try { - // @ts-expect-error invalid input - await ab.delete('invalid peerId') - } catch (err: any) { - expect(err.code).to.equal(codes.ERR_INVALID_PARAMETERS) - return - } - throw new Error('invalid peerId should throw error') - }) - - it('does not emit an event if no records exist for the peer', async () => { - const defer = 
pDefer() - - peerStore.addEventListener('change:multiaddrs', () => { - defer.reject() - }) - - await ab.delete(peerId) - - // Wait 50ms for incorrect invalid event - setTimeout(() => { - defer.resolve() - }, 50) - - return await defer.promise - }) - - it('emits an event if the record exists', async () => { - const defer = pDefer() - - const supportedMultiaddrs = [addr1, addr2] - await ab.set(peerId, supportedMultiaddrs) - - // Listen after set - peerStore.addEventListener('change:multiaddrs', (evt) => { - const { multiaddrs } = evt.detail - expect(multiaddrs.length).to.eql(0) - defer.resolve() - }) - - await ab.delete(peerId) - - return await defer.promise - }) - }) - - describe('certified records', () => { - let peerStore: PersistentPeerStore - let ab: AddressBook - - describe('consumes a valid peer record and stores its data', () => { - beforeEach(() => { - peerStore = new PersistentPeerStore({ peerId, datastore: new MemoryDatastore() }) - ab = peerStore.addressBook - }) - - it('no previous data in AddressBook', async () => { - const multiaddrs = [addr1, addr2] - const peerRecord = new PeerRecord({ - peerId, - multiaddrs - }) - const envelope = await RecordEnvelope.seal(peerRecord, peerId) - - // consume peer record - const consumed = await ab.consumePeerRecord(envelope) - expect(consumed).to.eql(true) - - // Validate AddressBook addresses - const addrs = await ab.get(peerId) - expect(addrs).to.exist() - expect(addrs).to.have.lengthOf(multiaddrs.length) - addrs.forEach((addr, index) => { - expect(addr.isCertified).to.eql(true) - expect(multiaddrs[index].equals(addr.multiaddr)).to.eql(true) - }) - }) - - it('emits change:multiaddrs event when adding multiaddrs', async () => { - const defer = pDefer() - const multiaddrs = [addr1, addr2] - const peerRecord = new PeerRecord({ - peerId, - multiaddrs - }) - const envelope = await RecordEnvelope.seal(peerRecord, peerId) - - peerStore.addEventListener('change:multiaddrs', (evt) => { - const { peerId, multiaddrs } = evt.detail - expect(peerId).to.exist() - expect(multiaddrs).to.eql(multiaddrs) - defer.resolve() - }, { - once: true - }) - - // consume peer record - const consumed = await ab.consumePeerRecord(envelope) - expect(consumed).to.eql(true) - - return await defer.promise - }) - - it('emits change:multiaddrs event with same data currently in AddressBook (not certified)', async () => { - const defer = pDefer() - const multiaddrs = [addr1, addr2] - - // Set addressBook data - await ab.set(peerId, multiaddrs) - - // Validate data exists, but not certified - let addrs = await ab.get(peerId) - expect(addrs).to.exist() - expect(addrs).to.have.lengthOf(multiaddrs.length) - - addrs.forEach((addr, index) => { - expect(addr.isCertified).to.eql(false) - expect(multiaddrs[index].equals(addr.multiaddr)).to.eql(true) - }) - - // Create peer record - const peerRecord = new PeerRecord({ - peerId, - multiaddrs - }) - const envelope = await RecordEnvelope.seal(peerRecord, peerId) - - peerStore.addEventListener('change:multiaddrs', (evt) => { - const { peerId, multiaddrs } = evt.detail - expect(peerId).to.exist() - expect(multiaddrs).to.eql(multiaddrs) - defer.resolve() - }, { - once: true - }) - - // consume peer record - const consumed = await ab.consumePeerRecord(envelope) - expect(consumed).to.eql(true) - - // Wait event - await defer.promise - - // Validate data exists and certified - addrs = await ab.get(peerId) - expect(addrs).to.exist() - expect(addrs).to.have.lengthOf(multiaddrs.length) - addrs.forEach((addr, index) => { - 
expect(addr.isCertified).to.eql(true) - expect(multiaddrs[index].equals(addr.multiaddr)).to.eql(true) - }) - }) - - it('emits change:multiaddrs event with previous partial data in AddressBook (not certified)', async () => { - const defer = pDefer() - const multiaddrs = [addr1, addr2] - - // Set addressBook data - await ab.set(peerId, [addr1]) - - // Validate data exists, but not certified - let addrs = await ab.get(peerId) - expect(addrs).to.exist() - expect(addrs).to.have.lengthOf(1) - expect(addrs[0].isCertified).to.eql(false) - expect(addrs[0].multiaddr.equals(addr1)).to.eql(true) - - // Create peer record - const peerRecord = new PeerRecord({ - peerId, - multiaddrs - }) - const envelope = await RecordEnvelope.seal(peerRecord, peerId) - - peerStore.addEventListener('change:multiaddrs', (evt) => { - const { peerId, multiaddrs } = evt.detail - expect(peerId).to.exist() - expect(multiaddrs).to.eql(multiaddrs) - defer.resolve() - }, { - once: true - }) - - // consume peer record - const consumed = await ab.consumePeerRecord(envelope) - expect(consumed).to.eql(true) - - // Wait event - await defer.promise - - // Validate data exists and certified - addrs = await ab.get(peerId) - expect(addrs).to.exist() - expect(addrs).to.have.lengthOf(multiaddrs.length) - addrs.forEach((addr, index) => { - expect(addr.isCertified).to.eql(true) - expect(multiaddrs[index].equals(addr.multiaddr)).to.eql(true) - }) - }) - - it('with previous different data in AddressBook (not certified)', async () => { - const defer = pDefer() - const multiaddrsUncertified = [addr3] - const multiaddrsCertified = [addr1, addr2] - - // Set addressBook data - await ab.set(peerId, multiaddrsUncertified) - - // Validate data exists, but not certified - let addrs = await ab.get(peerId) - expect(addrs).to.exist() - expect(addrs).to.have.lengthOf(multiaddrsUncertified.length) - addrs.forEach((addr, index) => { - expect(addr.isCertified).to.eql(false) - expect(multiaddrsUncertified[index].equals(addr.multiaddr)).to.eql(true) - }) - - // Create peer record - const peerRecord = new PeerRecord({ - peerId, - multiaddrs: multiaddrsCertified - }) - const envelope = await RecordEnvelope.seal(peerRecord, peerId) - - peerStore.addEventListener('change:multiaddrs', (evt) => { - const { peerId, multiaddrs } = evt.detail - expect(peerId).to.exist() - expect(multiaddrs).to.eql(multiaddrs) - defer.resolve() - }, { - once: true - }) - - // consume peer record - const consumed = await ab.consumePeerRecord(envelope) - expect(consumed).to.eql(true) - - // Wait event - await defer.promise - - // Validate data exists and certified - addrs = await ab.get(peerId) - expect(addrs).to.exist() - expect(addrs).to.have.lengthOf(multiaddrsCertified.length) - addrs.forEach((addr, index) => { - expect(addr.isCertified).to.eql(true) - expect(multiaddrsCertified[index].equals(addr.multiaddr)).to.eql(true) - }) - }) - }) - - describe('fails to consume invalid peer records', () => { - beforeEach(() => { - peerStore = new PersistentPeerStore({ peerId, datastore: new MemoryDatastore() }) - ab = peerStore.addressBook - }) - - it('invalid peer record', async () => { - const invalidEnvelope = { - payload: uint8ArrayFromString('invalid-peerRecord') - } - - // @ts-expect-error invalid input - const consumed = await ab.consumePeerRecord(invalidEnvelope) - expect(consumed).to.eql(false) - }) - - it('peer that created the envelope is not the same as the peer record', async () => { - const multiaddrs = [addr1, addr2] - - // Create peer record - const peerId2 = await 
createEd25519PeerId() - const peerRecord = new PeerRecord({ - peerId: peerId2, - multiaddrs - }) - const envelope = await RecordEnvelope.seal(peerRecord, peerId) - - const consumed = await ab.consumePeerRecord(envelope) - expect(consumed).to.eql(false) - }) - - it('does not store an outdated record', async () => { - const multiaddrs = [addr1, addr2] - const peerRecord1 = new PeerRecord({ - peerId, - multiaddrs, - seqNumber: BigInt(Date.now()) - }) - const peerRecord2 = new PeerRecord({ - peerId, - multiaddrs, - seqNumber: BigInt(Date.now() - 1) - }) - const envelope1 = await RecordEnvelope.seal(peerRecord1, peerId) - const envelope2 = await RecordEnvelope.seal(peerRecord2, peerId) - - // Consume envelope1 (bigger seqNumber) - let consumed = await ab.consumePeerRecord(envelope1) - expect(consumed).to.eql(true) - - consumed = await ab.consumePeerRecord(envelope2) - expect(consumed).to.eql(false) - }) - - it('empty multiaddrs', async () => { - const peerRecord = new PeerRecord({ - peerId, - multiaddrs: [] - }) - const envelope = await RecordEnvelope.seal(peerRecord, peerId) - - const consumed = await ab.consumePeerRecord(envelope) - expect(consumed).to.eql(false) - }) - }) - }) -}) diff --git a/test/index.spec.ts b/test/index.spec.ts new file mode 100644 index 0000000..526c15f --- /dev/null +++ b/test/index.spec.ts @@ -0,0 +1,165 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 6] */ + +import { expect } from 'aegir/chai' +import { multiaddr } from '@multiformats/multiaddr' +import type { PeerId } from '@libp2p/interface-peer-id' +import { MemoryDatastore } from 'datastore-core/memory' +import { PersistentPeerStore } from '../src/index.js' +import { createEd25519PeerId } from '@libp2p/peer-id-factory' +import delay from 'delay' +import { EventEmitter } from '@libp2p/interfaces/events' +import type { Libp2pEvents } from '@libp2p/interface-libp2p' + +const addr1 = multiaddr('/ip4/127.0.0.1/tcp/8000') + +describe('PersistentPeerStore', () => { + let peerId: PeerId + let otherPeerId: PeerId + let peerStore: PersistentPeerStore + let events: EventEmitter + + beforeEach(async () => { + peerId = await createEd25519PeerId() + otherPeerId = await createEd25519PeerId() + events = new EventEmitter() + peerStore = new PersistentPeerStore({ peerId, events, datastore: new MemoryDatastore() }) + }) + + it('has an empty map of peers', async () => { + const peers = await peerStore.all() + expect(peers.length).to.equal(0) + }) + + describe('has', () => { + it('has peer data', async () => { + await expect(peerStore.has(otherPeerId)).to.eventually.be.false() + await peerStore.save(otherPeerId, { + multiaddrs: [ + addr1 + ] + }) + await expect(peerStore.has(otherPeerId)).to.eventually.be.true() + }) + }) + + describe('delete', () => { + it('deletes peer data', async () => { + await expect(peerStore.has(otherPeerId)).to.eventually.be.false() + await peerStore.save(otherPeerId, { + multiaddrs: [ + addr1 + ] + }) + await expect(peerStore.has(otherPeerId)).to.eventually.be.true() + await peerStore.delete(otherPeerId) + await expect(peerStore.has(otherPeerId)).to.eventually.be.false() + }) + + it('does not allow deleting the self peer', async () => { + await expect(peerStore.has(peerId)).to.eventually.be.false() + await peerStore.save(peerId, { + multiaddrs: [ + addr1 + ] + }) + + await expect(peerStore.delete(peerId)).to.eventually.be.rejected() + .with.property('code', 'ERR_INVALID_PARAMETERS') + }) + }) + + describe('tags', () => { + it('tags a peer', async () => { + const name = 'a-tag' + 
const peer = await peerStore.save(otherPeerId, { + tags: { + [name]: {} + } + }) + + expect(peer).to.have.property('tags') + .that.deep.equals(new Map([[name, { value: 0 }]]), 'Peer did not contain tag') + }) + + it('tags a peer with a value', async () => { + const name = 'a-tag' + const value = 50 + const peer = await peerStore.save(peerId, { + tags: { + [name]: { value } + } + }) + + expect(peer).to.have.property('tags') + .that.deep.equals(new Map([[name, { value }]]), 'Peer did not contain tag with a value') + }) + + it('tags a peer with a valid value', async () => { + const name = 'a-tag' + + await expect(peerStore.save(peerId, { + tags: { + [name]: { value: -1 } + } + }), 'PeerStore contain tag for peer where value was too small') + .to.eventually.be.rejected().with.property('code', 'ERR_INVALID_PARAMETERS') + + await expect(peerStore.save(peerId, { + tags: { + [name]: { value: 101 } + } + }), 'PeerStore contain tag for peer where value was too large') + .to.eventually.be.rejected().with.property('code', 'ERR_INVALID_PARAMETERS') + + await expect(peerStore.save(peerId, { + tags: { + [name]: { value: 5.5 } + } + }), 'PeerStore contain tag for peer where value was not an integer') + .to.eventually.be.rejected().with.property('code', 'ERR_INVALID_PARAMETERS') + }) + + it('tags a peer with an expiring value', async () => { + const name = 'a-tag' + const value = 50 + const peer = await peerStore.save(peerId, { + tags: { + [name]: { + value, + ttl: 50 + } + } + }) + + expect(peer).to.have.property('tags') + .that.has.key(name) + + await delay(100) + + const updatedPeer = await peerStore.get(peerId) + + expect(updatedPeer).to.have.property('tags') + .that.does.not.have.key(name) + }) + + it('untags a peer', async () => { + const name = 'a-tag' + const peer = await peerStore.save(peerId, { + tags: { + [name]: {} + } + }) + + expect(peer).to.have.property('tags') + .that.has.key(name) + + const updatedPeer = await peerStore.patch(peerId, { + tags: {} + }) + + expect(updatedPeer).to.have.property('tags') + .that.does.not.have.key(name) + }) + }) +}) diff --git a/test/key-book.spec.ts b/test/key-book.spec.ts deleted file mode 100644 index 1f25a37..0000000 --- a/test/key-book.spec.ts +++ /dev/null @@ -1,129 +0,0 @@ -/* eslint-env mocha */ - -import { expect } from 'aegir/chai' -import sinon from 'sinon' -import { MemoryDatastore } from 'datastore-core/memory' -import { PersistentPeerStore } from '../src/index.js' -import pDefer from 'p-defer' -import { codes } from '../src/errors.js' -import { createEd25519PeerId } from '@libp2p/peer-id-factory' -import type { PeerId } from '@libp2p/interface-peer-id' -import type { KeyBook } from '@libp2p/interface-peer-store' - -describe('keyBook', () => { - let peerId: PeerId - let peerStore: PersistentPeerStore - let kb: KeyBook - let datastore: MemoryDatastore - - beforeEach(async () => { - peerId = await createEd25519PeerId() - datastore = new MemoryDatastore() - peerStore = new PersistentPeerStore({ peerId, datastore }) - kb = peerStore.keyBook - }) - - it('throws invalid parameters error if invalid PeerId is provided in set', async () => { - try { - // @ts-expect-error invalid input - await kb.set('invalid peerId') - } catch (err: any) { - expect(err.code).to.equal(codes.ERR_INVALID_PARAMETERS) - return - } - throw new Error('invalid peerId should throw error') - }) - - it('throws invalid parameters error if invalid PeerId is provided in get', async () => { - try { - // @ts-expect-error invalid input - await kb.get('invalid peerId') - } catch (err: any) 
{ - expect(err.code).to.equal(codes.ERR_INVALID_PARAMETERS) - return - } - throw new Error('invalid peerId should throw error') - }) - - it('stores the peerId in the book and returns the public key', async () => { - if (peerId.publicKey == null) { - throw new Error('Public key was missing') - } - - // Set PeerId - await kb.set(peerId, peerId.publicKey) - - // Get public key - const pubKey = await kb.get(peerId) - expect(peerId.publicKey).to.equalBytes(pubKey) - }) - - it('should not store if already stored', async () => { - const spy = sinon.spy(datastore, 'put') - const peer = await createEd25519PeerId() - - if (peer.publicKey == null) { - throw new Error('Public key was missing') - } - - // Set PeerId - await kb.set(peer, peer.publicKey) - await kb.set(peer, peer.publicKey) - - expect(spy).to.have.property('callCount', 1) - }) - - it('should emit an event when setting a key', async () => { - const defer = pDefer() - - peerStore.addEventListener('change:pubkey', (evt) => { - const { peerId: id, publicKey } = evt.detail - if (peerId.publicKey == null) { - throw new Error('Public key was missing') - } - - expect(id.toString()).to.equal(peerId.toString()) - expect(publicKey).to.equalBytes(peerId.publicKey) - defer.resolve() - }) - - if (peerId.publicKey == null) { - throw new Error('Public key was missing') - } - - // Set PeerId - await kb.set(peerId, peerId.publicKey) - await defer.promise - }) - - it('should not set when key does not match', async () => { - const edKey = await createEd25519PeerId() - - if (peerId.publicKey == null) { - throw new Error('Public key was missing') - } - - // Set PeerId - await expect(kb.set(edKey, peerId.publicKey)).to.eventually.be.rejectedWith(/bytes do not match/) - }) - - it('should emit an event when deleting a key', async () => { - const defer = pDefer() - - if (peerId.publicKey == null) { - throw new Error('Public key was missing') - } - - await kb.set(peerId, peerId.publicKey) - - peerStore.addEventListener('change:pubkey', (evt) => { - const { peerId: id, publicKey } = evt.detail - expect(id.toString()).to.equal(peerId.toString()) - expect(publicKey).to.be.undefined() - defer.resolve() - }) - - await kb.delete(peerId) - await defer.promise - }) -}) diff --git a/test/merge.spec.ts b/test/merge.spec.ts new file mode 100644 index 0000000..bd5587b --- /dev/null +++ b/test/merge.spec.ts @@ -0,0 +1,210 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 6] */ + +import { expect } from 'aegir/chai' +import { multiaddr } from '@multiformats/multiaddr' +import type { PeerId } from '@libp2p/interface-peer-id' +import { MemoryDatastore } from 'datastore-core/memory' +import { PersistentPeerStore } from '../src/index.js' +import { createEd25519PeerId } from '@libp2p/peer-id-factory' +import type { PeerData } from '@libp2p/interface-peer-store' +import { pEvent } from 'p-event' +import { EventEmitter } from '@libp2p/interfaces/events' +import type { Libp2pEvents } from '@libp2p/interface-libp2p' + +const addr1 = multiaddr('/ip4/127.0.0.1/tcp/8000') +const addr2 = multiaddr('/ip4/20.0.0.1/tcp/8001') +const addr3 = multiaddr('/ip4/127.0.0.1/tcp/8002') + +describe('merge', () => { + let peerId: PeerId + let otherPeerId: PeerId + let peerStore: PersistentPeerStore + let events: EventEmitter + + beforeEach(async () => { + peerId = await createEd25519PeerId() + otherPeerId = await createEd25519PeerId() + events = new EventEmitter() + peerStore = new PersistentPeerStore({ peerId, events, datastore: new MemoryDatastore() }) + }) + + it('emits peer:update 
event on merge', async () => { + const eventPromise = pEvent(events, 'peer:update') + + await peerStore.merge(otherPeerId, { + multiaddrs: [addr1, addr2] + }) + + await eventPromise + }) + + it('emits self:peer:update event on merge for self peer', async () => { + const eventPromise = pEvent(events, 'self:peer:update') + + await peerStore.merge(peerId, { + multiaddrs: [addr1, addr2] + }) + + await eventPromise + }) + + it('merges multiaddrs', async () => { + const peer: PeerData = { + multiaddrs: [ + addr1, + addr2 + ], + metadata: { + foo: Uint8Array.from([0, 1, 2]) + }, + tags: { + tag1: { value: 10 } + }, + protocols: [ + '/foo/bar' + ], + peerRecordEnvelope: Uint8Array.from([3, 4, 5]) + } + + const original = await peerStore.save(otherPeerId, peer) + const updated = await peerStore.merge(otherPeerId, { + multiaddrs: [ + addr3 + ] + }) + + expect(updated).to.have.property('addresses').that.deep.equals([{ + multiaddr: addr1, + isCertified: false + }, { + multiaddr: addr3, + isCertified: false + }, { + multiaddr: addr2, + isCertified: false + }]) + + // other fields should be untouched + expect(updated).to.have.property('metadata').that.deep.equals(original.metadata) + expect(updated).to.have.property('tags').that.deep.equals(original.tags) + expect(updated).to.have.property('protocols').that.deep.equals(original.protocols) + expect(updated).to.have.property('peerRecordEnvelope').that.deep.equals(original.peerRecordEnvelope) + }) + + it('merges metadata', async () => { + const peer: PeerData = { + multiaddrs: [ + addr1, + addr2 + ], + metadata: { + foo: Uint8Array.from([0, 1, 2]), + baz: Uint8Array.from([6, 7, 8]) + }, + tags: { + tag1: { value: 10 } + }, + protocols: [ + '/foo/bar' + ], + peerRecordEnvelope: Uint8Array.from([3, 4, 5]) + } + + const original = await peerStore.save(otherPeerId, peer) + const updated = await peerStore.merge(otherPeerId, { + metadata: { + bar: Uint8Array.from([3, 4, 5]), + baz: undefined + } + }) + + expect(updated).to.have.property('metadata').that.deep.equals( + new Map([ + ['foo', Uint8Array.from([0, 1, 2])], + ['bar', Uint8Array.from([3, 4, 5])] + ]) + ) + + // other fields should be untouched + expect(updated).to.have.property('addresses').that.deep.equals(original.addresses) + expect(updated).to.have.property('tags').that.deep.equals(original.tags) + expect(updated).to.have.property('protocols').that.deep.equals(original.protocols) + expect(updated).to.have.property('peerRecordEnvelope').that.deep.equals(original.peerRecordEnvelope) + }) + + it('merges tags', async () => { + const peer: PeerData = { + multiaddrs: [ + addr1, + addr2 + ], + metadata: { + foo: Uint8Array.from([0, 1, 2]) + }, + tags: { + tag1: { value: 10 }, + tag3: { value: 50 } + }, + protocols: [ + '/foo/bar' + ], + peerRecordEnvelope: Uint8Array.from([3, 4, 5]) + } + + const original = await peerStore.patch(otherPeerId, peer) + const updated = await peerStore.merge(otherPeerId, { + tags: { + tag2: { value: 20 }, + tag3: undefined + } + }) + + expect(updated).to.have.property('tags').that.deep.equals( + new Map([ + ['tag1', { value: 10 }], + ['tag2', { value: 20 }] + ]) + ) + + // other fields should be untouched + expect(updated).to.have.property('addresses').that.deep.equals(original.addresses) + expect(updated).to.have.property('metadata').that.deep.equals(original.metadata) + expect(updated).to.have.property('protocols').that.deep.equals(original.protocols) + expect(updated).to.have.property('peerRecordEnvelope').that.deep.equals(original.peerRecordEnvelope) + }) + + it('merges peer 
record envelope', async () => { + const peer: PeerData = { + multiaddrs: [ + addr1, + addr2 + ], + metadata: { + foo: Uint8Array.from([0, 1, 2]) + }, + tags: { + tag1: { value: 10 } + }, + protocols: [ + '/foo/bar' + ], + peerRecordEnvelope: Uint8Array.from([3, 4, 5]) + } + + const original = await peerStore.save(otherPeerId, peer) + const updated = await peerStore.merge(otherPeerId, { + peerRecordEnvelope: Uint8Array.from([6, 7, 8]) + }) + + expect(updated).to.have.property('peerRecordEnvelope').that.deep.equals( + Uint8Array.from([6, 7, 8]) + ) + + // other fields should be untouched + expect(updated).to.have.property('addresses').that.deep.equals(original.addresses) + expect(updated).to.have.property('metadata').that.deep.equals(original.metadata) + expect(updated).to.have.property('tags').that.deep.equals(original.tags) + expect(updated).to.have.property('protocols').that.deep.equals(original.protocols) + }) +}) diff --git a/test/metadata-book.spec.ts b/test/metadata-book.spec.ts deleted file mode 100644 index bf79c0b..0000000 --- a/test/metadata-book.spec.ts +++ /dev/null @@ -1,358 +0,0 @@ - -/* eslint-env mocha */ - -import { expect } from 'aegir/chai' -import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' -import { MemoryDatastore } from 'datastore-core/memory' -import pDefer from 'p-defer' -import { PersistentPeerStore } from '../src/index.js' -import { codes } from '../src/errors.js' -import { createEd25519PeerId } from '@libp2p/peer-id-factory' -import type { PeerId } from '@libp2p/interface-peer-id' -import type { MetadataBook } from '@libp2p/interface-peer-store' - -describe('metadataBook', () => { - let peerId: PeerId - - before(async () => { - peerId = await createEd25519PeerId() - }) - - describe('metadataBook.set', () => { - let peerStore: PersistentPeerStore - let mb: MetadataBook - - beforeEach(() => { - peerStore = new PersistentPeerStore({ peerId, datastore: new MemoryDatastore() }) - mb = peerStore.metadataBook - }) - - it('throws invalid parameters error if invalid PeerId is provided', async () => { - try { - // @ts-expect-error invalid input - await mb.set('invalid peerId') - } catch (err: any) { - expect(err.code).to.equal(codes.ERR_INVALID_PARAMETERS) - return - } - throw new Error('invalid peerId should throw error') - }) - - it('throws invalid parameters error if no metadata provided', async () => { - try { - // @ts-expect-error invalid input - await mb.set(peerId) - } catch (err: any) { - expect(err.code).to.equal(codes.ERR_INVALID_PARAMETERS) - return - } - throw new Error('no key provided should throw error') - }) - - it('throws invalid parameters error if no value provided', async () => { - try { - // @ts-expect-error invalid input - await mb.setValue(peerId, 'location') - } catch (err: any) { - expect(err.code).to.equal(codes.ERR_INVALID_PARAMETERS) - return - } - throw new Error('no value provided should throw error') - }) - - it('throws invalid parameters error if value is not a buffer', async () => { - try { - // @ts-expect-error invalid input - await mb.setValue(peerId, 'location', 'mars') - } catch (err: any) { - expect(err.code).to.equal(codes.ERR_INVALID_PARAMETERS) - return - } - throw new Error('invalid value provided should throw error') - }) - - it('stores the content and emit change event', async () => { - const defer = pDefer() - const metadataKey = 'location' - const metadataValue = uint8ArrayFromString('mars') - - peerStore.addEventListener('change:metadata', (evt) => { - const { peerId, metadata } = evt.detail - 
expect(peerId).to.exist() - expect(metadata.get(metadataKey)).to.equalBytes(metadataValue) - defer.resolve() - }, { - once: true - }) - - await mb.setValue(peerId, metadataKey, metadataValue) - - const value = await mb.getValue(peerId, metadataKey) - expect(value).to.equalBytes(metadataValue) - - const peerMetadata = await mb.get(peerId) - expect(peerMetadata).to.exist() - expect(peerMetadata.get(metadataKey)).to.equalBytes(metadataValue) - - return await defer.promise - }) - - it('emits on set if not storing the exact same content', async () => { - const defer = pDefer() - const metadataKey = 'location' - const metadataValue1 = uint8ArrayFromString('mars') - const metadataValue2 = uint8ArrayFromString('saturn') - - let changeCounter = 0 - peerStore.addEventListener('change:metadata', () => { - changeCounter++ - if (changeCounter > 1) { - defer.resolve() - } - }) - - // set 1 - await mb.setValue(peerId, metadataKey, metadataValue1) - - // set 2 (same content) - await mb.setValue(peerId, metadataKey, metadataValue2) - - const value = await mb.getValue(peerId, metadataKey) - expect(value).to.equalBytes(metadataValue2) - - const peerMetadata = await mb.get(peerId) - expect(peerMetadata).to.exist() - expect(peerMetadata.get(metadataKey)).to.equalBytes(metadataValue2) - - return await defer.promise - }) - - it('does not emit on set if it is storing the exact same content', async () => { - const defer = pDefer() - const metadataKey = 'location' - const metadataValue = uint8ArrayFromString('mars') - - let changeCounter = 0 - peerStore.addEventListener('change:metadata', () => { - changeCounter++ - if (changeCounter > 1) { - defer.reject() - } - }) - - // set 1 - await mb.setValue(peerId, metadataKey, metadataValue) - - // set 2 (same content) - await mb.setValue(peerId, metadataKey, metadataValue) - - // Wait 50ms for incorrect second event - setTimeout(() => { - defer.resolve() - }, 50) - - return await defer.promise - }) - }) - - describe('metadataBook.get', () => { - let peerStore: PersistentPeerStore - let mb: MetadataBook - - beforeEach(() => { - peerStore = new PersistentPeerStore({ peerId, datastore: new MemoryDatastore() }) - mb = peerStore.metadataBook - }) - - it('throws invalid parameters error if invalid PeerId is provided', async () => { - try { - // @ts-expect-error invalid input - await mb.get('invalid peerId') - } catch (err: any) { - expect(err.code).to.equal(codes.ERR_INVALID_PARAMETERS) - return - } - throw new Error('invalid peerId should throw error') - }) - - it('returns empty if no metadata is known for the provided peer', async () => { - const metadata = await mb.get(peerId) - - expect(metadata).to.be.empty() - }) - - it('returns the metadata stored', async () => { - const metadataKey = 'location' - const metadataValue = uint8ArrayFromString('mars') - const metadata = new Map() - metadata.set(metadataKey, metadataValue) - - await mb.set(peerId, metadata) - - const peerMetadata = await mb.get(peerId) - expect(peerMetadata).to.exist() - expect(peerMetadata.get(metadataKey)).to.equalBytes(metadataValue) - }) - }) - - describe('metadataBook.getValue', () => { - let peerStore: PersistentPeerStore - let mb: MetadataBook - - beforeEach(() => { - peerStore = new PersistentPeerStore({ peerId, datastore: new MemoryDatastore() }) - mb = peerStore.metadataBook - }) - - it('throws invalid parameters error if invalid PeerId is provided', async () => { - try { - // @ts-expect-error invalid input - await mb.getValue('invalid peerId') - } catch (err: any) { - 
expect(err.code).to.equal(codes.ERR_INVALID_PARAMETERS) - return - } - throw new Error('invalid peerId should throw error') - }) - - it('returns undefined if no metadata is known for the provided peer', async () => { - const metadataKey = 'location' - const metadata = await mb.getValue(peerId, metadataKey) - - expect(metadata).to.not.exist() - }) - - it('returns the metadata value stored for the given key', async () => { - const metadataKey = 'location' - const metadataValue = uint8ArrayFromString('mars') - - await mb.setValue(peerId, metadataKey, metadataValue) - - const value = await mb.getValue(peerId, metadataKey) - expect(value).to.exist() - expect(value).to.equalBytes(metadataValue) - }) - - it('returns undefined if no metadata is known for the provided peer and key', async () => { - const metadataKey = 'location' - const metadataBadKey = 'nickname' - const metadataValue = uint8ArrayFromString('mars') - - await mb.setValue(peerId, metadataKey, metadataValue) - - const metadata = await mb.getValue(peerId, metadataBadKey) - expect(metadata).to.not.exist() - }) - }) - - describe('metadataBook.delete', () => { - let peerStore: PersistentPeerStore - let mb: MetadataBook - - beforeEach(() => { - peerStore = new PersistentPeerStore({ peerId, datastore: new MemoryDatastore() }) - mb = peerStore.metadataBook - }) - - it('throws invalid parameters error if invalid PeerId is provided', async () => { - try { - // @ts-expect-error invalid input - await mb.delete('invalid peerId') - } catch (err: any) { - expect(err.code).to.equal(codes.ERR_INVALID_PARAMETERS) - return - } - throw new Error('invalid peerId should throw error') - }) - - it('should not emit event if no records exist for the peer', async () => { - const defer = pDefer() - - peerStore.addEventListener('change:metadata', () => { - defer.reject() - }) - - await mb.delete(peerId) - - // Wait 50ms for incorrect invalid event - setTimeout(() => { - defer.resolve() - }, 50) - - return await defer.promise - }) - - it('should emit an event if the record exists for the peer', async () => { - const defer = pDefer() - const metadataKey = 'location' - const metadataValue = uint8ArrayFromString('mars') - - await mb.setValue(peerId, metadataKey, metadataValue) - - // Listen after set - peerStore.addEventListener('change:metadata', () => { - defer.resolve() - }) - - await mb.delete(peerId) - - return await defer.promise - }) - }) - - describe('metadataBook.deleteValue', () => { - let peerStore: PersistentPeerStore - let mb: MetadataBook - - beforeEach(() => { - peerStore = new PersistentPeerStore({ peerId, datastore: new MemoryDatastore() }) - mb = peerStore.metadataBook - }) - - it('throws invalid parameters error if invalid PeerId is provided', async () => { - try { - // @ts-expect-error invalid input - await mb.deleteValue('invalid peerId') - } catch (err: any) { - expect(err.code).to.equal(codes.ERR_INVALID_PARAMETERS) - return - } - throw new Error('invalid peerId should throw error') - }) - - it('should not emit event if no records exist for the peer', async () => { - const defer = pDefer() - const metadataKey = 'location' - - peerStore.addEventListener('change:metadata', () => { - defer.reject() - }) - - await mb.deleteValue(peerId, metadataKey) - - // Wait 50ms for incorrect invalid event - setTimeout(() => { - defer.resolve() - }, 50) - - return await defer.promise - }) - - it('should emit event if a record exists for the peer', async () => { - const defer = pDefer() - const metadataKey = 'location' - const metadataValue = 
uint8ArrayFromString('mars') - - await mb.setValue(peerId, metadataKey, metadataValue) - - // Listen after set - peerStore.addEventListener('change:metadata', () => { - defer.resolve() - }) - - await mb.deleteValue(peerId, metadataKey) - - return await defer.promise - }) - }) -}) diff --git a/test/patch.spec.ts b/test/patch.spec.ts new file mode 100644 index 0000000..b219a34 --- /dev/null +++ b/test/patch.spec.ts @@ -0,0 +1,195 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 6] */ + +import { expect } from 'aegir/chai' +import { multiaddr } from '@multiformats/multiaddr' +import type { PeerId } from '@libp2p/interface-peer-id' +import { MemoryDatastore } from 'datastore-core/memory' +import { PersistentPeerStore } from '../src/index.js' +import { createEd25519PeerId } from '@libp2p/peer-id-factory' +import type { PeerData } from '@libp2p/interface-peer-store' +import { pEvent } from 'p-event' +import { EventEmitter } from '@libp2p/interfaces/events' +import type { Libp2pEvents } from '@libp2p/interface-libp2p' + +const addr1 = multiaddr('/ip4/127.0.0.1/tcp/8000') +const addr2 = multiaddr('/ip4/20.0.0.1/tcp/8001') +const addr3 = multiaddr('/ip4/127.0.0.1/tcp/8002') + +describe('patch', () => { + let peerId: PeerId + let otherPeerId: PeerId + let peerStore: PersistentPeerStore + let events: EventEmitter + + beforeEach(async () => { + peerId = await createEd25519PeerId() + otherPeerId = await createEd25519PeerId() + events = new EventEmitter() + peerStore = new PersistentPeerStore({ peerId, events, datastore: new MemoryDatastore() }) + }) + + it('emits peer:update event on patch', async () => { + const eventPromise = pEvent(events, 'peer:update') + + await peerStore.patch(otherPeerId, { + multiaddrs: [addr1, addr2] + }) + + await eventPromise + }) + + it('emits self:peer:update event on patch for self peer', async () => { + const eventPromise = pEvent(events, 'self:peer:update') + + await peerStore.patch(peerId, { + multiaddrs: [addr1, addr2] + }) + + await eventPromise + }) + + it('replaces multiaddrs', async () => { + const peer: PeerData = { + multiaddrs: [ + addr1, + addr2 + ], + metadata: { + foo: Uint8Array.from([0, 1, 2]) + }, + tags: { + tag1: { value: 10 } + }, + protocols: [ + '/foo/bar' + ], + peerRecordEnvelope: Uint8Array.from([3, 4, 5]) + } + + const original = await peerStore.save(otherPeerId, peer) + const updated = await peerStore.patch(otherPeerId, { + multiaddrs: [ + addr3 + ] + }) + + // upated field + expect(updated).to.have.property('addresses').that.deep.equals([{ + multiaddr: addr3, + isCertified: false + }]) + + // other fields should be untouched + expect(updated).to.have.property('metadata').that.deep.equals(original.metadata) + expect(updated).to.have.property('tags').that.deep.equals(original.tags) + expect(updated).to.have.property('protocols').that.deep.equals(original.protocols) + expect(updated).to.have.property('peerRecordEnvelope').that.deep.equals(original.peerRecordEnvelope) + }) + + it('replaces metadata', async () => { + const peer: PeerData = { + multiaddrs: [ + addr1, + addr2 + ], + metadata: { + foo: Uint8Array.from([0, 1, 2]) + }, + tags: { + tag1: { value: 10 } + }, + protocols: [ + '/foo/bar' + ], + peerRecordEnvelope: Uint8Array.from([3, 4, 5]) + } + + const original = await peerStore.save(otherPeerId, peer) + const updated = await peerStore.patch(otherPeerId, { + metadata: { + bar: Uint8Array.from([3, 4, 5]) + } + }) + + expect(updated).to.have.property('metadata').that.deep.equals( + new Map([['bar', Uint8Array.from([3, 4, 
5])]]) + ) + + // other fields should be untouched + expect(updated).to.have.property('addresses').that.deep.equals(original.addresses) + expect(updated).to.have.property('tags').that.deep.equals(original.tags) + expect(updated).to.have.property('protocols').that.deep.equals(original.protocols) + expect(updated).to.have.property('peerRecordEnvelope').that.deep.equals(original.peerRecordEnvelope) + }) + + it('replaces tags', async () => { + const peer: PeerData = { + multiaddrs: [ + addr1, + addr2 + ], + metadata: { + foo: Uint8Array.from([0, 1, 2]) + }, + tags: { + tag1: { value: 10 } + }, + protocols: [ + '/foo/bar' + ], + peerRecordEnvelope: Uint8Array.from([3, 4, 5]) + } + + const original = await peerStore.save(otherPeerId, peer) + const updated = await peerStore.patch(otherPeerId, { + tags: { + tag2: { value: 20 } + } + }) + + expect(updated).to.have.property('tags').that.deep.equals( + new Map([['tag2', { value: 20 }]]) + ) + + // other fields should be untouched + expect(updated).to.have.property('addresses').that.deep.equals(original.addresses) + expect(updated).to.have.property('metadata').that.deep.equals(original.metadata) + expect(updated).to.have.property('protocols').that.deep.equals(original.protocols) + expect(updated).to.have.property('peerRecordEnvelope').that.deep.equals(original.peerRecordEnvelope) + }) + + it('replaces peer record envelope', async () => { + const peer: PeerData = { + multiaddrs: [ + addr1, + addr2 + ], + metadata: { + foo: Uint8Array.from([0, 1, 2]) + }, + tags: { + tag1: { value: 10 } + }, + protocols: [ + '/foo/bar' + ], + peerRecordEnvelope: Uint8Array.from([3, 4, 5]) + } + + const original = await peerStore.save(otherPeerId, peer) + const updated = await peerStore.patch(otherPeerId, { + peerRecordEnvelope: Uint8Array.from([6, 7, 8]) + }) + + expect(updated).to.have.property('peerRecordEnvelope').that.deep.equals( + Uint8Array.from([6, 7, 8]) + ) + + // other fields should be untouched + expect(updated).to.have.property('addresses').that.deep.equals(original.addresses) + expect(updated).to.have.property('metadata').that.deep.equals(original.metadata) + expect(updated).to.have.property('tags').that.deep.equals(original.tags) + expect(updated).to.have.property('protocols').that.deep.equals(original.protocols) + }) +}) diff --git a/test/peer-store.spec.ts b/test/peer-store.spec.ts deleted file mode 100644 index 6a07953..0000000 --- a/test/peer-store.spec.ts +++ /dev/null @@ -1,324 +0,0 @@ -/* eslint-env mocha */ - -import { expect } from 'aegir/chai' -import { PersistentPeerStore } from '../src/index.js' -import { multiaddr } from '@multiformats/multiaddr' -import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' -import { MemoryDatastore } from 'datastore-core/memory' -import { createEd25519PeerId } from '@libp2p/peer-id-factory' -import type { PeerId } from '@libp2p/interface-peer-id' -import delay from 'delay' - -const addr1 = multiaddr('/ip4/127.0.0.1/tcp/8000') -const addr2 = multiaddr('/ip4/127.0.0.1/tcp/8001') -const addr3 = multiaddr('/ip4/127.0.0.1/tcp/8002') -const addr4 = multiaddr('/ip4/127.0.0.1/tcp/8003') - -const proto1 = '/protocol1' -const proto2 = '/protocol2' -const proto3 = '/protocol3' - -describe('peer-store', () => { - let peerIds: PeerId[] - before(async () => { - peerIds = await Promise.all([ - createEd25519PeerId(), - createEd25519PeerId(), - createEd25519PeerId(), - createEd25519PeerId(), - createEd25519PeerId() - ]) - }) - - describe('empty books', () => { - let peerStore: PersistentPeerStore - - 
beforeEach(() => { - peerStore = new PersistentPeerStore({ peerId: peerIds[4], datastore: new MemoryDatastore() }) - }) - - it('has an empty map of peers', async () => { - const peers = await peerStore.all() - expect(peers.length).to.equal(0) - }) - - it('deletes a peerId', async () => { - await peerStore.addressBook.set(peerIds[0], [multiaddr('/ip4/127.0.0.1/tcp/4001')]) - await expect(peerStore.has(peerIds[0])).to.eventually.be.true() - await peerStore.delete(peerIds[0]) - await expect(peerStore.has(peerIds[0])).to.eventually.be.false() - }) - - it('sets the peer\'s public key to the KeyBook', async () => { - if (peerIds[0].publicKey == null) { - throw new Error('Public key was missing') - } - - await peerStore.keyBook.set(peerIds[0], peerIds[0].publicKey) - await expect(peerStore.keyBook.get(peerIds[0])).to.eventually.deep.equal(peerIds[0].publicKey) - }) - }) - - describe('previously populated books', () => { - let peerStore: PersistentPeerStore - - beforeEach(async () => { - peerStore = new PersistentPeerStore({ peerId: peerIds[4], datastore: new MemoryDatastore() }) - - // Add peer0 with { addr1, addr2 } and { proto1 } - await peerStore.addressBook.set(peerIds[0], [addr1, addr2]) - await peerStore.protoBook.set(peerIds[0], [proto1]) - - // Add peer1 with { addr3 } and { proto2, proto3 } - await peerStore.addressBook.set(peerIds[1], [addr3]) - await peerStore.protoBook.set(peerIds[1], [proto2, proto3]) - - // Add peer2 with { addr4 } - await peerStore.addressBook.set(peerIds[2], [addr4]) - - // Add peer3 with { addr4 } and { proto2 } - await peerStore.addressBook.set(peerIds[3], [addr4]) - await peerStore.protoBook.set(peerIds[3], [proto2]) - }) - - it('has peers', async () => { - const peers = await peerStore.all() - - expect(peers.length).to.equal(4) - expect(peers.map(peer => peer.id.toString())).to.have.members([ - peerIds[0].toString(), - peerIds[1].toString(), - peerIds[2].toString(), - peerIds[3].toString() - ]) - }) - - it('deletes a stored peer', async () => { - await peerStore.delete(peerIds[0]) - - const peers = await peerStore.all() - expect(peers.length).to.equal(3) - expect(Array.from(peers.keys())).to.not.have.members([peerIds[0].toString()]) - }) - - it('deletes a stored peer which is only on one book', async () => { - await peerStore.delete(peerIds[2]) - - const peers = await peerStore.all() - expect(peers.length).to.equal(3) - }) - - it('gets the stored information of a peer in all its books', async () => { - const peer = await peerStore.get(peerIds[0]) - expect(peer).to.exist() - expect(peer.protocols).to.have.members([proto1]) - - const peerMultiaddrs = peer.addresses.map((mi) => mi.multiaddr) - expect(peerMultiaddrs).to.have.deep.members([addr1, addr2]) - - expect(peer.id.toString()).to.equal(peerIds[0].toString()) - }) - - it('gets the stored information of a peer that is not present in all its books', async () => { - const peers = await peerStore.get(peerIds[2]) - expect(peers).to.exist() - expect(peers.protocols.length).to.eql(0) - - const peerMultiaddrs = peers.addresses.map((mi) => mi.multiaddr) - expect(peerMultiaddrs).to.have.deep.members([addr4]) - }) - - it('can find all the peers supporting a protocol', async () => { - const peerSupporting2 = [] - - for await (const peer of await peerStore.all()) { - if (peer.protocols.includes(proto2)) { - peerSupporting2.push(peer) - } - } - - expect(peerSupporting2.length).to.eql(2) - expect(peerSupporting2[0].id.toString()).to.eql(peerIds[1].toString()) - 
expect(peerSupporting2[1].id.toString()).to.eql(peerIds[3].toString()) - }) - - it('can find all the peers listening on a given address', async () => { - const peerListening4 = [] - - for await (const peer of await peerStore.all()) { - const multiaddrs = peer.addresses.map((mi) => mi.multiaddr.toString()) - - if (multiaddrs.includes(addr4.toString())) { - peerListening4.push(peer) - } - } - - expect(peerListening4.length).to.eql(2) - expect(peerListening4[0].id.toString()).to.eql(peerIds[2].toString()) - expect(peerListening4[1].id.toString()).to.eql(peerIds[3].toString()) - }) - }) - - describe('peerStore.getPeers', () => { - let peerStore: PersistentPeerStore - - beforeEach(() => { - peerStore = new PersistentPeerStore({ peerId: peerIds[4], datastore: new MemoryDatastore() }) - }) - - it('returns peers if only addresses are known', async () => { - await peerStore.addressBook.set(peerIds[0], [addr1]) - - const peers = await peerStore.all() - expect(peers.length).to.equal(1) - - const peerData = peers[0] - expect(peerData).to.exist() - expect(peerData.id).to.exist() - expect(peerData.addresses).to.have.lengthOf(1) - expect(peerData.protocols).to.have.lengthOf(0) - expect(peerData.metadata).to.be.empty() - }) - - it('returns peers if only protocols are known', async () => { - await peerStore.protoBook.set(peerIds[0], [proto1]) - - const peers = await peerStore.all() - expect(peers.length).to.equal(1) - - const peerData = peers[0] - expect(peerData).to.exist() - expect(peerData.id).to.exist() - expect(peerData.addresses).to.have.lengthOf(0) - expect(peerData.protocols).to.have.lengthOf(1) - expect(peerData.metadata).to.be.empty() - }) - - it('returns peers if only metadata is known', async () => { - const metadataKey = 'location' - const metadataValue = uint8ArrayFromString('earth') - await peerStore.metadataBook.setValue(peerIds[0], metadataKey, metadataValue) - - const peers = await peerStore.all() - expect(peers.length).to.equal(1) - - const peerData = peers[0] - expect(peerData).to.exist() - expect(peerData.id).to.exist() - expect(peerData.addresses).to.have.lengthOf(0) - expect(peerData.protocols).to.have.lengthOf(0) - expect(peerData.metadata).to.exist() - expect(peerData.metadata.get(metadataKey)).to.equalBytes(metadataValue) - }) - }) - - describe('tags', () => { - let peerStore: PersistentPeerStore - - beforeEach(() => { - peerStore = new PersistentPeerStore({ peerId: peerIds[4], datastore: new MemoryDatastore() }) - }) - - it('tags a peer', async () => { - const name = 'a-tag' - await peerStore.tagPeer(peerIds[0], name) - - await expect(peerStore.getTags(peerIds[0]), 'PeerStore did not contain tag for peer') - .to.eventually.deep.include.members([{ - name, - value: 0 - }]) - }) - - it('tags a peer with a value', async () => { - const name = 'a-tag' - const value = 50 - await peerStore.tagPeer(peerIds[0], name, { - value - }) - - await expect(peerStore.getTags(peerIds[0]), 'PeerStore did not contain tag for peer with a value') - .to.eventually.deep.include.members([{ - name, - value - }]) - }) - - it('tags a peer with a valid value', async () => { - const name = 'a-tag' - - await expect(peerStore.tagPeer(peerIds[0], name, { - value: -1 - }), 'PeerStore contain tag for peer where value was too small') - .to.eventually.be.rejected().with.property('code', 'ERR_TAG_VALUE_OUT_OF_BOUNDS') - - await expect(peerStore.tagPeer(peerIds[0], name, { - value: 101 - }), 'PeerStore contain tag for peer where value was too large') - .to.eventually.be.rejected().with.property('code', 
'ERR_TAG_VALUE_OUT_OF_BOUNDS') - - await expect(peerStore.tagPeer(peerIds[0], name, { - value: 5.5 - }), 'PeerStore contain tag for peer where value was not an integer') - .to.eventually.be.rejected().with.property('code', 'ERR_TAG_VALUE_OUT_OF_BOUNDS') - }) - - it('tags a peer with an expiring value', async () => { - const name = 'a-tag' - const value = 50 - await peerStore.tagPeer(peerIds[0], name, { - value, - ttl: 50 - }) - - await expect(peerStore.getTags(peerIds[0])) - .to.eventually.deep.include.members([{ - name, - value - }], 'PeerStore did not contain expiring value') - - await delay(100) - - await expect(peerStore.getTags(peerIds[0])) - .to.eventually.not.deep.include.members([{ - name, - value - }], 'PeerStore contained expired value') - }) - - it('does not tag a peer twice', async () => { - const name = 'a-tag' - await peerStore.tagPeer(peerIds[0], name, { - value: 1 - }) - await peerStore.tagPeer(peerIds[0], name, { - value: 10 - }) - - const allTags = await peerStore.getTags(peerIds[0]) - const tags = allTags.filter(t => t.name === name) - - expect(tags).to.have.lengthOf(1) - expect(tags).to.have.nested.property('[0].value', 10) - }) - - it('untags a peer', async () => { - const name = 'a-tag' - await peerStore.tagPeer(peerIds[0], name) - - await expect(peerStore.getTags(peerIds[0]), 'PeerStore did not contain tag') - .to.eventually.deep.include.members([{ - name, - value: 0 - }]) - - await peerStore.unTagPeer(peerIds[0], name) - - await expect(peerStore.getTags(peerIds[0]), 'PeerStore contained untagged tag') - .to.eventually.not.deep.include.members([{ - name, - value: 0 - }]) - }) - }) -}) diff --git a/test/proto-book.spec.ts b/test/proto-book.spec.ts deleted file mode 100644 index 84dbeb8..0000000 --- a/test/proto-book.spec.ts +++ /dev/null @@ -1,388 +0,0 @@ -/* eslint-env mocha */ - -import { expect } from 'aegir/chai' -import sinon from 'sinon' -import { MemoryDatastore } from 'datastore-core/memory' -import pDefer from 'p-defer' -import pWaitFor from 'p-wait-for' -import { PersistentPeerStore } from '../src/index.js' -import { codes } from '../src/errors.js' -import { createEd25519PeerId } from '@libp2p/peer-id-factory' -import type { PeerId } from '@libp2p/interface-peer-id' -import type { ProtoBook } from '@libp2p/interface-peer-store' - -const arraysAreEqual = (a: string[], b: string[]): boolean => { - if (a.length !== b.length) { - return false - } - - return a.sort().every((item, index) => b[index] === item) -} - -describe('protoBook', () => { - let peerId: PeerId - - before(async () => { - peerId = await createEd25519PeerId() - }) - - describe('protoBook.set', () => { - let peerStore: PersistentPeerStore - let pb: ProtoBook - - beforeEach(() => { - peerStore = new PersistentPeerStore({ peerId, datastore: new MemoryDatastore() }) - pb = peerStore.protoBook - }) - - it('throws invalid parameters error if invalid PeerId is provided', async () => { - // @ts-expect-error invalid input - await expect(pb.set('invalid peerId')).to.eventually.be.rejected().with.property('code', codes.ERR_INVALID_PARAMETERS) - }) - - it('throws invalid parameters error if no protocols provided', async () => { - // @ts-expect-error invalid input - await expect(pb.set(peerId)).to.eventually.be.rejected().with.property('code', codes.ERR_INVALID_PARAMETERS) - }) - - it('replaces the stored content by default and emit change event', async () => { - const defer = pDefer() - const supportedProtocols = ['protocol1', 'protocol2'] - - peerStore.addEventListener('change:protocols', (evt) => { - 
const { peerId, protocols } = evt.detail - expect(peerId).to.exist() - expect(protocols).to.have.deep.members(supportedProtocols) - defer.resolve() - }, { - once: true - }) - - await pb.set(peerId, supportedProtocols) - const protocols = await pb.get(peerId) - expect(protocols).to.have.deep.members(supportedProtocols) - - await defer.promise - }) - - it('emits on set if not storing the exact same content', async () => { - const defer = pDefer() - - const supportedProtocolsA = ['protocol1', 'protocol2'] - const supportedProtocolsB = ['protocol2'] - - let changeCounter = 0 - peerStore.addEventListener('change:protocols', () => { - changeCounter++ - if (changeCounter > 1) { - defer.resolve() - } - }) - - // set 1 - await pb.set(peerId, supportedProtocolsA) - - // set 2 (same content) - await pb.set(peerId, supportedProtocolsB) - const protocols = await pb.get(peerId) - expect(protocols).to.have.deep.members(supportedProtocolsB) - - await defer.promise - }) - - it('does not emit on set if it is storing the exact same content', async () => { - const defer = pDefer() - - const supportedProtocols = ['protocol1', 'protocol2'] - - let changeCounter = 0 - peerStore.addEventListener('change:protocols', () => { - changeCounter++ - if (changeCounter > 1) { - defer.reject() - } - }) - - // set 1 - await pb.set(peerId, supportedProtocols) - - // set 2 (same content) - await pb.set(peerId, supportedProtocols) - - // Wait 50ms for incorrect second event - setTimeout(() => { - defer.resolve() - }, 50) - - return await defer.promise - }) - }) - - describe('protoBook.add', () => { - let peerStore: PersistentPeerStore - let pb: ProtoBook - - beforeEach(() => { - peerStore = new PersistentPeerStore({ peerId, datastore: new MemoryDatastore() }) - pb = peerStore.protoBook - }) - - it('throws invalid parameters error if invalid PeerId is provided', async () => { - // @ts-expect-error invalid input - await expect(pb.add('invalid peerId')).to.eventually.be.rejected().with.property('code', codes.ERR_INVALID_PARAMETERS) - }) - - it('throws invalid parameters error if no protocols provided', async () => { - // @ts-expect-error invalid input - await expect(pb.add(peerId)).to.eventually.be.rejected().with.property('code', codes.ERR_INVALID_PARAMETERS) - }) - - it('adds the new content and emits change event', async () => { - const defer = pDefer() - - const supportedProtocolsA = ['protocol1', 'protocol2'] - const supportedProtocolsB = ['protocol3'] - const finalProtocols = supportedProtocolsA.concat(supportedProtocolsB) - - let changeTrigger = 2 - peerStore.addEventListener('change:protocols', (evt) => { - const { protocols } = evt.detail - changeTrigger-- - if (changeTrigger === 0 && arraysAreEqual(protocols, finalProtocols)) { - defer.resolve() - } - }) - - // Replace - await pb.set(peerId, supportedProtocolsA) - let protocols = await pb.get(peerId) - expect(protocols).to.have.deep.members(supportedProtocolsA) - - // Add - await pb.add(peerId, supportedProtocolsB) - protocols = await pb.get(peerId) - expect(protocols).to.have.deep.members(finalProtocols) - - return await defer.promise - }) - - it('emits on add if the content to add not exists', async () => { - const defer = pDefer() - - const supportedProtocolsA = ['protocol1'] - const supportedProtocolsB = ['protocol2'] - const finalProtocols = supportedProtocolsA.concat(supportedProtocolsB) - - let changeCounter = 0 - peerStore.addEventListener('change:protocols', () => { - changeCounter++ - if (changeCounter > 1) { - defer.resolve() - } - }) - - // set 1 - await 
pb.set(peerId, supportedProtocolsA) - - // set 2 (content already existing) - await pb.add(peerId, supportedProtocolsB) - const protocols = await pb.get(peerId) - expect(protocols).to.have.deep.members(finalProtocols) - - return await defer.promise - }) - - it('does not emit on add if the content to add already exists', async () => { - const defer = pDefer() - - const supportedProtocolsA = ['protocol1', 'protocol2'] - const supportedProtocolsB = ['protocol2'] - - let changeCounter = 0 - peerStore.addEventListener('change:protocols', () => { - changeCounter++ - if (changeCounter > 1) { - defer.reject() - } - }) - - // set 1 - await pb.set(peerId, supportedProtocolsA) - - // set 2 (content already existing) - await pb.add(peerId, supportedProtocolsB) - - // Wait 50ms for incorrect second event - setTimeout(() => { - defer.resolve() - }, 50) - - return await defer.promise - }) - }) - - describe('protoBook.remove', () => { - let peerStore: PersistentPeerStore - let pb: ProtoBook - - beforeEach(() => { - peerStore = new PersistentPeerStore({ peerId, datastore: new MemoryDatastore() }) - pb = peerStore.protoBook - }) - - it('throws invalid parameters error if invalid PeerId is provided', async () => { - // @ts-expect-error invalid input - await expect(pb.remove('invalid peerId')).to.eventually.be.rejected().with.property('code', codes.ERR_INVALID_PARAMETERS) - }) - - it('throws invalid parameters error if no protocols provided', async () => { - // @ts-expect-error invalid input - await expect(pb.remove(peerId)).to.eventually.be.rejected().with.property('code', codes.ERR_INVALID_PARAMETERS) - }) - - it('removes the given protocol and emits change event', async () => { - const spy = sinon.spy() - - const supportedProtocols = ['protocol1', 'protocol2'] - const removedProtocols = ['protocol1'] - const finalProtocols = supportedProtocols.filter(p => !removedProtocols.includes(p)) - - peerStore.addEventListener('change:protocols', spy) - - // Replace - await pb.set(peerId, supportedProtocols) - let protocols = await pb.get(peerId) - expect(protocols).to.have.deep.members(supportedProtocols) - - // Remove - await pb.remove(peerId, removedProtocols) - protocols = await pb.get(peerId) - expect(protocols).to.have.deep.members(finalProtocols) - - await pWaitFor(() => spy.callCount === 2) - - const [firstCallArgs] = spy.firstCall.args - const [secondCallArgs] = spy.secondCall.args - expect(arraysAreEqual(firstCallArgs.detail.protocols, supportedProtocols)) - expect(arraysAreEqual(secondCallArgs.detail.protocols, finalProtocols)) - }) - - it('emits on remove if the content changes', async () => { - const spy = sinon.spy() - - const supportedProtocols = ['protocol1', 'protocol2'] - const removedProtocols = ['protocol2'] - const finalProtocols = supportedProtocols.filter(p => !removedProtocols.includes(p)) - - peerStore.addEventListener('change:protocols', spy) - - // set - await pb.set(peerId, supportedProtocols) - - // remove (content already existing) - await pb.remove(peerId, removedProtocols) - const protocols = await pb.get(peerId) - expect(protocols).to.have.deep.members(finalProtocols) - - await pWaitFor(() => spy.callCount === 2) - }) - - it('does not emit on remove if the content does not change', async () => { - const spy = sinon.spy() - - const supportedProtocols = ['protocol1', 'protocol2'] - const removedProtocols = ['protocol3'] - - peerStore.addEventListener('change:protocols', spy) - - // set - await pb.set(peerId, supportedProtocols) - - // remove - await pb.remove(peerId, removedProtocols) - 
- // Only one event - expect(spy.callCount).to.eql(1) - }) - }) - - describe('protoBook.get', () => { - let peerStore: PersistentPeerStore - let pb: ProtoBook - - beforeEach(() => { - peerStore = new PersistentPeerStore({ peerId, datastore: new MemoryDatastore() }) - pb = peerStore.protoBook - }) - - it('throws invalid parameters error if invalid PeerId is provided', async () => { - // @ts-expect-error invalid input - await expect(pb.get('invalid peerId')).to.eventually.be.rejected().with.property('code', codes.ERR_INVALID_PARAMETERS) - }) - - it('returns empty if no protocols are known for the provided peer', async () => { - const protocols = await pb.get(peerId) - - expect(protocols).to.be.empty() - }) - - it('returns the protocols stored', async () => { - const supportedProtocols = ['protocol1', 'protocol2'] - - await pb.set(peerId, supportedProtocols) - - const protocols = await pb.get(peerId) - expect(protocols).to.have.deep.members(supportedProtocols) - }) - }) - - describe('protoBook.delete', () => { - let peerStore: PersistentPeerStore - let pb: ProtoBook - - beforeEach(() => { - peerStore = new PersistentPeerStore({ peerId, datastore: new MemoryDatastore() }) - pb = peerStore.protoBook - }) - - it('throws invalid parameters error if invalid PeerId is provided', async () => { - // @ts-expect-error invalid input - await expect(pb.delete('invalid peerId')).to.eventually.be.rejected().with.property('code', codes.ERR_INVALID_PARAMETERS) - }) - - it('should not emit event if no records exist for the peer', async () => { - const defer = pDefer() - - peerStore.addEventListener('change:protocols', () => { - defer.reject() - }) - - await pb.delete(peerId) - - // Wait 50ms for incorrect invalid event - setTimeout(() => { - defer.resolve() - }, 50) - - await defer.promise - }) - - it('should emit event if a record exists for the peer', async () => { - const defer = pDefer() - - const supportedProtocols = ['protocol1', 'protocol2'] - await pb.set(peerId, supportedProtocols) - - // Listen after set - peerStore.addEventListener('change:protocols', (evt) => { - const { protocols } = evt.detail - expect(protocols.length).to.eql(0) - defer.resolve() - }) - - await pb.delete(peerId) - - await defer.promise - }) - }) -}) diff --git a/test/save.spec.ts b/test/save.spec.ts new file mode 100644 index 0000000..e97d0f5 --- /dev/null +++ b/test/save.spec.ts @@ -0,0 +1,252 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 6] */ + +import { expect } from 'aegir/chai' +import { multiaddr } from '@multiformats/multiaddr' +import type { PeerId } from '@libp2p/interface-peer-id' +import pDefer from 'p-defer' +import { MemoryDatastore } from 'datastore-core/memory' +import { PersistentPeerStore } from '../src/index.js' +import { codes } from '../src/errors.js' +import { createEd25519PeerId, createRSAPeerId, createSecp256k1PeerId } from '@libp2p/peer-id-factory' +import { pEvent } from 'p-event' +import sinon from 'sinon' +import type { Libp2pEvents, PeerUpdate } from '@libp2p/interface-libp2p' +import { EventEmitter } from '@libp2p/interfaces/events' +import { Peer as PeerPB } from '../src/pb/peer.js' +import type { PeerData } from '@libp2p/interface-peer-store' + +const addr1 = multiaddr('/ip4/127.0.0.1/tcp/8000') +const addr2 = multiaddr('/ip4/20.0.0.1/tcp/8001') + +describe('save', () => { + let peerId: PeerId + let otherPeerId: PeerId + let peerStore: PersistentPeerStore + let events: EventEmitter + + beforeEach(async () => { + peerId = await createEd25519PeerId() + otherPeerId = await 
createEd25519PeerId() + events = new EventEmitter() + peerStore = new PersistentPeerStore({ peerId, events, datastore: new MemoryDatastore() }) + }) + + it('throws invalid parameters error if invalid PeerId is provided', async () => { + // @ts-expect-error invalid input + await expect(peerStore.save('invalid peerId')) + .to.eventually.be.rejected.with.property('code', codes.ERR_INVALID_PARAMETERS) + }) + + it('throws invalid parameters error if no peer data provided', async () => { + // @ts-expect-error invalid input + await expect(peerStore.save(peerId)) + .to.eventually.be.rejected.with.property('code', codes.ERR_INVALID_PARAMETERS) + }) + + it('throws invalid parameters error if invalid multiaddrs are provided', async () => { + await expect(peerStore.save(peerId, { + // @ts-expect-error invalid input + addresses: ['invalid multiaddr'] + })) + .to.eventually.be.rejected.with.property('code', codes.ERR_INVALID_PARAMETERS) + }) + + it('replaces the stored content by default and emit change event', async () => { + const supportedMultiaddrs = [addr1, addr2] + const eventPromise = pEvent(events, 'peer:update') + + await peerStore.save(otherPeerId, { + multiaddrs: supportedMultiaddrs + }) + + const event = await eventPromise as CustomEvent + + const { peer, previous } = event.detail + + expect(peer.addresses).to.deep.equal( + supportedMultiaddrs.map((multiaddr) => ({ + isCertified: false, + multiaddr + })) + ) + expect(previous).to.be.undefined() + }) + + it('emits on set if not storing the exact same content', async () => { + const defer = pDefer() + + const supportedMultiaddrsA = [addr1, addr2] + const supportedMultiaddrsB = [addr2] + + let changeCounter = 0 + events.addEventListener('peer:update', () => { + changeCounter++ + if (changeCounter > 1) { + defer.resolve() + } + }) + + // set 1 + await peerStore.save(otherPeerId, { + multiaddrs: supportedMultiaddrsA + }) + + // set 2 + await peerStore.save(otherPeerId, { + multiaddrs: supportedMultiaddrsB + }) + + const peer = await peerStore.get(otherPeerId) + const multiaddrs = peer.addresses.map((mi) => mi.multiaddr) + expect(multiaddrs).to.have.deep.members(supportedMultiaddrsB) + + await defer.promise + }) + + it('emits self event on save for self peer', async () => { + const eventPromise = pEvent(events, 'self:peer:update') + + await peerStore.save(peerId, { + multiaddrs: [addr1, addr2] + }) + + await eventPromise + }) + + it('does not emit on set if it is storing the exact same content', async () => { + const defer = pDefer() + + const supportedMultiaddrs = [addr1, addr2] + + let changeCounter = 0 + events.addEventListener('peer:update', () => { + changeCounter++ + if (changeCounter > 1) { + defer.reject(new Error('Saved identical data twice')) + } + }) + + // set 1 + await peerStore.save(otherPeerId, { + multiaddrs: supportedMultiaddrs + }) + + // set 2 (same content) + await peerStore.save(otherPeerId, { + multiaddrs: supportedMultiaddrs + }) + + // Wait 50ms for incorrect second event + setTimeout(() => { + defer.resolve() + }, 50) + + await defer.promise + }) + + it('should not set public key when key does not match', async () => { + const edKey = await createEd25519PeerId() + + if (peerId.publicKey == null) { + throw new Error('Public key was missing') + } + + await expect(peerStore.save(edKey, { + publicKey: peerId.publicKey + })).to.eventually.be.rejectedWith(/bytes do not match/) + }) + + it('should not store a public key if already stored', async () => { + // @ts-expect-error private fields + const spy = 
sinon.spy(peerStore.store.datastore, 'put') + + if (otherPeerId.publicKey == null) { + throw new Error('Public key was missing') + } + + // Set PeerId + await peerStore.save(otherPeerId, { + publicKey: otherPeerId.publicKey + }) + await peerStore.save(otherPeerId, { + publicKey: otherPeerId.publicKey + }) + + expect(spy).to.have.property('callCount', 1) + }) + + it('should not store a public key if part of peer id', async () => { + // @ts-expect-error private fields + const spy = sinon.spy(peerStore.store.datastore, 'put') + + if (otherPeerId.publicKey == null) { + throw new Error('Public key was missing') + } + + const edKey = await createEd25519PeerId() + await peerStore.save(edKey, { + publicKey: edKey.publicKey + }) + + const dbPeerEdKey = PeerPB.decode(spy.getCall(0).args[1]) + expect(dbPeerEdKey).to.not.have.property('publicKey') + + const secpKey = await createSecp256k1PeerId() + await peerStore.save(secpKey, { + publicKey: secpKey.publicKey + }) + + const dbPeerSecpKey = PeerPB.decode(spy.getCall(1).args[1]) + expect(dbPeerSecpKey).to.not.have.property('publicKey') + + const rsaKey = await createRSAPeerId() + await peerStore.save(rsaKey, { + publicKey: rsaKey.publicKey + }) + + const dbPeerRsaKey = PeerPB.decode(spy.getCall(2).args[1]) + expect(dbPeerRsaKey).to.have.property('publicKey').that.equalBytes(rsaKey.publicKey) + }) + + it('saves all of the fields', async () => { + const peer: PeerData = { + multiaddrs: [ + addr1, + addr2 + ], + metadata: { + foo: Uint8Array.from([0, 1, 2]) + }, + tags: { + tag1: { value: 10 } + }, + protocols: [ + '/foo/bar' + ], + peerRecordEnvelope: Uint8Array.from([3, 4, 5]) + } + + const saved = await peerStore.save(otherPeerId, peer) + + expect(saved).to.have.property('addresses').that.deep.equals([{ + multiaddr: addr1, + isCertified: false + }, { + multiaddr: addr2, + isCertified: false + }]) + expect(saved).to.have.property('metadata').that.deep.equals( + new Map([ + ['foo', Uint8Array.from([0, 1, 2])] + ]) + ) + expect(saved).to.have.property('tags').that.deep.equals( + new Map([ + ['tag1', { value: 10 }] + ]) + ) + expect(saved).to.have.property('protocols').that.deep.equals(peer.protocols) + expect(saved).to.have.property('peerRecordEnvelope').that.deep.equals(peer.peerRecordEnvelope) + }) +}) diff --git a/test/utils/dedupe-addresses.spec.ts b/test/utils/dedupe-addresses.spec.ts new file mode 100644 index 0000000..9e076ef --- /dev/null +++ b/test/utils/dedupe-addresses.spec.ts @@ -0,0 +1,79 @@ +/* eslint-env mocha */ + +import { expect } from 'aegir/chai' +import { multiaddr } from '@multiformats/multiaddr' +import { dedupeFilterAndSortAddresses } from '../../src/utils/dedupe-addresses.js' +import { createEd25519PeerId } from '@libp2p/peer-id-factory' +import type { PeerId } from '@libp2p/interface-peer-id' + +const addr1 = multiaddr('/ip4/127.0.0.1/tcp/8000') +const addr2 = multiaddr('/ip4/20.0.0.1/tcp/8001') + +describe('dedupe-addresses', () => { + let peerId: PeerId + + beforeEach(async () => { + peerId = await createEd25519PeerId() + }) + + it('should dedupe addresses', async () => { + expect(await dedupeFilterAndSortAddresses(peerId, async () => true, [{ + multiaddr: addr1, + isCertified: false + }, { + multiaddr: addr1, + isCertified: false + }, { + multiaddr: addr2, + isCertified: false + }])).to.deep.equal([{ + multiaddr: addr1.bytes, + isCertified: false + }, { + multiaddr: addr2.bytes, + isCertified: false + }]) + }) + + it('should sort addresses', async () => { + expect(await dedupeFilterAndSortAddresses(peerId, async () => true, [{ 
+ multiaddr: addr2, + isCertified: false + }, { + multiaddr: addr1, + isCertified: false + }, { + multiaddr: addr1, + isCertified: false + }])).to.deep.equal([{ + multiaddr: addr1.bytes, + isCertified: false + }, { + multiaddr: addr2.bytes, + isCertified: false + }]) + }) + + it('should retain isCertified when deduping addresses', async () => { + expect(await dedupeFilterAndSortAddresses(peerId, async () => true, [{ + multiaddr: addr1, + isCertified: true + }, { + multiaddr: addr1, + isCertified: false + }])).to.deep.equal([{ + multiaddr: addr1.bytes, + isCertified: true + }]) + }) + + it('should filter addresses', async () => { + expect(await dedupeFilterAndSortAddresses(peerId, async () => false, [{ + multiaddr: addr1, + isCertified: true + }, { + multiaddr: addr1, + isCertified: false + }])).to.deep.equal([]) + }) +}) From d6c33141bdfa1bc3169d4696c0ebf31003899365 Mon Sep 17 00:00:00 2001 From: semantic-release-bot Date: Mon, 24 Apr 2023 13:53:50 +0000 Subject: [PATCH 2/2] chore(release): 8.0.0 [skip ci] MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## [8.0.0](https://github.com/libp2p/js-libp2p-peer-store/compare/v7.0.2...v8.0.0) (2023-04-24) ### ⚠ BREAKING CHANGES * make peerstore atomic (#75) ### Features * make peerstore atomic ([#75](https://github.com/libp2p/js-libp2p-peer-store/issues/75)) ([4e89d3b](https://github.com/libp2p/js-libp2p-peer-store/commit/4e89d3bfeef0b64ccb7ccc09185a9d682ab376e3)) --- CHANGELOG.md | 11 +++++++++++ package.json | 2 +- 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9c285a1..972d7bf 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,14 @@ +## [8.0.0](https://github.com/libp2p/js-libp2p-peer-store/compare/v7.0.2...v8.0.0) (2023-04-24) + + +### ⚠ BREAKING CHANGES + +* make peerstore atomic (#75) + +### Features + +* make peerstore atomic ([#75](https://github.com/libp2p/js-libp2p-peer-store/issues/75)) ([4e89d3b](https://github.com/libp2p/js-libp2p-peer-store/commit/4e89d3bfeef0b64ccb7ccc09185a9d682ab376e3)) + ## [7.0.2](https://github.com/libp2p/js-libp2p-peer-store/compare/v7.0.1...v7.0.2) (2023-04-11) diff --git a/package.json b/package.json index 4bb0cc2..c087026 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@libp2p/peer-store", - "version": "7.0.2", + "version": "8.0.0", "description": "Stores information about peers libp2p knows on the network", "license": "Apache-2.0 OR MIT", "homepage": "https://github.com/libp2p/js-libp2p-peer-store#readme",
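---

Note for readers of the changelog entry above: the breaking change swaps the removed per-book accessors for three atomic methods, which the new test suites added in this patch (test/save.spec.ts, test/patch.spec.ts, test/merge.spec.ts) exercise. The sketch below is illustrative only and is not part of the patch; it assumes the public `@libp2p/peer-store` entry point named in package.json and mirrors the constructor options and call shapes used in those tests, so treat it as a sketch rather than canonical usage.

```ts
// Minimal sketch of the atomic peer store API, based on the tests in this patch.
// Assumption: the package entry point '@libp2p/peer-store' exports PersistentPeerStore.
import { PersistentPeerStore } from '@libp2p/peer-store'
import { MemoryDatastore } from 'datastore-core/memory'
import { EventEmitter } from '@libp2p/interfaces/events'
import { createEd25519PeerId } from '@libp2p/peer-id-factory'
import { multiaddr } from '@multiformats/multiaddr'
import type { Libp2pEvents } from '@libp2p/interface-libp2p'

async function main (): Promise<void> {
  const peerId = await createEd25519PeerId()
  const otherPeerId = await createEd25519PeerId()
  const events = new EventEmitter<Libp2pEvents>()
  const peerStore = new PersistentPeerStore({ peerId, events, datastore: new MemoryDatastore() })

  // save() overwrites all stored data for the peer in one call and emits 'peer:update'
  // (or 'self:peer:update' when the peer is the node's own peer id)
  await peerStore.save(otherPeerId, {
    multiaddrs: [multiaddr('/ip4/127.0.0.1/tcp/8000')],
    protocols: ['/foo/bar'],
    tags: { tag1: { value: 10 } },
    metadata: { foo: Uint8Array.from([0, 1, 2]) }
  })

  // patch() replaces only the fields that are passed, leaving the rest untouched
  await peerStore.patch(otherPeerId, {
    multiaddrs: [multiaddr('/ip4/127.0.0.1/tcp/8002')]
  })

  // merge() combines the passed fields with the stored data (e.g. appends addresses,
  // merges metadata/tag keys); passing undefined for a key removes it
  const peer = await peerStore.merge(otherPeerId, {
    metadata: { bar: Uint8Array.from([3, 4, 5]) }
  })

  console.info(peer.addresses, peer.protocols)
}

main().catch(err => { console.error(err) })
```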