diff --git a/CHANGELOG.md b/CHANGELOG.md
index 56de2791..482837b8 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,8 @@
+
+# [0.129.0](https://github.com/ipfs/interface-ipfs-core/compare/v0.128.0...v0.129.0) (2020-01-23)
+
+
+
# [0.128.0](https://github.com/ipfs/interface-ipfs-core/compare/v0.127.0...v0.128.0) (2020-01-22)
diff --git a/SPEC/BITSWAP.md b/SPEC/BITSWAP.md
index 9b97dc62..245f3a13 100644
--- a/SPEC/BITSWAP.md
+++ b/SPEC/BITSWAP.md
@@ -3,9 +3,6 @@
* [bitswap.wantlist](#bitswapwantlist)
* [bitswap.stat](#bitswapstat)
-### ⚠️ Note
-Although not listed in the documentation, all the following APIs that actually return a **promise** can also accept a **final callback** parameter.
-
### `bitswap.wantlist`
> Returns the wantlist, optionally filtered by peer ID
@@ -16,23 +13,18 @@ Although not listed in the documentation, all the following APIs that actually r
| Type | Description |
| -------- | -------- |
-| `Promise` | An object representing the wantlist |
-
-the returned object contains the following keys:
-
-- `Keys` An array of objects containing the following keys:
- - `/` A string multihash
+| `Promise` | An array of [CID][cid]s currently in the wantlist |
**Example:**
```JavaScript
const list = await ipfs.bitswap.wantlist()
console.log(list)
-// { Keys: [{ '/': 'QmHash' }] }
+// [ CID('QmHash') ]
const list2 = await ipfs.bitswap.wantlist(peerId)
console.log(list2)
-// { Keys: [{ '/': 'QmHash' }] }
+// [ CID('QmHash') ]
```
A great source of [examples][] can be found in the tests for this API.
@@ -51,11 +43,11 @@ Note: `bitswap.stat` and `stats.bitswap` can be used interchangeably.
| -------- | -------- |
| `Promise` | An object that contains information about the bitswap agent |
-the returned object contains the following keys:
+The returned object contains the following keys:
- `provideBufLen` is an integer.
-- `wantlist` (array of CIDs)
-- `peers` (array of peer IDs)
+- `wantlist` (array of [CID][cid]s)
+- `peers` (array of peer IDs as [CID][cid] instances)
- `blocksReceived` is a [BigNumber Int][1]
- `dataReceived` is a [BigNumber Int][1]
- `blocksSent` is a [BigNumber Int][1]
@@ -70,17 +62,17 @@ const stats = await ipfs.bitswap.stat()
console.log(stats)
// {
// provideBufLen: 0,
-// wantlist: [ { '/': 'QmSoLPppuBtQSGwKDZT2M73ULpjvfd3aZ6ha4oFGL1KrGM' } ],
+// wantlist: [ CID('QmSoLPppuBtQSGwKDZT2M73ULpjvfd3aZ6ha4oFGL1KrGM') ],
// peers:
-// [ 'QmSoLPppuBtQSGwKDZT2M73ULpjvfd3aZ6ha4oFGL1KrGM',
-// 'QmSoLSafTMBsPKadTEgaXctDQVcqN88CNLHXMkTNwMKPnu',
-// 'QmSoLer265NRgSp2LA3dPaeykiS1J6DifTC88f5uVQKNAd' ],
+// [ CID('QmSoLPppuBtQSGwKDZT2M73ULpjvfd3aZ6ha4oFGL1KrGM'),
+// CID('QmSoLSafTMBsPKadTEgaXctDQVcqN88CNLHXMkTNwMKPnu'),
+// CID('QmSoLer265NRgSp2LA3dPaeykiS1J6DifTC88f5uVQKNAd') ],
// blocksReceived: 0,
// dataReceived: 0,
// blocksSent: 0,
// dataSent: 0,
// dupBlksReceived: 0,
-// dupDataReceived: 0
+// dupDataReceived: 0
// }
```
@@ -88,3 +80,4 @@ A great source of [examples][] can be found in the tests for this API.
[1]: https://github.com/MikeMcl/bignumber.js/
[examples]: https://github.com/ipfs/interface-ipfs-core/blob/master/src/bitswap
+[cid]: https://www.npmjs.com/package/cids
diff --git a/SPEC/BLOCK.md b/SPEC/BLOCK.md
index 1c303725..a3a20d13 100644
--- a/SPEC/BLOCK.md
+++ b/SPEC/BLOCK.md
@@ -5,9 +5,6 @@
* [block.rm](#blockrm)
* [block.stat](#blockstat)
-### ⚠️ Note
-Although not listed in the documentation, all the following APIs that actually return a **promise** can also accept a **final callback** parameter.
-
#### `block.get`
> Get a raw IPFS block.
@@ -114,22 +111,32 @@ A great source of [examples][] can be found in the tests for this API.
`options` is an Object that can contain the following properties:
-- force (boolean): Ignores nonexistent blocks.
-- quiet (boolean): write minimal output
+- `force` (boolean): Ignores nonexistent blocks.
+- `quiet` (boolean): write minimal output
**Returns**
| Type | Description |
| -------- | -------- |
-| `Promise` | An array of objects containing hash and (potentially) error strings |
+| `AsyncIterable` | An async iterable that yields objects containing hash and (potentially) error strings |
-Note: If an error string is present for a given object in the returned array, the block with that hash was not removed and the string will contain the reason why, for example if the block was pinned.
+Each object yielded is of the form:
+
+```js
+{
+ hash: string,
+ error: string
+}
+```
+
+Note: If an error string is present for a given object, the block with that hash was not removed and the string will contain the reason why, for example if the block was pinned.
**Example:**
```JavaScript
-const result = await ipfs.block.rm(cid)
-console.log(result[0].hash)
+for await (const result of ipfs.block.rm(cid)) {
+ console.log(result.hash)
+}
```
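+
+Since failed removals are reported per-object rather than thrown, check the `error` property when you need to know which blocks could not be removed. A minimal sketch, reusing `cid` from the example above and the `force` option documented earlier:
+
+```JavaScript
+for await (const result of ipfs.block.rm(cid, { force: true })) {
+  if (result.error) {
+    // the block was not removed, e.g. because it was pinned
+    console.error(`cannot remove ${result.hash}: ${result.error}`)
+  } else {
+    console.log(`removed ${result.hash}`)
+  }
+}
+```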
A great source of [examples][] can be found in the tests for this API.
diff --git a/SPEC/BOOTSTRAP.md b/SPEC/BOOTSTRAP.md
index ba562a57..2dbe5ce0 100644
--- a/SPEC/BOOTSTRAP.md
+++ b/SPEC/BOOTSTRAP.md
@@ -4,15 +4,12 @@
the addresses of the bootstrap nodes. These are the trusted peers from
which to learn about other peers in the network.
-> Only edit this list if you understand the risks of adding or removing nodes from this list.
+> Only edit this list if you understand the risks of adding or removing nodes
* [bootstrap.add](#bootstrapadd)
* [bootstrap.list](#bootstraplist)
* [bootstrap.rm](#bootstraprm)
-### ⚠️ Note
-Although not listed in the documentation, all the following APIs that actually return a **promise** can also accept a **final callback** parameter.
-
#### `bootstrap.add`
> Add a peer address to the bootstrap list
diff --git a/SPEC/CONFIG.md b/SPEC/CONFIG.md
index 5dd37790..725a59d4 100644
--- a/SPEC/CONFIG.md
+++ b/SPEC/CONFIG.md
@@ -6,9 +6,6 @@
* [config.profiles.list](#configprofileslist)
* [config.profiles.apply](#configprofilesapply)
-### ⚠️ Note
-Although not listed in the documentation, all the following APIs that actually return a **promise** can also accept a **final callback** parameter.
-
#### `config.get`
> Returns the currently being used config. If the daemon is off, it returns the stored config.
diff --git a/SPEC/DAG.md b/SPEC/DAG.md
index 270a30f6..c4ef9c49 100644
--- a/SPEC/DAG.md
+++ b/SPEC/DAG.md
@@ -10,9 +10,6 @@ _Explore the DAG API through interactive coding challenges in our ProtoSchool tu
- _[P2P data links with content addressing](https://proto.school/#/basics/) (beginner)_
- _[Blogging on the Decentralized Web](https://proto.school/#/blog/) (intermediate)_
-### ⚠️ Note
-Although not listed in the documentation, all the following APIs that actually return a **promise** can also accept a **final callback** parameter.
-
#### `dag.put`
> Store an IPLD format node
diff --git a/SPEC/DHT.md b/SPEC/DHT.md
index 31c8a5d3..f856555f 100644
--- a/SPEC/DHT.md
+++ b/SPEC/DHT.md
@@ -7,79 +7,80 @@
* [dht.put](#dhtput)
* [dht.query](#dhtquery)
-### ⚠️ Note
-Although not listed in the documentation, all the following APIs that actually return a **promise** can also accept a **final callback** parameter.
-
#### `dht.findPeer`
-> Retrieve the Peer Info of a reachable node in the network.
+> Find the multiaddresses associated with a Peer ID
##### `ipfs.dht.findPeer(peerId)`
-Where `peerId` is a IPFS/libp2p Id from [PeerId](https://github.com/libp2p/js-peer-id) type.
+Where `peerId` is a Peer ID in `String`, [`CID`](https://github.com/multiformats/js-cid) or [`PeerId`](https://github.com/libp2p/js-peer-id) format.
**Returns**
| Type | Description |
| -------- | -------- |
-| `Promise` | An object type [`PeerInfo`](https://github.com/libp2p/js-peer-info) |
+| `Promise<{ id: CID, addrs: Multiaddr[] }>` | A promise that resolves to an object with `id` and `addrs`. `id` is a [`CID`](https://github.com/multiformats/js-cid) - the peer's ID and `addrs` is an array of [Multiaddr](https://github.com/multiformats/js-multiaddr/) - addresses for the peer. |
**Example:**
```JavaScript
-var id = PeerId.create()
-
-const peerInfo = await ipfs.dht.findPeer(id)
-// peerInfo will contain the multiaddrs of that peer
-const id = peerInfo.id
-const addrs = peerInfo.multiaddrs
+const info = await ipfs.dht.findPeer('QmcZf59bWwK5XFi76CZX8cbJ4BhTzzA3gU1ZjYZcYW3dwt')
+
+console.log(info.id.toString())
+/*
+QmcZf59bWwK5XFi76CZX8cbJ4BhTzzA3gU1ZjYZcYW3dwt
+*/
+
+info.addrs.forEach(addr => console.log(addr.toString()))
+/*
+/ip4/147.75.94.115/udp/4001/quic
+/ip6/2604:1380:3000:1f00::1/udp/4001/quic
+/dnsaddr/bootstrap.libp2p.io
+/ip6/2604:1380:3000:1f00::1/tcp/4001
+/ip4/147.75.94.115/tcp/4001
+*/
```
A great source of [examples][] can be found in the tests for this API.
#### `dht.findProvs`
-> Retrieve the providers for content that is addressed by an hash.
+> Find peers that can provide a specific value, given a CID.
-##### `ipfs.dht.findProvs(hash, [options])`
+##### `ipfs.dht.findProvs(cid, [options])`
-Where `hash` is a multihash.
+Where `cid` is a CID as a `String`, `Buffer` or [`CID`](https://github.com/multiformats/js-cid) instance.
-`options` an optional object with the following properties
- - `timeout` - a maximum timeout in milliseconds
- - `maxNumProviders` - a maximum number of providers to find
+`options` an optional object with the following properties:
+ - `numProviders` - the number of providers to find. Default: 20
+
+Note that if fewer than `options.numProviders` providers are found, an error will be thrown.
**Returns**
| Type | Description |
| -------- | -------- |
-| `Promise` | An array of type [`PeerInfo`](https://github.com/libp2p/js-peer-info) |
-
-each entry of the returned array is composed by the peerId, as well as an array with its adresses.
+| `AsyncIterable<{ id: CID, addrs: Multiaddr[] }>` | An async iterable that yields objects with `id` and `addrs`. `id` is a [`CID`](https://github.com/multiformats/js-cid) - the peer's ID and `addrs` is an array of [Multiaddr](https://github.com/multiformats/js-multiaddr/) - addresses for the peer. |
**Example:**
```JavaScript
-const provs = await ipfs.dht.findProvs(multihash)
-provs.forEach(prov => {
- console.log(prov.id.toB58String())
-})
-
-const provs2 = await ipfs.dht.findProvs(multihash, { timeout: 4000 })
-provs2.forEach(prov => {
- console.log(prov.id.toB58String())
-})
+const providers = ipfs.dht.findProvs('QmdPAhQRxrDKqkGPvQzBvjYe3kU8kiEEAd2J6ETEamKAD9')
+
+for await (const provider of providers) {
+ console.log(provider.id.toString())
+}
```
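+
+If you cap the search with `numProviders`, be prepared to handle the error thrown when that many providers cannot be found. A minimal sketch using the option documented above:
+
+```JavaScript
+try {
+  const providers = ipfs.dht.findProvs('QmdPAhQRxrDKqkGPvQzBvjYe3kU8kiEEAd2J6ETEamKAD9', { numProviders: 2 })
+  for await (const provider of providers) {
+    console.log(provider.id.toString())
+  }
+} catch (err) {
+  // thrown when fewer than numProviders providers were found
+  console.error(err)
+}
+```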
A great source of [examples][] can be found in the tests for this API.
#### `dht.get`
-> Retrieve a value from DHT
+> Given a key, query the routing system for its best value.
##### `ipfs.dht.get(key)`
-Where `key` is a Buffer.
+Where `key` is a `Buffer`.
**Returns**
@@ -99,70 +100,158 @@ A great source of [examples][] can be found in the tests for this API.
> Announce to the network that you are providing given values.
-##### `ipfs.dht.provide(cid)`
+##### `ipfs.dht.provide(cid, [options])`
+
+Where `cid` is a CID or array of CIDs as a `String`, `Buffer` or [`CID`](https://github.com/multiformats/js-cid) instance.
-Where `cid` is a CID or array of CIDs.
+`options` an optional object with the following properties:
+ - `recursive` - boolean, set to `true` to recursively provide the entire graph. Default `false`.
**Returns**
| Type | Description |
| -------- | -------- |
-| `Promise` | If action is successfully completed. Otherwise an error will be thrown |
+| `AsyncIterable` | DHT query messages. See example below for structure. |
+
+Note: You must consume the iterable to completion to complete the provide operation.
**Example:**
```JavaScript
-await ipfs.dht.provide(cid)
+for await (const message of ipfs.dht.provide('QmbWqxBEKC3P8tqsKc98xmWNzrzDtRLMiMPL8wBuTGsMnR')) {
+ console.log(message)
+}
+
+/*
+Prints objects like:
+
+{
+ extra: 'dial backoff',
+ id: CID(QmWtewmnzJiQevJPSmG9s8aC7yRfK2WXTCdRc1pCbDFu6z),
+ responses: [
+ {
+ addrs: [
+ Multiaddr(/ip4/127.0.0.1/tcp/4001),
+ Multiaddr(/ip4/172.20.0.3/tcp/4001),
+ Multiaddr(/ip4/35.178.190.196/tcp/1024)
+ ],
+ id: CID(QmRz5Nth4jTFuJJKcjyb6uwvrhxWbruRvamKY2PJxwJKw8)
+ }
+ ],
+ type: 1
+}
+
+For message `type` values, see:
+https://github.com/libp2p/go-libp2p-core/blob/6e566d10f4a5447317a66d64c7459954b969bdab/routing/query.go#L15-L24
+*/
+```
+
+Alternatively, you can simply "consume" the iterable:
+
+```js
+const { consume } = require('streaming-iterables')
+await consume(ipfs.dht.provide('QmbWqxBEKC3P8tqsKc98xmWNzrzDtRLMiMPL8wBuTGsMnR'))
```
A great source of [examples][] can be found in the tests for this API.
#### `dht.put`
-> Store a value on the DHT
+> Write a key/value pair to the routing system.
##### `ipfs.dht.put(key, value)`
-Where `key` is a Buffer and `value` is a Buffer.
+Where `key` is a `Buffer` and `value` is a `Buffer`.
**Returns**
| Type | Description |
| -------- | -------- |
-| `Promise` | If action is successfully completed. Otherwise an error will be thrown |
+| `AsyncIterable` | DHT query messages. See example below for structure. |
**Example:**
```JavaScript
-await ipfs.dht.put(key, value)
+for await (const message of ipfs.dht.put(key, value)) {
+ console.log(message)
+}
+
+/*
+Prints objects like:
+
+{
+ extra: 'dial backoff',
+ id: CID(QmWtewmnzJiQevJPSmG9s8aC7yRfK2WXTCdRc1pCbDFu6z),
+ responses: [
+ {
+ addrs: [
+ Multiaddr(/ip4/127.0.0.1/tcp/4001),
+ Multiaddr(/ip4/172.20.0.3/tcp/4001),
+ Multiaddr(/ip4/35.178.190.196/tcp/1024)
+ ],
+ id: CID(QmRz5Nth4jTFuJJKcjyb6uwvrhxWbruRvamKY2PJxwJKw8)
+ }
+ ],
+ type: 1
+}
+
+For message `type` values, see:
+https://github.com/libp2p/go-libp2p-core/blob/6e566d10f4a5447317a66d64c7459954b969bdab/routing/query.go#L15-L24
+*/
+```
+
+Alternatively, you can simply "consume" the iterable:
+
+```js
+const { consume } = require('streaming-iterables')
+await consume(ipfs.dht.put(key, value))
```
A great source of [examples][] can be found in the tests for this API.
#### `dht.query`
-> Queries the network for the 'closest peers' to a given key. 'closest' is defined by the rules of the underlying Peer Routing mechanism.
+> Find the closest Peer IDs to a given Peer ID by querying the DHT.
##### `ipfs.dht.query(peerId)`
-Where `peerId` is a IPFS/libp2p Id of type [PeerId](https://github.com/libp2p/js-peer-id).
+Where `peerId` is a Peer ID in `String`, [`CID`](https://github.com/multiformats/js-cid) or [`PeerId`](https://github.com/libp2p/js-peer-id) format.
**Returns**
| Type | Description |
| -------- | -------- |
-| `Promise` | An array of objects of type [PeerInfo](https://github.com/libp2p/js-peer-info) |
+| `AsyncIterable` | DHT query messages. See example below for structure. |
**Example:**
```JavaScript
-const id = PeerId.create()
-
-const peerInfos = await ipfs.dht.query(id)
-
-peerInfos.forEach(p => {
- console.log(p.id.toB58String())
-})
+for await (const info of ipfs.dht.query('QmcZf59bWwK5XFi76CZX8cbJ4BhTzzA3gU1ZjYZcYW3dwt')) {
+ console.log(info)
+}
+
+/*
+Prints objects like:
+
+{
+ extra: 'dial backoff',
+ id: CID(QmWtewmnzJiQevJPSmG9s8aC7yRfK2WXTCdRc1pCbDFu6z),
+ responses: [
+ {
+ addrs: [
+ Multiaddr(/ip4/127.0.0.1/tcp/4001),
+ Multiaddr(/ip4/172.20.0.3/tcp/4001),
+ Multiaddr(/ip4/35.178.190.196/tcp/1024)
+ ],
+ id: CID(QmRz5Nth4jTFuJJKcjyb6uwvrhxWbruRvamKY2PJxwJKw8)
+ }
+ ],
+ type: 1
+}
+
+For message `type` values, see:
+https://github.com/libp2p/go-libp2p-core/blob/6e566d10f4a5447317a66d64c7459954b969bdab/routing/query.go#L15-L24
+*/
```
A great source of [examples][] can be found in the tests for this API.
diff --git a/SPEC/FILES.md b/SPEC/FILES.md
index d85cc86c..cdc18df6 100644
--- a/SPEC/FILES.md
+++ b/SPEC/FILES.md
@@ -4,104 +4,117 @@
#### The Regular API
The regular, top-level API for add, cat, get and ls Files on IPFS
- - [add](#add)
- - [addFromFs](#addfromfs)
- - [addFromStream](#addfromstream)
- - [addFromURL](#addfromurl)
- - [addPullStream](#addpullstream)
- - [addReadableStream](#addreadablestream)
- - [cat](#cat)
- - [catPullStream](#catpullstream)
- - [catReadableStream](#catreadablestream)
- - [get](#get)
- - [getPullStream](#getpullstream)
- - [getReadableStream](#getreadablestream)
- - [ls](#ls)
- - [lsPullStream](#lspullstream)
- - [lsReadableStream](#lsreadablestream)
+- [add](#add)
+- [cat](#cat)
+- [get](#get)
+- [ls](#ls)
#### The Files API
The Files API, aka MFS (Mutable File System)
+- [files.chmod](#fileschmod)
+- [files.cp](#filescp)
+- [files.flush](#filesflush)
+- [files.ls](#filesls)
+- [files.mkdir](#filesmkdir)
+- [files.mv](#filesmv)
+- [files.read](#filesread)
+- [files.rm](#filesrm)
+- [files.stat](#filesstat)
+- [files.touch](#filestouch)
+- [files.write](#fileswrite)
_Explore the Mutable File System through interactive coding challenges in our [ProtoSchool tutorial](https://proto.school/#/mutable-file-system/)._
- - [files.chmod](#fileschmod)
- - [files.cp](#filescp)
- - [files.flush](#filesflush)
- - [files.ls](#filesls)
- - [files.lsReadableStream](#fileslsreadablestream)
- - [files.lsPullStream](#fileslspullstream)
- - [files.mkdir](#filesmkdir)
- - [files.mv](#filesmv)
- - [files.read](#filesread)
- - [files.readPullStream](#filesreadpullstream)
- - [files.readReadableStream](#filesreadreadablestream)
- - [files.rm](#filesrm)
- - [files.stat](#filesstat)
- - [files.touch](#filestouch)
- - [files.write](#fileswrite)
-
-### ⚠️ Note
-Although not listed in the documentation, all the following APIs that actually return a **promise** can also accept a **final callback** parameter.
#### `add`
-> Add files and data to IPFS.
+> Import files and data into IPFS.
##### `ipfs.add(data, [options])`
Where `data` may be:
-- a [`Buffer instance`][b]
-- a [`Readable Stream`][rs]
-- a [`Pull Stream`][ps]
-- a [`File`][file]
-- an array of objects, each of the form:
-```JavaScript
+* `Bytes` (alias for `Buffer`|`ArrayBuffer`|`TypedArray`) [single file]
+* `Bloby` (alias for: `Blob`|`File`) [single file]
+* `string` [single file]
+* `FileObject` (see below for definition) [single file]
+* `Iterable<number>` [single file]
+* `Iterable<Bytes>` [single file]
+* `Iterable<Bloby>` [multiple files]
+* `Iterable<string>` [multiple files]
+* `Iterable<FileObject>` [multiple files]
+* `AsyncIterable<Bytes>` [single file]
+* `AsyncIterable<Bloby>` [multiple files]
+* `AsyncIterable<string>` [multiple files]
+* `AsyncIterable<FileObject>` [multiple files]
+
+`FileObject` is a plain JS object of the following form:
+
+```js
{
- path: '/tmp/myfile.txt', // The file path
- content: // A Buffer, Readable Stream, Pull Stream or File with the contents of the file
- mode: // optional integer mode to store the entry with
- mtime: // optional value representing the modification time of the entry - either a `Date` object, an object with `{ secs, nsecs }` properties where `secs` is the number of seconds since (positive) or before (negative) the Unix Epoch began and `nsecs` is the number of nanoseconds since the last full second, or the output of `process.hrtime()`
+ // The path you want the file to be accessible at from the root CID _after_ it has been added
+ path?: string
+ // The contents of the file (see below for definition)
+ content?: FileContent
+ // File mode to store the entry with (see https://en.wikipedia.org/wiki/File_system_permissions#Numeric_notation)
+ mode?: number | string
+ // The modification time of the entry (see below for definition)
+ mtime?: UnixTime
}
```
-If no `content` is passed, then the path is treated as an empty directory
+
+If no `path` is specified, then the item will be added to the root level and will be given a name according to its CID.
+
+If no `content` is passed, then the item is treated as an empty directory.
+
+One of `path` or `content` _must_ be passed.
+
+`FileContent` is one of the following types:
+
+```js
+Bytes | Bloby | string | Iterable<number> | Iterable<Bytes> | AsyncIterable<Bytes>
+```
+
+`UnixTime` is one of the following types:
+
+```js
+Date | { secs: number, nsecs?: number } | number[]
+```
+
+As an object, `secs` is the number of seconds since (positive) or before (negative) the Unix Epoch began and `nsecs` is the number of nanoseconds since the last full second.
+
+As an array of numbers, it must have two elements, as per the output of [`process.hrtime()`](https://nodejs.org/dist/latest/docs/api/process.html#process_process_hrtime_time).
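+
+For illustration, each call below passes `mtime` in a different `UnixTime` form (a sketch: paths and contents are arbitrary, and each call returns an async iterable that must be consumed, as in the example further down):
+
+```js
+// As a Date instance
+ipfs.add({ path: 'a.txt', content: 'hello', mtime: new Date() })
+
+// As a { secs, nsecs } object - seconds since the Unix Epoch plus nanoseconds
+ipfs.add({ path: 'b.txt', content: 'hello', mtime: { secs: 1579000000, nsecs: 0 } })
+
+// As a two-element array, per the output of process.hrtime()
+ipfs.add({ path: 'c.txt', content: 'hello', mtime: process.hrtime() })
+```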
`options` is an optional object argument that might include the following keys:
-- chunker (string, default `size-262144`): chunking algorithm used to build ipfs DAGs. Available formats:
+- `chunker` (string, default `size-262144`): chunking algorithm used to build ipfs DAGs. Available formats:
- size-{size}
- rabin
- rabin-{avg}
- rabin-{min}-{avg}-{max}
-- cidVersion (integer, default 0): the CID version to use when storing the data (storage keys are based on the CID, including its version).
-- cidBase (string, default `base58btc`): Number base to display CIDs in. [The list of all possible values](https://github.com/multiformats/js-multibase/blob/master/src/constants.js).
-- enableShardingExperiment: allows to create directories with an unlimited number of entries currently size of unixfs directories is limited by the maximum block size. Note that this is an experimental feature.
-- hashAlg || hash (string, default `sha2-256`): multihash hashing algorithm to use. [The list of all possible values]( https://github.com/multiformats/js-multihash/blob/master/src/constants.js#L5-L343).
-- onlyHash (boolean, default false): doesn't actually add the file to IPFS, but rather calculates its hash.
-- pin (boolean, default true): pin this object when adding.
-- progress (function): a function that will be called with the byte length of chunks as a file is added to ipfs.
-- quiet (boolean, default false): writes a minimal output.
-- quieter (boolean, default false): writes only final hash.
-- rawLeaves (boolean, default false): if true, DAG leaves will contain raw file data and not be wrapped in a protobuf.
-- recursive (boolean, default false): for when a Path is passed, this option can be enabled to add recursively all the files.
-- shardSplitThreshold (integer, default 1000): specifies the maximum size of unsharded directory that can be generated.
-- silent (boolean, default false): writes no output.
-- trickle (boolean, default false): if true will use the trickle DAG format for DAG generation.
+- `cidVersion` (integer, default `0`): the CID version to use when storing the data (storage keys are based on the CID, including its version).
+- `enableShardingExperiment`: allows creating directories with an unlimited number of entries; currently the size of unixfs directories is limited by the maximum block size. Note that this is an experimental feature.
+- `hashAlg` (string, default `sha2-256`): multihash hashing algorithm to use. [The list of all possible values]( https://github.com/multiformats/js-multihash/blob/master/src/constants.js#L5-L343).
+- `onlyHash` (boolean, default `false`): doesn't actually add the file to IPFS, but rather calculates its hash.
+- `pin` (boolean, default `true`): pin this object when adding.
+- `progress` (function): a function that will be called with the byte length of chunks as a file is added to ipfs.
+- `rawLeaves` (boolean, default `false`): if true, DAG leaves will contain raw file data and not be wrapped in a protobuf.
+- `shardSplitThreshold` (integer, default `1000`): specifies the maximum size of unsharded directory that can be generated.
+- `trickle` (boolean, default `false`): if true will use the trickle DAG format for DAG generation.
[Trickle definition from go-ipfs documentation](https://godoc.org/github.com/ipsn/go-ipfs/gxlibs/github.com/ipfs/go-unixfs/importer/trickle).
-- wrapWithDirectory (boolean, default false): adds a wrapping node around the content.
+- `wrapWithDirectory` (boolean, default `false`): adds a wrapping node around the content.
**Returns**
| Type | Description |
| -------- | -------- |
-| `Promise` | An array of objects describing the added data |
+| `AsyncIterable` | An async iterable that yields objects describing the added data |
-an array of objects is returned, each of the form:
+Each yielded object is of the form:
```JavaScript
{
path: '/tmp/myfile.txt',
- hash: 'QmHash', // base58 encoded multihash
+ cid: CID('QmHash'),
mode: Number,
mtime: { secs: Number, nsecs: Number },
size: 123
@@ -110,244 +123,92 @@ an array of objects is returned, each of the form:
**Example:**
-In the browser, assuming `ipfs = new Ipfs(...)`:
-
```js
-const content = Ipfs.Buffer.from('ABC')
-const results = await ipfs.add(content)
-const hash = results[0].hash // "Qm...WW"
-```
-
-Now [ipfs.io/ipfs/Qm...WW](https://ipfs.io/ipfs/QmNz1UBzpdd4HfZ3qir3aPiRdX5a93XwTuDNyXRc6PKhWW)
-returns the "ABC" string.
-
-The following allows you to add multiple files at once. Note that intermediate directories in file paths will be automatically created and returned in the response along with files:
-
-```JavaScript
-const files = [
- {
- path: '/tmp/myfile.txt',
- content: Ipfs.Buffer.from('ABC')
- }
-]
-
-const results = await ipfs.add(files)
-```
-
-The `results` array:
-
-```javascript
-[
- {
- "path": "tmp",
- "hash": "QmWXdjNC362aPDtwHPUE9o2VMqPeNeCQuTBTv1NsKtwypg",
- "mode": 493,
- "mtime": { secs: Number, nsecs: Number },
- "size": 67
- },
- {
- "path": "/tmp/myfile.txt",
- "hash": "QmNz1UBzpdd4HfZ3qir3aPiRdX5a93XwTuDNyXRc6PKhWW",
- "mode": 420,
- "mtime": { secs: Number, nsecs: Number },
- "size": 11
- }
-]
-```
-
-A great source of [examples][] can be found in the tests for this API.
-
-#### `addReadableStream`
-
-> Add files and data to IPFS using a [Readable Stream][rs] of class Duplex.
+const files = [{
+ path: '/tmp/myfile.txt',
+ content: 'ABC'
+}]
-##### `ipfs.addReadableStream([options])` -> [Readable Stream][rs]
+for await (const result of ipfs.add(files)) {
+ console.log(result)
+}
-Returns a Readable Stream of class Duplex, where objects can be written of the forms
+/*
+Prints out objects like:
-```js
{
- path: '/tmp/myfile.txt', // The file path
- content: // A Buffer, Readable Stream, Pull Stream or File with the contents of the file
+ "path": "tmp",
+ "cid": CID("QmWXdjNC362aPDtwHPUE9o2VMqPeNeCQuTBTv1NsKtwypg"),
+ "mode": 493,
+ "mtime": { secs: Number, nsecs: Number },
+ "size": 67
}
-```
-
-`options` is an optional object argument that might include the following keys:
-
-- cidVersion (integer, default 0): the CID version to use when storing the data (storage keys are based on the CID, including its version)
-- progress (function): a function that will be called with the byte length of chunks as a file is added to ipfs.
-- hashAlg || hash (string): multihash hashing algorithm to use. (default: `sha2-256`) [The list of all possible values]( https://github.com/multiformats/js-multihash/blob/master/src/constants.js#L5-L343)
-- wrapWithDirectory (boolean): adds a wrapping node around the content
-- pin (boolean, default true): pin this object when adding.
-
-**Example:**
-
-```JavaScript
-const stream = ipfs.addReadableStream()
-stream.on('data', function (file) {
- // 'file' will be of the form
- // {
- // path: '/tmp/myfile.txt',
- // hash: 'QmHash' // base58 encoded multihash
- // mode: Number,
- // mtime: { secs: Number, nsecs: Number },
- // size: 123
- // }
-})
-
-stream.write({
- path:
- content:
-})
-// write as many files as you want
-
-stream.end()
-})
-```
-
-A great source of [examples][] can be found in the tests for this API.
-
-#### `addPullStream`
-> Add files and data to IPFS using a [Pull Stream][ps].
-
-##### `ipfs.addPullStream([options])` -> [Pull Stream][ps]
-
-Returns a Pull Stream, where objects can be written of the forms
-
-```js
{
- path: '/tmp/myfile.txt', // The file path
- content: // A Buffer, Readable Stream, Pull Stream or File with the contents of the file
- mode: // optional integer mode to store the entry with
- mtime: // optional value representing the modification time of the entry - either a `Date` object, an object with `{ secs, nsecs }` properties where `secs` is the number of seconds since (positive) or before (negative) the Unix Epoch began and `nsecs` is the number of nanoseconds since the last full second, or the output of `process.hrtime()`
+ "path": "/tmp/myfile.txt",
+ "cid": CID("QmNz1UBzpdd4HfZ3qir3aPiRdX5a93XwTuDNyXRc6PKhWW"),
+ "mode": 420,
+ "mtime": { secs: Number, nsecs: Number },
+ "size": 11
}
+*/
```
-`options` is an optional object argument that might include the following keys:
-
-- cidVersion (integer, default 0): the CID version to use when storing the data (storage keys are based on the CID, including its version)
-- progress (function): a function that will be called with the byte length of chunks as a file is added to ipfs.
-- hashAlg || hash (string): multihash hashing algorithm to use. (default: `sha2-256`) [The list of all possible values]( https://github.com/multiformats/js-multihash/blob/master/src/constants.js#L5-L343)
-- wrapWithDirectory (boolean): adds a wrapping node around the content
-- pin (boolean, default true): pin this object when adding.
-
-**Example:**
-
-```JavaScript
-const stream = ipfs.addPullStream()
-
-pull(
- pull.values([
- { path: , content: }
- ]),
- stream,
- pull.collect((err, values) => {
- // values will be an array of objects, which one of the form
- // {
- // path: '/tmp/myfile.txt',
- // hash: 'QmHash' // base58 encoded multihash
- // mode: Number
- // mtime: { secs: Number, nsecs: Number }
- // size: 123
- // }
- })
-)
-```
-
-#### `addFromFs`
-
-> Add files or entire directories from the FileSystem to IPFS
+Now [ipfs.io/ipfs/Qm...WW](https://ipfs.io/ipfs/QmNz1UBzpdd4HfZ3qir3aPiRdX5a93XwTuDNyXRc6PKhWW) returns the "ABC" string.
-##### `ipfs.addFromFs(path, [options])`
+**Importing files from the file system:**
-Reads a file or folder from `path` on the filesystem and adds it to IPFS.
+Both js-ipfs and js-ipfs-http-client export a utility to make importing files from the file system easier (Note: it is not available in the browser).
-Options:
- - **recursive**: If `path` is a directory, use option `{ recursive: true }` to add the directory and all its sub-directories.
- - **ignore**: To exclude file globs from the directory, use option `{ ignore: ['ignore/this/folder/**', 'and/this/file'] }`.
- - **hidden**: hidden/dot files (files or folders starting with a `.`, for example, `.git/`) are not included by default. To add them, use the option `{ hidden: true }`.
+```js
+const IPFS = require('ipfs')
+const { globSource } = IPFS
-**Returns**
+const ipfs = await IPFS.create()
-| Type | Description |
-| -------- | -------- |
-| `Promise` | An array of objects describing the files that were added |
-
-an array of objects is returned, each of the form:
+for await (const file of ipfs.add(globSource('./docs', { recursive: true }))) {
+ console.log(file)
+}
-```js
+/*
{
- path: 'test-folder',
- hash: 'QmRNjDeKStKGTQXnJ2NFqeQ9oW23WcpbmvCVrpDHgDg3T6',
- mode: Number
- mtime: Date
- size: 123
+ path: 'docs/assets/anchor.js',
+ cid: CID('QmVHxRocoWgUChLEvfEyDuuD6qJ4PhdDL2dTLcpUy3dSC2'),
+ size: 15347
}
+{
+ path: 'docs/assets/bass-addons.css',
+ cid: CID('QmPiLWKd6yseMWDTgHegb8T7wVS7zWGYgyvfj7dGNt2viQ'),
+ size: 232
+}
+...
+*/
```
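+
+Assuming `globSource` still honours the filtering options previously documented for `addFromFs` - `ignore` to exclude file globs and `hidden` to include dot-files - a directory can be imported selectively (a hedged sketch; the glob pattern is hypothetical):
+
+```js
+for await (const file of ipfs.add(globSource('./docs', {
+  recursive: true,
+  ignore: ['assets/**'], // hypothetical glob to exclude
+  hidden: true           // include files/folders starting with '.'
+}))) {
+  console.log(file.path)
+}
+```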
-**Example**
-
-```JavaScript
-const results = await ipfs.addFromFs('path/to/a/folder', { recursive: true , ignore: ['subfolder/to/ignore/**']})
-console.log(results)
-```
-
-#### `addFromURL`
-
-> Add a file from a URL to IPFS
-
-##### `ipfs.addFromURL(url, [options])`
-
-`options` is an optional object that argument that might include the same keys of [`ipfs.add(data, [options])`](#add)
-
-**Returns**
-
-| Type | Description |
-| -------- | -------- |
-| `Promise` | An object describing the added file |
-
-**Example**
-
-```JavaScript
-const result = await ipfs.addFromURL('http://example.com/')
-console.log('result')
-```
-
-#### `addFromStream`
-
-> Add a file from a stream to IPFS
-
-##### `ipfs.addFromStream(stream, [options])`
+**Importing a file from a URL:**
-This is very similar to `ipfs.add({ path:'', content: stream })`. It is like the reverse of cat.
+Both js-ipfs and js-ipfs-http-client export a utility to make importing a file from a URL easier.
-`options` is an optional object that argument that might include the same keys of [`ipfs.add(data, [options])`](#add)
-
-**Returns**
+```js
+const IPFS = require('ipfs')
+const { urlSource } = IPFS
-| Type | Description |
-| -------- | -------- |
-| `Promise` | An array of objects describing the added data |
+const ipfs = await IPFS.create()
-an array of objects is returned, each of the form:
+for await (const file of ipfs.add(urlSource('https://ipfs.io/images/ipfs-logo.svg'))) {
+ console.log(file)
+}
-```JavaScript
+/*
{
- path: '/tmp/myfile.txt',
- hash: 'QmHash', // base58 encoded multihash
- mode: Number,
- mtime: { secs: Number, nsecs: Number },
- size: 123
+ path: 'ipfs-logo.svg',
+ cid: CID('QmTqZhR6f7jzdhLgPArDPnsbZpvvgxzCZycXK7ywkLxSyU'),
+ size: 3243
}
+*/
```
-**Example**
-
-```JavaScript
-const result = await ipfs.addFromStream()
-console.log(result)
-```
+A great source of [examples][] can be found in the tests for this API.
#### `cat`
@@ -357,11 +218,11 @@ console.log(result)
`ipfsPath` can be of type:
-- [`cid`][cid] of type:
- - a [CID](https://github.com/ipfs/js-cid) instance
- - [Buffer][b], the raw Buffer of the cid
- - String, the base58 encoded version of the cid
-- String, including the ipfs handler, a cid and a path to traverse to, ie:
+- [`CID`][cid] of type:
+ - `string` - the base encoded version of the CID
+ - [CID](https://github.com/ipfs/js-cid) - a CID instance
+ - [Buffer][b] - the raw Buffer of the CID
+- `string` - including the ipfs handler, a CID and a path to traverse to, e.g.
- '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66'
- '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt'
- 'QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt'
@@ -374,85 +235,16 @@ console.log(result)
| Type | Description |
| -------- | -------- |
-| `Promise` | A [`Buffer`][b] with the contents of `path` |
+| `AsyncIterable` | An async iterable that yields [`Buffer`][b] objects with the contents of `path` |
**Example:**
```JavaScript
-const file = await ipfs.cat(ipfsPath) {
-console.log(file.toString('utf8'))
-```
-
-A great source of [examples][] can be found in the tests for this API.
-
-#### `catReadableStream`
-
-> Returns a [Readable Stream][rs] containing the contents of a file addressed by a valid IPFS Path.
-
-##### `ipfs.catReadableStream(ipfsPath, [options])` -> [Readable Stream][rs]
-
-`ipfsPath` can be of type:
-
-- [`cid`][cid] of type:
- - a [CID](https://github.com/ipfs/js-cid) instance
- - [Buffer][b], the raw Buffer of the cid
- - String, the base58 encoded version of the cid
-- String, including the ipfs handler, a cid and a path to traverse to, ie:
- - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66'
- - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt'
- - 'QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt'
-
-`options` is an optional object that may contain the following keys:
- - `offset` is an optional byte offset to start the stream at
- - `length` is an optional number of bytes to read from the stream
-
-**Returns**
-
-| Type | Description |
-| -------- | -------- |
-| `ReadableStream` | A [Readable Stream][rs] with the contents of the file |
-
-**Example**
-
-```JavaScript
-const stream = ipfs.catReadableStream(ipfsPath)
-// stream will be a stream containing the data of the file requested
-```
-
-A great source of [examples][] can be found in the tests for this API.
-
-#### `catPullStream`
-
-> Returns a [Pull Stream][ps] containing the contents of a file addressed by a valid IPFS Path.
-
-##### `ipfs.catPullStream(ipfsPath, [options])` -> [Pull Stream][rs]
-
-`ipfsPath` can be of type:
-
-- [`cid`][cid] of type:
- - [Buffer][b], the raw Buffer of the cid
- - String, the base58 encoded version of the cid
-- String, including the ipfs handler, a cid and a path to traverse to, ie:
- - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66'
- - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt'
- - 'QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt'
-
-`options` is an optional object that may contain the following keys:
- - `offset` is an optional byte offset to start the stream at
- - `length` is an optional number of bytes to read from the stream
-
-**Returns**
-
-| Type | Description |
-| -------- | -------- |
-| `PullStream` | A [Pull Stream][ps] with the contents of the file |
-
-**Example**
-
-```JavaScript
-const stream = ipfs.catPullStream(ipfsPath)
-// stream will be a stream containing the data of the file requested
-})
+const chunks = []
+for await (const chunk of ipfs.cat(ipfsPath)) {
+ chunks.push(chunk)
+}
+console.log(Buffer.concat(chunks).toString())
```
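+
+To read only part of a file, pass the `offset` and `length` options this API accepts (a minimal sketch, with `ipfsPath` as above):
+
+```JavaScript
+const chunks = []
+// read 64 bytes starting 128 bytes into the file
+for await (const chunk of ipfs.cat(ipfsPath, { offset: 128, length: 64 })) {
+  chunks.push(chunk)
+}
+console.log(Buffer.concat(chunks).toString())
+```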
A great source of [examples][] can be found in the tests for this API.
@@ -463,12 +255,13 @@ A great source of [examples][] can be found in the tests for this API.
##### `ipfs.get(ipfsPath)`
-ipfsPath can be of type:
+`ipfsPath` can be of type:
-- [`cid`][cid] of type:
- - [Buffer][b], the raw Buffer of the cid
- - String, the base58 encoded version of the cid
-- String, including the ipfs handler, a cid and a path to traverse to, ie:
+- [`CID`][cid] of type:
+ - `string` - the base encoded version of the CID
+ - [CID](https://github.com/ipfs/js-cid) - a CID instance
+ - [Buffer][b] - the raw Buffer of the CID
+- String, including the ipfs handler, a cid and a path to traverse to, e.g.
- '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66'
- '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt'
- 'QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt'
@@ -477,142 +270,39 @@ ipfsPath can be of type:
| Type | Description |
| -------- | -------- |
-| `Promise` | An array of objects representing the files |
+| `AsyncIterable` | An async iterable that yields objects representing the files |
-an array of objects is returned, each of the form:
+Each yielded object is of the form:
```js
{
- path: '/tmp/myfile.txt',
- content:
+ path: string,
+ content: AsyncIterable<BufferList>,
+ mode: number,
+ mtime: { secs: number, nsecs: number }
}
```
-Here, each `path` corresponds to the name of a file, and `content` is a regular Readable stream with the raw contents of that file.
+Here, each `path` corresponds to the name of a file, and `content` is an async iterable with the file contents.
**Example:**
```JavaScript
-const validCID = 'QmQ2r6iMNpky5f1m4cnm3Yqw8VSvjuKpTcK1X7dBR1LkJF'
+const BufferList = require('bl/BufferList')
+const cid = 'QmQ2r6iMNpky5f1m4cnm3Yqw8VSvjuKpTcK1X7dBR1LkJF'
-const files = await ipfs.get(validCID)
-files.forEach((file) => {
+for await (const file of ipfs.get(cid)) {
console.log(file.path)
- console.log(file.content.toString('utf8'))
-})
-```
-
-A great source of [examples][] can be found in the tests for this API.
-
-#### `getReadableStream`
-
-> Fetch a file or an entire directory tree from IPFS that is addressed by a valid IPFS Path. The files will be yielded as Readable Streams.
-
-##### `ipfs.getReadableStream(ipfsPath)` -> [Readable Stream][rs]
-ipfsPath can be of type:
-
-- [`cid`][cid] of type:
- - [Buffer][b], the raw Buffer of the cid
- - String, the base58 encoded version of the cid
-- String, including the ipfs handler, a cid and a path to traverse to, ie:
- - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66'
- - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt'
- - 'QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt'
-
-**Returns**
-
-| Type | Description |
-| -------- | -------- |
-| `ReadableStream` | A [Readable Stream][rs] in [Object mode](https://nodejs.org/api/stream.html#stream_object_mode) that will yield objects |
-
-the yielded objects are of the form:
-
-```js
-{
- path: '/tmp/myfile.txt',
- content: ,
- mode: Number,
- mtime: { secs: Number, nsecs: Number }
-}
-```
-
-**Example:**
-
-```JavaScript
-const validCID = 'QmYwAPJzv5CZsnA625s3Xf2nemtYgPpHdWEz79ojWnPbdG'
-
-const stream = ipfs.getReadableStream(validCID)
-
-stream.on('data', (file) => {
- // write the file's path and contents to standard out
- console.log(file.path)
- if(file.type !== 'dir') {
- file.content.on('data', (data) => {
- console.log(data.toString())
- })
- file.content.resume()
+ const content = new BufferList()
+ for await (const chunk of file.content) {
+ content.append(chunk)
}
-})
-```
-
-A great source of [examples][] can be found in the tests for this API.
-
-#### `getPullStream`
-
-> Fetch a file or an entire directory tree from IPFS that is addressed by a valid IPFS Path. The files will be yielded as Readable Streams.
-
-##### `ipfs.getPullStream(ipfsPath)` -> [Pull Stream][ps]
-ipfsPath can be of type:
-
-- [`cid`][cid] of type:
- - [Buffer][b], the raw Buffer of the cid
- - String, the base58 encoded version of the cid
-- String, including the ipfs handler, a cid and a path to traverse to, ie:
- - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66'
- - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt'
- - 'QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt'
-
-**Returns**
-
-| Type | Description |
-| -------- | -------- |
-| `PullStream` | A [Pull Stream][ps] that will yield objects |
-
-the yielded objects are of the form:
-
-```js
-{
- path: '/tmp/myfile.txt',
- content: ,
- mode: Number,
- mtime: { secs: Number, nsecs: Number }
+ console.log(content.toString())
}
```
-**Example:**
-
-```JavaScript
-const validCID = 'QmQ2r6iMNpky5f1m4cnm3Yqw8VSvjuKpTcK1X7dBR1LkJF'
-
-const stream = ipfs.getReadableStream(validCID)
-
-pull(
- stream,
- pull.collect((err, files) => {
- if (err) {
- throw err
- }
-
- files.forEach((file) => {
- console.log(file.path)
- console.log(file.path.toString())
- })
- })
-)
-```
-
A great source of [examples][] can be found in the tests for this API.
#### `ls`
@@ -621,66 +311,13 @@ A great source of [examples][] can be found in the tests for this API.
##### `ipfs.ls(ipfsPath)`
-> **Note:** ipfs.files.ls is currently only for MFS directories. The goal is to converge both functionalities.
-
-ipfsPath can be of type:
-
-- [`cid`][cid] of type:
- - [Buffer][b], the raw Buffer of the cid
- - String, the base58 encoded version of the cid
-- String, including the ipfs handler, a cid and a path to traverse to, ie:
- - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66'
- - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt'
- - 'QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt'
-
-**Returns**
-
-| Type | Description |
-| -------- | -------- |
-| `Promise` | An array of objects representing the files |
-
-an array of objects is returned, each of the form:
-
-```js
-{
- depth: 1,
- name: 'alice.txt',
- path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt',
- size: 11696,
- hash: 'QmZyUEQVuRK3XV7L9Dk26pg6RVSgaYkiSTEdnT2kZZdwoi',
- type: 'file',
- mode: Number,
- mtime: { secs: Number, nsecs: Number }
-}
-```
-
-**Example:**
-
-```JavaScript
-const validCID = 'QmQ2r6iMNpky5f1m4cnm3Yqw8VSvjuKpTcK1X7dBR1LkJF'
-
-const files = await ipfs.ls(validCID)
-files.forEach((file) => {
- console.log(file.path)
-})
-```
-
-A great source of [examples][] can be found in the tests for this API.
-
-#### `lsReadableStream`
-
-> Lists a directory from IPFS that is addressed by a valid IPFS Path. The list will be yielded as Readable Streams.
-
-##### `ipfs.lsReadableStream(ipfsPath)` -> [Readable Stream][rs]
-
-> **Note:** ipfs.files.ls is currently only for MFS directories. The goal is to converge both functionalities.
-
-ipfsPath can be of type:
+`ipfsPath` can be of type:
-- [`cid`][cid] of type:
- - [Buffer][b], the raw Buffer of the cid
- - String, the base58 encoded version of the cid
-- String, including the ipfs handler, a cid and a path to traverse to, ie:
+- [`CID`][cid] of type:
+ - `string` - the base encoded version of the CID
+ - [CID](https://github.com/ipfs/js-cid) - a CID instance
+ - [Buffer][b] - the raw Buffer of the CID
+- String, including the ipfs handler, a cid and a path to traverse to, e.g.
- '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66'
- '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt'
- 'QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt'
@@ -689,9 +326,9 @@ ipfsPath can be of type:
| Type | Description |
| -------- | -------- |
-| `ReadableStream` | A [Readable Stream][rs] in [Object mode](https://nodejs.org/api/stream.html#stream_object_mode) that will yield objects |
+| `AsyncIterable` | An async iterable that yields objects representing the files |
-the yielded objects are of the form:
+Each yielded object is of the form:
```js
{
@@ -699,7 +336,7 @@ the yielded objects are of the form:
name: 'alice.txt',
path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt',
size: 11696,
- hash: 'QmZyUEQVuRK3XV7L9Dk26pg6RVSgaYkiSTEdnT2kZZdwoi',
+ cid: CID('QmZyUEQVuRK3XV7L9Dk26pg6RVSgaYkiSTEdnT2kZZdwoi'),
type: 'file',
mode: Number,
mtime: { secs: Number, nsecs: Number }
@@ -709,81 +346,16 @@ the yielded objects are of the form:
**Example:**
```JavaScript
-const validCID = 'QmQ2r6iMNpky5f1m4cnm3Yqw8VSvjuKpTcK1X7dBR1LkJF'
+const cid = 'QmQ2r6iMNpky5f1m4cnm3Yqw8VSvjuKpTcK1X7dBR1LkJF'
-const stream = ipfs.lsReadableStream(validCID)
-
-stream.on('data', (file) => {
- // write the file's path and contents to standard out
+for await (const file of ipfs.ls(cid)) {
console.log(file.path)
-})
-```
-
-A great source of [examples][] can be found in the tests for this API.
-
-#### `lsPullStream`
-
-> Fetch a file or an entire directory tree from IPFS that is addressed by a valid IPFS Path. The files will be yielded through a Pull Stream.
-
-##### `ipfs.lsPullStream(ipfsPath)` -> [Pull Stream][ps]
-
-> **Note:** ipfs.files.ls is currently only for MFS directories. The goal is to converge both functionalities.
-
-
-ipfsPath can be of type:
-
-- [`cid`][cid] of type:
- - [Buffer][b], the raw Buffer of the cid
- - String, the base58 encoded version of the cid
-- String, including the ipfs handler, a cid and a path to traverse to, ie:
- - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66'
- - '/ipfs/QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt'
- - 'QmXEmhrMpbVvTh61FNAxP9nU7ygVtyvZA8HZDUaqQCAb66/a.txt'
-
-**Returns**
-
-| Type | Description |
-| -------- | -------- |
-| `PullStream` | A [Pull Stream][ps] that will yield objects |
-
-the yielded objects are of the form:
-
-```js
-{
- depth: 1,
- name: 'alice.txt',
- path: 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt',
- size: 11696,
- hash: 'QmZyUEQVuRK3XV7L9Dk26pg6RVSgaYkiSTEdnT2kZZdwoi',
- type: 'file',
- mode: Number,
- mtime: { secs: Number, nsecs: Number }
}
```
-**Example:**
-
-```JavaScript
-const validCID = 'QmQ2r6iMNpky5f1m4cnm3Yqw8VSvjuKpTcK1X7dBR1LkJF'
-
-const stream = ipfs.lsPullStream(validCID)
-
-pull(
- stream,
- pull.collect((err, files) => {
- if (err) {
- throw err
- }
-
- files.forEach((file) => console.log(file.path))
- })
-)
-```
-
A great source of [examples][] can be found in the tests for this API.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
+---
## The Files API aka MFS (The Mutable File System)
@@ -920,7 +492,6 @@ Where:
- `hash` is a Boolean value to return only the hash (default: false)
- `size` is a Boolean value to return only the size (default: false)
- `withLocal` is a Boolean value to compute the amount of the dag that is local, and if possible the total size (default: false)
- - `cidBase` is which number base to use to format hashes - e.g. `base32`, `base64` etc (default: `base58btc`)
**Returns**
@@ -930,7 +501,7 @@ Where:
the returned object has the following keys:
-- `hash` is a string with the hash
+- `cid` is a [CID][cid] instance
- `size` is an integer with the file size in Bytes
- `cumulativeSize` is an integer with the size of the DAGNodes making up the file in Bytes
- `type` is a string that can be either `directory` or `file`
@@ -946,7 +517,7 @@ const stats = await ipfs.files.stat('/file.txt')
console.log(stats)
// {
-// hash: 'QmXmJBmnYqXVuicUfn9uDCC8kxCEEzQpsAbeq1iJvLAmVs',
+// cid: CID('QmXmJBmnYqXVuicUfn9uDCC8kxCEEzQpsAbeq1iJvLAmVs'),
// size: 60,
// cumulativeSize: 118,
// blocks: 1,
@@ -1023,7 +594,7 @@ await ipfs.files.rm('/my/beautiful/directory', { recursive: true })
#### `files.read`
-> Read a file into a [`Buffer`][b].
+> Read a file
##### `ipfs.files.read(path, [options])`
@@ -1041,81 +612,18 @@ Where:
| Type | Description |
| -------- | -------- |
-| `Promise` | A [`Buffer`][b] with the contents of `path` |
-
-N.b. this method is likely to result in high memory usage, you should use [files.readReadableStream](#filesreadreadablestream) or [files.readPullStream](#filesreadpullstream) instead where possible.
+| `AsyncIterable` | An async iterable that yields [`Buffer`][b] objects with the contents of `path` |
**Example:**
```JavaScript
-const buf = await ipfs.files.read('/hello-world')
-console.log(buf.toString('utf8'))
-
-// Hello, World!
-```
+const chunks = []
-#### `files.readReadableStream`
-
-> Read a file into a [`ReadableStream`][rs].
-
-##### `ipfs.files.readReadableStream(path, [options])`
-
-Where:
-
-- `path` is the path of the file to read and must point to a file (and not a directory). It might be:
- - An existing MFS path to a file (e.g. `/my-dir/a.txt`)
- - An IPFS path (e.g. `/ipfs/QmWGeRAEgtsHW3ec7U4qW2CyVy7eA2mFRVbk1nb24jFyks`)
- - A [CID][cid] instance (e.g. `new CID('QmWGeRAEgtsHW3ec7U4qW2CyVy7eA2mFRVbk1nb24jFyks')`)
-- `options` is an optional Object that might contain the following keys:
- - `offset` is an Integer with the byte offset to begin reading from (default: 0)
- - `length` is an Integer with the maximum number of bytes to read (default: Read to the end of stream)
-
-**Returns**
-
-| Type | Description |
-| -------- | -------- |
-| `ReadableStream` | A [Readable Stream][rs] with the contents of `path` |
-
-**Example:**
-
-```JavaScript
-const stream = ipfs.files.readReadableStream('/hello-world')
-stream.on('data', (buf) => console.log(buf.toString('utf8')))
-
-// Hello, World!
-```
-
-#### `files.readPullStream`
-
-> Read a file into a [`PullStream`][ps].
-
-##### `ipfs.files.readPullStream(path, [options])`
-
-Where:
-
-- `path` is the path of the file to read and must point to a file (and not a directory). It might be:
- - An existing MFS path to a file (e.g. `/my-dir/a.txt`)
- - An IPFS path (e.g. `/ipfs/QmWGeRAEgtsHW3ec7U4qW2CyVy7eA2mFRVbk1nb24jFyks`)
- - A [CID][cid] instance (e.g. `new CID('QmWGeRAEgtsHW3ec7U4qW2CyVy7eA2mFRVbk1nb24jFyks')`)
-- `options` is an optional Object that might contain the following keys:
- - `offset` is an Integer with the byte offset to begin reading from (default: 0)
- - `length` is an Integer with the maximum number of bytes to read (default: Read to the end of stream)
-
-**Returns**
-
-| Type | Description |
-| -------- | -------- |
-| `PullStream` | A [`PullStream`][ps] with the contents of `path` |
-
-**Example:**
-
-```JavaScript
-pull(
- ipfs.files.readPullStream('/hello-world'),
- through(buf => console.log(buf.toString('utf8'))),
- collect(err => {})
-)
+for await (const chunk of ipfs.files.read('/hello-world')) {
+ chunks.push(chunk)
+}
+console.log(Buffer.concat(chunks).toString())
// Hello, World!
```
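+
+Passing the `offset` and `length` options this API accepts reads a byte range instead of the whole file (a minimal sketch):
+
+```JavaScript
+const chunks = []
+for await (const chunk of ipfs.files.read('/hello-world', { offset: 0, length: 5 })) {
+  chunks.push(chunk)
+}
+console.log(Buffer.concat(chunks).toString())
+// Hello
+```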
@@ -1130,8 +638,7 @@ Where:
- `path` is the path of the file to write
- `content` can be:
- a [`Buffer`][b]
- - a [`PullStream`][ps]
- - a [`ReadableStream`][rs]
+ - an `AsyncIterable` (note: Node.js readable streams are async iterable)
- a [`Blob`][blob] (caveat: will only work in the browser)
- a string path to a file (caveat: will only work in Node.js)
- `options` is an optional Object that might contain the following keys:
@@ -1235,133 +742,35 @@ Where:
- An IPFS path (e.g. `/ipfs/QmWGeRAEgtsHW3ec7U4qW2CyVy7eA2mFRVbk1nb24jFyks`)
- A [CID][cid] instance (e.g. `new CID('QmWGeRAEgtsHW3ec7U4qW2CyVy7eA2mFRVbk1nb24jFyks')`)
- `options` is an optional Object that might contain the following keys:
- - `long` is a Boolean value to decide whether or not to populate `type`, `size` and `hash` (default: false)
- - `cidBase` is which number base to use to format hashes - e.g. `base32`, `base64` etc (default: `base58btc`)
- `sort` is a Boolean value. If true entries will be sorted by filename (default: false)
**Returns**
| Type | Description |
| -------- | -------- |
-| `Promise` | An array of objects representing the files |
+| `AsyncIterable` | An async iterable that yields objects representing the files |
-each object contains the following keys:
+Each object contains the following keys:
- `name` which is the file's name
- `type` which is the object's type (`directory` or `file`)
- `size` the size of the file in bytes
-- `hash` the hash of the file
+- `cid` the hash of the file (a [CID][cid] instance)
- `mode` the UnixFS mode as a Number
- `mtime` an object with numeric `secs` and `nsecs` properties
**Example:**
```JavaScript
-const files = await ipfs.files.ls('/screenshots')
-
-files.forEach((file) => {
- console.log(file.name)
-})
-
+for await (const file of ipfs.files.ls('/screenshots')) {
+ console.log(file.name)
+}
// 2018-01-22T18:08:46.775Z.png
// 2018-01-22T18:08:49.184Z.png
```
-#### `files.lsReadableStream`
-
-> Lists a directory from the local mutable namespace that is addressed by a valid IPFS Path. The list will be yielded as Readable Streams.
-
-##### `Go` **WIP**
-
-##### `JavaScript` - ipfs.files.lsReadableStream([path], [options]) -> [Readable Stream][rs]
-
-Where:
-
-- `path` is an optional string to show listing for (default: `/`). It might be:
- - An existing MFS path to a directory (e.g. `/my-dir`)
- - An IPFS path (e.g. `/ipfs/QmWGeRAEgtsHW3ec7U4qW2CyVy7eA2mFRVbk1nb24jFyks`)
- - A [CID][cid] instance (e.g. `new CID('QmWGeRAEgtsHW3ec7U4qW2CyVy7eA2mFRVbk1nb24jFyks')`)
-- `options` is an optional Object that might contain the following keys:
- - `long` is a Boolean value to decide whether or not to populate `type`, `size` and `hash` (default: false)
- - `cidBase` is which number base to use to format hashes - e.g. `base32`, `base64` etc (default: `base58btc`)
-
-**Returns**
-
-| Type | Description |
-| -------- | -------- |
-| `ReadableStream` | A [Readable Stream][rs] in [Object mode](https://nodejs.org/api/stream.html#stream_object_mode) that will yield objects |
-
-the yielded objects contain the following keys:
-
-- `name` which is the file's name
-- `type` which is the object's type (`directory` or `file`)
-- `size` the size of the file in bytes
-- `hash` the hash of the file
-- `mode` the UnixFS mode as a Number
-- `mtime` an object with numeric `secs` and `nsecs` properties
-
-**Example:**
-
-```JavaScript
-const stream = ipfs.lsReadableStream('/some-dir')
-
-stream.on('data', (file) => {
- // write the file's path and contents to standard out
- console.log(file.name)
-})
-```
-
-#### `files.lsPullStream`
-
-> Fetch a file or an entire directory tree from IPFS that is addressed by a valid IPFS Path. The files will be yielded through a Pull Stream.
-
-##### `Go` **WIP**
-
-##### `JavaScript` - ipfs.lsPullStream([path], [options]) -> [Pull Stream][ps]
-
-Where:
-
-- `path` is an optional string to show listing for (default: `/`). It might be:
- - An existing MFS path to a directory (e.g. `/my-dir`)
- - An IPFS path (e.g. `/ipfs/QmWGeRAEgtsHW3ec7U4qW2CyVy7eA2mFRVbk1nb24jFyks`)
- - A [CID][cid] instance (e.g. `new CID('QmWGeRAEgtsHW3ec7U4qW2CyVy7eA2mFRVbk1nb24jFyks')`)
-- `options` is an optional Object that might contain the following keys:
- - `long` is a Boolean value to decide whether or not to populate `type`, `size` and `hash` (default: false)
- - `cidBase` is which number base to use to format hashes - e.g. `base32`, `base64` etc (default: `base58btc`)
-
-**Returns**
-
-| Type | Description |
-| -------- | -------- |
-| `PullStream` | A [Pull Stream][os] that will yield objects |
-
-the yielded objects contain the following keys:
-
- - `name` which is the file's name
- - `type` which is the object's type (`directory` or `file`)
- - `size` the size of the file in bytes
- - `hash` the hash of the file
- - `mode` the UnixFS mode as a Number
- - `mtime` an object with numeric `secs` and `nsecs` properties
-
-**Example:**
-
-```JavaScript
-pull(
- ipfs.lsPullStream('/some-dir'),
- pull.through(file => {
- console.log(file.name)
- })
- pull.onEnd(...)
-)
-```
-
-A great source of [examples][] can be found in the tests for this API.
-
[examples]: https://github.com/ipfs/interface-ipfs-core/blob/master/src/files-regular
[b]: https://www.npmjs.com/package/buffer
-[rs]: https://www.npmjs.com/package/readable-stream
-[ps]: https://www.npmjs.com/package/pull-stream
[file]: https://developer.mozilla.org/en-US/docs/Web/API/File
[cid]: https://www.npmjs.com/package/cids
[blob]: https://developer.mozilla.org/en-US/docs/Web/API/Blob
diff --git a/SPEC/KEY.md b/SPEC/KEY.md
index 82aad9a2..4d36532f 100644
--- a/SPEC/KEY.md
+++ b/SPEC/KEY.md
@@ -7,9 +7,6 @@
* [key.export](#keyexport)
* [key.import](#keyimport)
-### ⚠️ Note
-Although not listed in the documentation, all the following APIs that actually return a **promise** can also accept a **final callback** parameter.
-
#### `key.gen`
> Generate a new key
diff --git a/SPEC/MISCELLANEOUS.md b/SPEC/MISCELLANEOUS.md
index b09641a5..ae1af409 100644
--- a/SPEC/MISCELLANEOUS.md
+++ b/SPEC/MISCELLANEOUS.md
@@ -5,13 +5,8 @@
* [dns](#dns)
* [stop](#stop)
* [ping](#ping)
-* [pingPullStream](#pingpullstream)
-* [pingReadableStream](#pingreadablestream)
* [resolve](#resolve)
-### ⚠️ Note
-Although not listed in the documentation, all the following APIs that actually return a **promise** can also accept a **final callback** parameter.
-
#### `id`
> Returns the identity of the Peer
@@ -116,9 +111,9 @@ Where:
| Type | Description |
| -------- | -------- |
-| `Promise` | An array of ping response objects |
+| `AsyncIterable` | An async iterable that yields ping response objects |
-an array of objects is returned, each of the form:
+Each yielded object is of the form:
```js
{
@@ -133,111 +128,15 @@ Note that not all ping response objects are "pongs". A "pong" message can be ide
**Example:**
```JavaScript
-const responses = await ipfs.ping('Qmhash')
-responses.forEach((res) => {
+for await (const res of ipfs.ping('Qmhash')) {
if (res.time) {
console.log(`Pong received: time=${res.time} ms`)
} else {
console.log(res.text)
}
-})
-```
-
-A great source of [examples](https://github.com/ipfs/interface-ipfs-core/tree/master/src/ping) can be found in the tests for this API.
-
-#### `pingPullStream`
-
-> Stream echo request packets to IPFS hosts
-
-##### `ipfs.pingPullStream(peerId, [options])`
-
-Where:
-
-- `peerId` (string) ID of the peer to be pinged.
-- `options` is an optional object argument that might include the following properties:
- - `count` (integer, default 10): the number of ping messages to send
-
-**Returns**
-
-| Type | Description |
-| -------- | -------- |
-| `PullStream` | A [`PullStream`][ps] of ping response objects |
-
-example of the returned objects:
-
-```js
-{
- success: true,
- time: 1234,
- text: ''
-}
-```
-
-Note that not all ping response objects are "pongs". A "pong" message can be identified by a truthy `success` property and an empty `text` property. Other ping responses are failures or status updates.
-
-**Example:**
-
-```JavaScript
-const pull = require('pull-stream')
-
-pull(
- ipfs.pingPullStream('Qmhash'),
- pull.drain((res) => {
- if (res.time) {
- console.log(`Pong received: time=${res.time} ms`)
- } else {
- console.log(res.text)
- }
- })
-)
-```
-
-A great source of [examples](https://github.com/ipfs/interface-ipfs-core/tree/master/src/ping) can be found in the tests for this API.
-
-#### `pingReadableStream`
-
-> Stream echo request packets to IPFS hosts
-
-##### `ipfs.pingReadableStream(peerId, [options])`
-
-Where:
-
-- `peerId` (string) ID of the peer to be pinged.
-- `options` is an optional object argument that might include the following properties:
- - `count` (integer, default 10): the number of ping messages to send
-
-**Returns**
-
-| Type | Description |
-| -------- | -------- |
-| `ReadableStream` | A [`ReadableStream`][rs] of ping response objects |
-
-example of the returned objects:
-
-```js
-{
- success: true,
- time: 1234,
- text: ''
}
```
-Note that not all ping response objects are "pongs". A "pong" message can be identified by a truthy `success` property and an empty `text` property. Other ping responses are failures or status updates.
-
-**Example:**
-
-```JavaScript
-const stream = ipfs.pingReadableStream('Qmhash')
-
-stream.on('data', (res) => {
- if (res.time) {
- console.log(`Pong received: time=${res.time} ms`)
- } else {
- console.log(res.text)
- }
-})
-```
-
A great source of [examples](https://github.com/ipfs/interface-ipfs-core/tree/master/src/ping) can be found in the tests for this API.
#### `resolve`
diff --git a/SPEC/NAME.md b/SPEC/NAME.md
index e6705953..319ff84d 100644
--- a/SPEC/NAME.md
+++ b/SPEC/NAME.md
@@ -6,9 +6,6 @@
* [name.pubsub.subs](#namepubsubsubs)
* [name.resolve](#nameresolve)
-### ⚠️ Note
-Although not listed in the documentation, all the following APIs that actually return a **promise** can also accept a **final callback** parameter.
-
#### `name.publish`
> Publish an IPNS name with a given value.
@@ -180,7 +177,7 @@ A great source of [examples][examples-pubsub] can be found in the tests for this
| Type | Description |
| -------- | -------- |
-| `Promise` | A string that contains the IPFS hash |
+| `AsyncIterable` | An async iterable that yields strings that are increasingly accurate resolved paths. |
**Example:**
@@ -188,9 +185,10 @@ A great source of [examples][examples-pubsub] can be found in the tests for this
// The IPNS address you want to resolve.
const addr = '/ipns/ipfs.io'
-const name = await ipfs.name.resolve(addr)
-console.log(name)
-// /ipfs/QmQrX8hka2BtNHa8N8arAq16TCVx5qHcb46c5yPewRycLm
+for await (const name of ipfs.name.resolve(addr)) {
+ console.log(name)
+ // /ipfs/QmQrX8hka2BtNHa8N8arAq16TCVx5qHcb46c5yPewRycLm
+}
```
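+
+If only the final resolution matters, the iterable can be reduced to its last item; a minimal sketch using the [it-last](https://www.npmjs.com/package/it-last) utility (assuming the same `addr` as above):
+
+```JavaScript
+const last = require('it-last')
+
+// take only the last (most accurate) resolved path
+console.log(await last(ipfs.name.resolve(addr)))
+```
+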
A great source of [examples][] can be found in the tests for this API.
diff --git a/SPEC/OBJECT.md b/SPEC/OBJECT.md
index d8db3618..1bbd2619 100644
--- a/SPEC/OBJECT.md
+++ b/SPEC/OBJECT.md
@@ -11,9 +11,6 @@
* [object.patch.appendData](#objectpatchappenddata)
* [object.patch.setData](#objectpatchsetdata)
-### ⚠️ Note
-Although not listed in the documentation, all the following APIs that actually return a **promise** can also accept a **final callback** parameter.
-
#### `object.new`
> Create a new MerkleDAG node, using a specific layout. Caveat: So far, only UnixFS object layouts are supported.
@@ -90,7 +87,8 @@ A great source of [examples][] can be found in the tests for this API.
`options` is an optional argument of type object that can contain the following properties:
-- `enc`, the encoding of multihash (base58, base64, etc), if any.
+- `enc` (`string`) - the encoding of multihash (base58, base64, etc), if any.
+- `timeout` (`number`|`string`) - Throw an error if the request does not complete within the specified timeout (in milliseconds). If `timeout` is a string, the value is parsed as a [human readable duration](https://www.npmjs.com/package/parse-duration). There is no timeout by default.
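+
+For example, a caller might bound the request with a human readable duration; a minimal sketch (the `cid` variable is assumed to reference an existing node):
+
+```JavaScript
+// a sketch: give up if the node cannot be fetched within 2 seconds
+const node = await ipfs.object.get(cid, { timeout: '2s' })
+```
+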
**Returns**
@@ -129,14 +127,14 @@ A great source of [examples][] can be found in the tests for this API.
| Type | Description |
| -------- | -------- |
-| `Promise` | A Buffer with the data that the MerkleDAG node contained |
+| `Promise` | A Promise that resolves to a Buffer with the data that the MerkleDAG node contained |
**Example:**
```JavaScript
-const multihash = 'QmPb5f92FxKPYdT3QNBd1GKiL4tZUXUrzF4Hkpdr3Gf1gK'
+const cid = 'QmPb5f92FxKPYdT3QNBd1GKiL4tZUXUrzF4Hkpdr3Gf1gK'
-const data = await ipfs.object.data(multihash)
+const data = await ipfs.object.data(cid)
console.log(data.toString())
// Logs:
// some data
diff --git a/SPEC/PIN.md b/SPEC/PIN.md
index f8628a0a..0958ce80 100644
--- a/SPEC/PIN.md
+++ b/SPEC/PIN.md
@@ -4,9 +4,6 @@
* [pin.ls](#pinls)
* [pin.rm](#pinrm)
-### ⚠️ Note
-Although not listed in the documentation, all the following APIs that actually return a **promise** can also accept a **final callback** parameter.
-
#### `pin.add`
> Adds an IPFS object to the pinset and also stores it to the IPFS repo. pinset is the set of hashes currently pinned (not gc'able).
@@ -17,19 +14,20 @@ Where:
- `hash` is an IPFS multihash.
- `options` is an object that can contain the following keys
- - 'recursive' - Recursively pin the object linked. Type: bool. Default: `true`
+ - `recursive` (`boolean`) - Recursively pin the object linked. Default: `true`
+ - `timeout` (`number`|`string`) - Throw an error if the request does not complete within the specified timeout (in milliseconds). If `timeout` is a string, the value is parsed as a [human readable duration](https://www.npmjs.com/package/parse-duration). There is no timeout by default.
**Returns**
| Type | Description |
| -------- | -------- |
-| `Promise` | An array of objects that represent the files that were pinned |
+| `Promise<Array<{ cid: CID }>>` | An array of objects that represent the files that were pinned |
an array of objects is returned, each of the form:
```JavaScript
{
- hash: 'QmHash'
+ cid: CID('QmHash')
}
```
@@ -39,7 +37,7 @@ an array of objects is returned, each of the form:
const pinset = await ipfs.pin.add('QmWATWQ7fVPP2EFGu71UkfnqhYXDYH566qy47CnJDgvs8u')
console.log(pinset)
// Logs:
-// [ { hash: 'QmWATWQ7fVPP2EFGu71UkfnqhYXDYH566qy47CnJDgvs8u' } ]
+// [ { cid: CID('QmWATWQ7fVPP2EFGu71UkfnqhYXDYH566qy47CnJDgvs8u') } ]
```
A great source of [examples][] can be found in the tests for this API.
@@ -53,28 +51,24 @@ A great source of [examples][] can be found in the tests for this API.
Where:
- `cid` - a [CID][cid] instance or CID as a string or an array of CIDs.
-- `options` is an object that can contain the following keys:
- - 'type' - Return also the type of pin (direct, indirect or recursive)
+- `options` - an object that can contain the following keys:
+ - `type` - filter by this type of pin ("recursive", "direct" or "indirect")
**Returns**
| Type | Description |
| -------- | -------- |
-| `Promise` | An array of current pinned objects |
-
-an array of objects with keys `hash` and `type` is returned.
+| `AsyncIterable<{ cid: CID, type: string }>` | An async iterable that yields currently pinned objects with `cid` and `type` properties. `cid` is a [CID][cid] of the pinned node, `type` is the pin type ("recursive", "direct" or "indirect") |
**Example:**
```JavaScript
-const pinset = await ipfs.pin.ls()
-console.log(pinset)
-// Logs
-// [
-// { hash: Qmc5XkteJdb337s7VwFBAGtiaoj2QCEzyxtNRy3iMudc3E, type: 'recursive' },
-// { hash: QmZbj5ruYneZb8FuR9wnLqJCpCXMQudhSdWhdhp5U1oPWJ, type: 'indirect' },
-// { hash: QmSo73bmN47gBxMNqbdV6rZ4KJiqaArqJ1nu5TvFhqqj1R, type: 'indirect' }
-// ]
+for await (const { cid, type } of ipfs.pin.ls()) {
+ console.log({ cid, type })
+}
+// { cid: CID(Qmc5XkteJdb337s7VwFBAGtiaoj2QCEzyxtNRy3iMudc3E), type: 'recursive' }
+// { cid: CID(QmZbj5ruYneZb8FuR9wnLqJCpCXMQudhSdWhdhp5U1oPWJ), type: 'indirect' }
+// { cid: CID(QmSo73bmN47gBxMNqbdV6rZ4KJiqaArqJ1nu5TvFhqqj1R), type: 'indirect' }
```
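+
+The `type` option narrows the listing to a single pin type; a short sketch (assuming options may be passed without a CID, as in the signature above):
+
+```JavaScript
+// a sketch: list only recursive pins
+for await (const { cid } of ipfs.pin.ls({ type: 'recursive' })) {
+  console.log(cid.toString())
+}
+```
+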
A great source of [examples][] can be found in the tests for this API.
@@ -94,14 +88,15 @@ Where:
| Type | Description |
| -------- | -------- |
-| `Promise` | An array of unpinned objects |
+| `Promise<Array<{ cid: CID }>>` | An array of unpinned objects |
**Example:**
```JavaScript
-const pinset = await ipfs.pin.rm(hash)
+const pinset = await ipfs.pin.rm('QmWATWQ7fVPP2EFGu71UkfnqhYXDYH566qy47CnJDgvs8u')
console.log(pinset)
// prints the hashes that were unpinned
+// [ { cid: CID('QmWATWQ7fVPP2EFGu71UkfnqhYXDYH566qy47CnJDgvs8u') } ]
```
A great source of [examples][] can be found in the tests for this API.
diff --git a/SPEC/PUBSUB.md b/SPEC/PUBSUB.md
index 1f794a0d..63d505b3 100644
--- a/SPEC/PUBSUB.md
+++ b/SPEC/PUBSUB.md
@@ -6,9 +6,6 @@
* [pubsub.ls](#pubsubls)
* [pubsub.peers](#pubsubpeers)
-### ⚠️ Note
-Although not listed in the documentation, all the following APIs that actually return a **promise** can also accept a **final callback** parameter.
-
#### `pubsub.subscribe`
> Subscribe to a pubsub topic.
@@ -53,12 +50,6 @@ If the `topic` and `handler` are provided, the `handler` will no longer receive
If **only** the `topic` param is provided, unsubscribe will remove **all** handlers for the `topic`. This behaves like [EventEmitter.removeAllListeners](https://nodejs.org/dist/latest/docs/api/events.html#events_emitter_removealllisteners_eventname). Use this if you would like to no longer receive any updates for the `topic`.
-**WARNING:** Unsubscribe is an async operation, but removing **all** handlers for a topic can only be done using the Promises API (due to the difficulty in distinguishing between a "handler" and a "callback" - they are both functions). If you _need_ to know when unsubscribe has completed you must use `await` or `.then` on the return value from
-
-```JavaScript
-ipfs.pubsub.unsubscribe('topic')
-```
-
**Returns**
| Type | Description |
@@ -79,6 +70,7 @@ console.log(`unsubscribed from ${topic}`)
```
Or removing all listeners:
+
```JavaScript
const topic = 'fruit-of-the-day'
const receiveMsg = (msg) => console.log(msg.toString())
@@ -130,7 +122,7 @@ A great source of [examples][] can be found in the tests for this API.
| Type | Description |
| -------- | -------- |
-| `Promise` | An array of topicIDs that the peer is subscribed to |
+| `Promise` | An array of topicIDs that the peer is subscribed to |
**Example:**
@@ -153,14 +145,14 @@ A great source of [examples][] can be found in the tests for this API.
| Type | Description |
| -------- | -------- |
-| `Promise` | An array of peer IDs subscribed to the `topic` |
+| `Promise` | An array of peer IDs subscribed to the `topic` |
**Example:**
```JavaScript
const topic = 'fruit-of-the-day'
-const peerIds = ipfs.pubsub.peers(topic)
+const peerIds = await ipfs.pubsub.peers(topic)
console.log(peerIds)
```
diff --git a/SPEC/REFS.md b/SPEC/REFS.md
index c2439a60..13627268 100644
--- a/SPEC/REFS.md
+++ b/SPEC/REFS.md
@@ -1,14 +1,7 @@
# Refs API
* [refs](#refs)
-* [refsReadableStream](#refsreadablestream)
-* [refsPullStream](#refspullstream)
* [refs.local](#refslocal)
-* [refs.localReadableStream](#refslocalreadablestream)
-* [refs.localPullStream](#refslocalpullstream)
-
-### ⚠️ Note
-Although not listed in the documentation, all the following APIs that actually return a **promise** can also accept a **final callback** parameter.
#### `refs`
@@ -33,27 +26,27 @@ Although not listed in the documentation, all the following APIs that actually r
- `format ("")`: output edges with given format. Available tokens: ``, ``, ``
- `edges (false)`: output references in edge format: `" -> "`
- `maxDepth (1)`: only for recursive refs, limits fetch and listing to the given depth
+ - `timeout (number|string)`: Throw an error if the request does not complete within the specified timeout (in milliseconds). If `timeout` is a string, the value is parsed as a [human readable duration](https://www.npmjs.com/package/parse-duration). There is no timeout by default.
**Returns**
| Type | Description |
| -------- | -------- |
-| `Promise` | An array representing the links (references) |
+| `AsyncIterable` | An async iterable that yields objects representing the links (references) |
+
+Each yielded object is of the form:
-example of the returned array:
```js
{
- ref: "myref",
- err: "error msg"
+ ref: string,
+ err: Error | null
}
```
**Example:**
```JavaScript
-const refs = await ipfs.refs(ipfsPath, { recursive: true })
-
-for (const ref of refs) {
+for await (const ref of ipfs.refs(ipfsPath, { recursive: true })) {
if (ref.err) {
console.error(ref.err)
} else {
@@ -63,64 +56,6 @@ for (const ref of refs) {
}
```
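+
+The `edges` option switches each reference to the `"<from> -> <to>"` edge format described above; a minimal sketch (same `ipfsPath` as in the previous example):
+
+```JavaScript
+// a sketch: print references as edges
+for await (const ref of ipfs.refs(ipfsPath, { edges: true })) {
+  console.log(ref.ref)
+}
+```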
-#### `refsReadableStream`
-
-> Output references using a [Readable Stream][rs]
-
-##### `ipfs.refsReadableStream(ipfsPath, [options])`
-
-`options` is an optional object argument identical to the options for [ipfs.refs](#refs)
-
-**Returns**
-
-| Type | Description |
-| -------- | -------- |
-| `ReadableStream` | A [Readable Stream][rs] representing the references |
-
-**Example:**
-
-```JavaScript
-const stream = ipfs.refsReadableStream(ipfsPath, { recursive: true })
-stream.on('data', function (ref) {
- // 'ref' will be of the form
- // {
- // ref: 'QmHash',
- // err: 'err message'
- // }
-})
-```
-
-#### `refsPullStream`
-
-> Output references using a [Pull Stream][ps].
-
-##### `ipfs.refsReadableStream(ipfsPath, [options])`
-
-`options` is an optional object argument identical to the options for [ipfs.refs](#refs)
-
-**Returns**
-
-| Type | Description |
-| -------- | -------- |
-| `PullStream` | A [Pull Stream][ps] representing the references |
-
-**Example:**
-
-```JavaScript
-const stream = ipfs.refsPullStream(ipfsPath, { recursive: true })
-
-pull(
- stream,
- pull.collect((err, values) => {
- // values will be an array of objects, each one of the form
- // {
- // ref: 'QmHash',
- // err: 'err message'
- // }
- })
-)
-```
-
#### `refs.local`
> Output all local references (CIDs of all blocks in the blockstore)
@@ -131,94 +66,31 @@ pull(
| Type | Description |
| -------- | -------- |
-| `Promise` | An array representing all the local references |
+| `AsyncIterable` | An async iterable that yields objects representing all the local references |
+
+Each yielded object is of the form:
-example of the returned array:
```js
{
- ref: "myref",
- err: "error msg"
+ ref: string,
+ err: Error | null
}
```
**Example:**
```JavaScript
-ipfs.refs.local(function (err, refs) {
- if (err) {
- throw err
- }
-
- for (const ref of refs) {
- if (ref.err) {
- console.error(ref.err)
- } else {
- console.log(ref.ref)
- // output: "QmHash"
- }
+for await (const ref of ipfs.refs.local()) {
+ if (ref.err) {
+ console.error(ref.err)
+ } else {
+ console.log(ref.ref)
+ // output: "QmHash"
}
-})
-```
-
-#### `refs.localReadableStream`
-
-> Output all local references using a [Readable Stream][rs]
-
-##### `ipfs.localReadableStream()`
-
-**Returns**
-
-| Type | Description |
-| -------- | -------- |
-| `ReadableStream` | A [Readable Stream][rs] representing all the local references |
-
-**Example:**
-
-```JavaScript
-const stream = ipfs.refs.localReadableStream()
-stream.on('data', function (ref) {
- // 'ref' will be of the form
- // {
- // ref: 'QmHash',
- // err: 'err message'
- // }
-})
-```
-
-#### `refs.localPullStream`
-
-> Output all local references using a [Pull Stream][ps].
-
-##### `ipfs.refs.localReadableStream()`
-
-**Returns**
-
-| Type | Description |
-| -------- | -------- |
-| `PullStream` | A [Pull Stream][ps] representing all the local references |
-
-**Example:**
-
-```JavaScript
-const stream = ipfs.refs.localPullStream()
-
-pull(
- stream,
- pull.collect((err, values) => {
- // values will be an array of objects, each one of the form
- // {
- // ref: 'QmHash',
- // err: 'err message'
- // }
- })
-)
+}
```
-A great source of [examples][] can be found in the tests for this API.
-
[examples]: https://github.com/ipfs/interface-ipfs-core/blob/master/src/files-regular
[b]: https://www.npmjs.com/package/buffer
-[rs]: https://www.npmjs.com/package/readable-stream
-[ps]: https://www.npmjs.com/package/pull-stream
[cid]: https://www.npmjs.com/package/cids
[blob]: https://developer.mozilla.org/en-US/docs/Web/API/Blob
diff --git a/SPEC/REPO.md b/SPEC/REPO.md
index 03789f5d..37c8a930 100644
--- a/SPEC/REPO.md
+++ b/SPEC/REPO.md
@@ -4,9 +4,6 @@
* [repo.stat](#repostat)
* [repo.version](#repoversion)
-### ⚠️ Note
-Although not listed in the documentation, all the following APIs that actually return a **promise** can also accept a **final callback** parameter.
-
#### `repo.gc`
> Perform a garbage collection sweep on the repo.
@@ -23,18 +20,19 @@ Where:
| Type | Description |
| -------- | -------- |
-| `Promise` | An array of objects |
+| `AsyncIterable` | An async iterable that yields objects describing nodes that were garbage collected |
-each object contains the following properties:
+Each yielded object contains the following properties:
-- `err` is an Error if it was not possible to GC a particular block.
+- `err` is an `Error` if it was not possible to GC a particular block.
- `cid` is the [CID][cid] of the block that was Garbage Collected.
**Example:**
```JavaScript
-const res = await ipfs.repo.gc()
-console.log(res)
+for await (const res of ipfs.repo.gc()) {
+ console.log(res)
+}
```
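+
+Since each yielded object may carry an `err`, callers will typically branch on it; a minimal sketch:
+
+```JavaScript
+for await (const { err, cid } of ipfs.repo.gc()) {
+  if (err) {
+    console.error(err)
+  } else {
+    console.log(cid.toString())
+  }
+}
+```
+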
#### `repo.stat`
diff --git a/SPEC/STATS.md b/SPEC/STATS.md
index 67cf0119..7d350cd7 100644
--- a/SPEC/STATS.md
+++ b/SPEC/STATS.md
@@ -3,11 +3,6 @@
* [stats.bitswap](#statsbitswap)
* [stats.repo](#statsrepo)
* [stats.bw](#statsbw)
-* [stats.bwPullStream](#statsbwpullstream)
-* [stats.bwReadableStream](#statsbwreadablestream)
-
-### ⚠️ Note
-Although not listed in the documentation, all the following APIs that actually return a **promise** can also accept a **final callback** parameter.
#### `stats.bitswap`
@@ -23,7 +18,7 @@ Note: `stats.repo` and `repo.stat` can be used interchangeably. See [`repo.stat`
#### `stats.bw`
-> Get IPFS bandwidth information as an object.
+> Get IPFS bandwidth information.
##### `ipfs.stats.bw([options])`
@@ -32,16 +27,16 @@ Where:
- `options` is an optional object that might contain the following keys:
- `peer` specifies a peer to print bandwidth for.
- `proto` specifies a protocol to print bandwidth for.
- - `poll` is used to print bandwidth at an interval.
- - `interval` is the time interval to wait between updating output, if `poll` is true.
+ - `poll` is used to yield bandwidth info at an interval.
+ - `interval` is the time interval to wait between updating output, if `poll` is `true`.
**Returns**
| Type | Description |
| -------- | -------- |
-| `Promise` | An object representing IPFS bandwidth information |
+| `AsyncIterable` | An async iterable that yields IPFS bandwidth information |
-the returned object contains the following keys:
+Each yielded object contains the following keys:
- `totalIn` - is a [BigNumber Int][bigNumber], in bytes.
- `totalOut` - is a [BigNumber Int][bigNumber], in bytes.
@@ -51,88 +46,16 @@ the returned object contains the following keys:
**Example:**
```JavaScript
-const stats = await ipfs.stats.bw()
-
-console.log(stats)
-// { totalIn: BigNumber {...},
-// totalOut: BigNumber {...},
-// rateIn: BigNumber {...},
-// rateOut: BigNumber {...} }
-```
-
-A great source of [examples][] can be found in the tests for this API.
-
-#### `stats.bwPullStream`
-
-> Get IPFS bandwidth information as a [Pull Stream][ps].
-
-##### `ipfs.stats.bwPullStream([options])`
-
-Options are described on [`ipfs.stats.bw`](#bw).
-
-**Returns**
-
-| Type | Description |
-| -------- | -------- |
-| `PullStream` | A [Pull Stream][ps] representing IPFS bandwidth information |
-
-**Example:**
-
-```JavaScript
-const pull = require('pull-stream')
-const log = require('pull-stream/sinks/log')
-
-const stream = ipfs.stats.bwPullStream({ poll: true })
-
-pull(
- stream,
- log()
-)
-
-// { totalIn: BigNumber {...},
-// totalOut: BigNumber {...},
-// rateIn: BigNumber {...},
-// rateOut: BigNumber {...} }
-// ...
-// Ad infinitum
-```
-
-A great source of [examples][] can be found in the tests for this API.
-
-#### `stats.bwReadableStream`
-
-> Get IPFS bandwidth information as a [Readable Stream][rs].
-
-##### `ipfs.stats.bwReadableStream([options])`
-
-Options are described on [`ipfs.stats.bw`](#bw).
-
-**Returns**
-
-| Type | Description |
-| -------- | -------- |
-| `ReadableStream` | A [Readable Stream][rs] representing IPFS bandwidth information |
-
-**Example:**
-
-```JavaScript
-const stream = ipfs.stats.bwReadableStream({ poll: true })
-
-stream.on('data', (data) => {
- console.log(data)
-}))
-
+for await (const stats of ipfs.stats.bw()) {
+ console.log(stats)
+}
// { totalIn: BigNumber {...},
// totalOut: BigNumber {...},
// rateIn: BigNumber {...},
// rateOut: BigNumber {...} }
-// ...
-// Ad infinitum
```
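+
+With `poll` enabled the iterable keeps yielding updated readings; a sketch that samples once and stops (assuming a millisecond `interval`):
+
+```JavaScript
+// a sketch: poll bandwidth stats every 2 seconds, stop after the first sample
+for await (const stats of ipfs.stats.bw({ poll: true, interval: 2000 })) {
+  console.log(`in: ${stats.rateIn.toFixed(0)} B/s, out: ${stats.rateOut.toFixed(0)} B/s`)
+  break
+}
+```
+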
A great source of [examples][] can be found in the tests for this API.
[bigNumber]: https://github.com/MikeMcl/bignumber.js/
-[rs]: https://www.npmjs.com/package/readable-stream
-[ps]: https://www.npmjs.com/package/pull-stream
[examples]: https://github.com/ipfs/interface-ipfs-core/blob/master/src/stats
diff --git a/SPEC/SWARM.md b/SPEC/SWARM.md
index e61a8bf0..b82eb258 100644
--- a/SPEC/SWARM.md
+++ b/SPEC/SWARM.md
@@ -8,9 +8,6 @@
* [swarm.filters.add](#swarmfiltersadd) (not implemented yet)
* [swarm.filters.rm](#swarmfiltersrm) (not implemented yet)
-### ⚠️ Note
-Although not listed in the documentation, all the following APIs that actually return a **promise** can also accept a **final callback** parameter.
-
#### `swarm.addrs`
> List of known addresses of each peer connected.
@@ -21,13 +18,29 @@ Although not listed in the documentation, all the following APIs that actually r
| Type | Description |
| -------- | -------- |
-| `Promise` | An array of of [`PeerInfo`](https://github.com/libp2p/js-peer-info)s |
+| `Promise<Array<{ id: CID, addrs: Multiaddr[] }>>` | A promise that resolves to an array of objects, one per peer, where `id` is the peer's ID (a [`CID`](https://github.com/multiformats/js-cid)) and `addrs` is an array of the peer's addresses ([Multiaddr](https://github.com/multiformats/js-multiaddr/) instances). |
**Example:**
```JavaScript
const peerInfos = await ipfs.swarm.addrs()
-console.log(peerInfos)
+
+peerInfos.forEach(info => {
+ console.log(info.id.toString())
+ /*
+ QmcZf59bWwK5XFi76CZX8cbJ4BhTzzA3gU1ZjYZcYW3dwt
+ */
+
+ info.addrs.forEach(addr => console.log(addr.toString()))
+ /*
+ /ip4/147.75.94.115/udp/4001/quic
+ /ip6/2604:1380:3000:1f00::1/udp/4001/quic
+ /dnsaddr/bootstrap.libp2p.io
+ /ip6/2604:1380:3000:1f00::1/tcp/4001
+ /ip4/147.75.94.115/tcp/4001
+ */
+})
```
A great source of [examples][] can be found in the tests for this API.
@@ -86,7 +99,7 @@ A great source of [examples][] can be found in the tests for this API.
| Type | Description |
| -------- | -------- |
-| `Promise` | An array of [`MultiAddr`](https://github.com/multiformats/js-multiaddr) representing the local addresses the node is listening |
+| `Promise` | An array of [`Multiaddr`](https://github.com/multiformats/js-multiaddr) representing the local addresses the node is listening |
**Example:**
@@ -103,31 +116,33 @@ A great source of [examples][] can be found in the tests for this API.
##### `ipfs.swarm.peers([options])`
-If `options.verbose` is set to `true` additional information, such as `latency` is provided.
+`options` is an optional object with the following properties:
+ - `direction` - set to `true` to return connection direction information. Default `false`
+ - `streams` - set to `true` to return information about open muxed streams. Default `false`
+ - `verbose` - set to `true` to return all extra information. Default `false`
+ - `latency` - set to `true` to return latency information. Default `false`
**Returns**
| Type | Description |
| -------- | -------- |
-| `Promise` | An array with the list of peers that the node have connections with |
+| `Promise` | An array with the list of peers that the node has connections with |
-the returned array has the following form:
+The returned array has the following form:
- `addr: Multiaddr`
-- `peer: PeerId`
-- `latency: String` Only if `verbose: true` was passed
-
-Starting with `go-ipfs 0.4.5` these additional properties are provided
-
+- `peer: CID`
+- `latency: String` - Only if `verbose: true` was passed
- `muxer: String` - The type of stream muxer the peer is using
-- `streams: []String` - Only if `verbose: true`, a list of currently open streams
+- `streams: string[]` - Only if `verbose: true`, a list of currently open streams
+- `direction: number` - Inbound or outbound connection
-If an error occurs trying to create an individual `peerInfo` object, it will have the properties
+If an error occurs trying to create an individual object, it will have the properties:
- `error: Error` - the error that occurred
- `rawPeerInfo: Object` - the raw data for the peer
-and all other properties may be undefined.
+All other properties may be `undefined`.
**Example:**
diff --git a/package.json b/package.json
index f13c6f0e..b0d41dbd 100644
--- a/package.json
+++ b/package.json
@@ -1,11 +1,13 @@
{
"name": "interface-ipfs-core",
- "version": "0.128.0",
+ "version": "0.129.0",
"description": "A test suite and interface you can use to implement a IPFS core interface.",
"leadMaintainer": "Alan Shaw ",
"main": "src/index.js",
"browser": {
- "fs": false
+ "fs": false,
+ "os": false,
+ "ipfs-utils/src/files/glob-source": false
},
"scripts": {
"test": "exit 0",
@@ -36,43 +38,28 @@
},
"homepage": "https://github.com/ipfs/interface-ipfs-core#readme",
"dependencies": {
- "bl": "^3.0.0",
- "bs58": "^4.0.1",
- "callbackify": "^1.1.0",
"chai": "^4.2.0",
"chai-as-promised": "^7.1.1",
"cids": "~0.7.1",
- "concat-stream": "^2.0.0",
"delay": "^4.3.0",
"dirty-chai": "^2.0.1",
- "es6-promisify": "^6.0.1",
- "get-stream": "^5.1.0",
"hat": "0.0.3",
"ipfs-block": "~0.8.0",
"ipfs-unixfs": "^0.3.0",
- "ipfs-utils": "^0.4.2",
+ "ipfs-utils": "^0.6.0",
"ipld-dag-cbor": "~0.15.0",
"ipld-dag-pb": "^0.18.1",
"is-ipfs": "~0.6.1",
- "is-plain-object": "^3.0.0",
+ "it-all": "^1.0.1",
+ "it-concat": "^1.0.0",
+ "it-last": "^1.0.1",
"it-pushable": "^1.3.1",
- "libp2p-crypto": "~0.16.0",
- "multiaddr": "^6.0.0",
+ "multiaddr": "^7.2.1",
"multibase": "~0.6.0",
"multihashes": "~0.4.14",
- "multihashing-async": "~0.6.0",
- "p-each-series": "^2.1.0",
- "p-map-series": "^2.1.0",
- "p-timeout": "^3.2.0",
- "p-times": "^2.1.0",
- "peer-id": "~0.12.2",
- "peer-info": "~0.15.1",
- "pull-stream": "^3.6.11",
- "pull-to-promise": "^1.0.1",
- "pump": "^3.0.0",
- "readable-stream": "^3.1.1",
- "streaming-iterables": "^4.1.0",
- "through2": "^3.0.0"
+ "multihashing-async": "^0.8.0",
+ "peer-id": "~0.13.5",
+ "readable-stream": "^3.4.0"
},
"devDependencies": {
"aegir": "^20.3.2",
@@ -81,7 +68,6 @@
"contributors": [
"Alan Shaw ",
"Alan Shaw ",
- "Alex Potsides ",
"Andrey ",
"Dan Ordille ",
"Dan Shields <35669742+NukeManDan@users.noreply.github.com>",
@@ -131,6 +117,7 @@
"Vasco Santos ",
"Volker Mische ",
"Vutsal Singhal ",
+ "achingbrain ",
"dirkmc ",
"greenkeeper[bot] <23040076+greenkeeper[bot]@users.noreply.github.com>",
"greenkeeper[bot] ",
diff --git a/src/files-regular/add.js b/src/add.js
similarity index 51%
rename from src/files-regular/add.js
rename to src/add.js
index 6c11e88d..1172ec9a 100644
--- a/src/files-regular/add.js
+++ b/src/add.js
@@ -2,11 +2,19 @@
'use strict'
const { fixtures } = require('./utils')
-const Readable = require('readable-stream').Readable
-const pull = require('pull-stream')
-const expectTimeout = require('../utils/expect-timeout')
-const { getDescribe, getIt, expect } = require('../utils/mocha')
+const { Readable } = require('readable-stream')
+const all = require('it-all')
+const fs = require('fs')
+const os = require('os')
+const path = require('path')
const { supportsFileReader } = require('ipfs-utils/src/supports')
+const globSource = require('ipfs-utils/src/files/glob-source')
+const urlSource = require('ipfs-utils/src/files/url-source')
+const { isNode } = require('ipfs-utils/src/env')
+const { getDescribe, getIt, expect } = require('./utils/mocha')
+const { echoUrl, redirectUrl } = require('./utils/echo-http-server')
+
+const fixturesPath = path.join(__dirname, '..', 'test', 'fixtures')
/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
/**
@@ -24,28 +32,27 @@ module.exports = (common, options) => {
async function testMode (mode, expectedMode) {
const content = String(Math.random() + Date.now())
- const files = await ipfs.add({
+ const files = await all(ipfs.add({
content: Buffer.from(content),
mode
- })
-
+ }))
expect(files).to.have.length(1)
expect(files).to.have.nested.property('[0].mode', expectedMode)
- const stats = await ipfs.files.stat(`/ipfs/${files[0].hash}`)
+ const stats = await ipfs.files.stat(`/ipfs/${files[0].cid}`)
expect(stats).to.have.property('mode', expectedMode)
}
async function testMtime (mtime, expectedMtime) {
const content = String(Math.random() + Date.now())
- const files = await ipfs.add({
+ const files = await all(ipfs.add({
content: Buffer.from(content),
mtime
- })
+ }))
expect(files).to.have.length(1)
expect(files).to.have.deep.nested.property('[0].mtime', expectedMtime)
- const stats = await ipfs.files.stat(`/ipfs/${files[0].hash}`)
+ const stats = await ipfs.files.stat(`/ipfs/${files[0].cid}`)
expect(stats).to.have.deep.property('mtime', expectedMtime)
}
@@ -56,8 +63,8 @@ module.exports = (common, options) => {
it('should add a File', async function () {
if (!supportsFileReader) return this.skip('skip in node')
- const filesAdded = await ipfs.add(new self.File(['should add a File'], 'filename.txt', { type: 'text/plain' }))
- expect(filesAdded[0].hash).to.be.eq('QmTVfLxf3qXiJgr4KwG6UBckcNvTqBp93Rwy5f7h3mHsVC')
+ const filesAdded = await all(ipfs.add(new self.File(['should add a File'], 'filename.txt', { type: 'text/plain' })))
+ expect(filesAdded[0].cid.toString()).to.be.eq('QmTVfLxf3qXiJgr4KwG6UBckcNvTqBp93Rwy5f7h3mHsVC')
})
it('should add a File as tuple', async function () {
@@ -68,8 +75,8 @@ module.exports = (common, options) => {
content: new self.File(['should add a File'], 'filename.txt', { type: 'text/plain' })
}
- const filesAdded = await ipfs.add(tuple)
- expect(filesAdded[0].hash).to.be.eq('QmTVfLxf3qXiJgr4KwG6UBckcNvTqBp93Rwy5f7h3mHsVC')
+ const filesAdded = await all(ipfs.add(tuple))
+ expect(filesAdded[0].cid.toString()).to.be.eq('QmTVfLxf3qXiJgr4KwG6UBckcNvTqBp93Rwy5f7h3mHsVC')
})
it('should add a File as array of tuple', async function () {
@@ -80,27 +87,27 @@ module.exports = (common, options) => {
content: new self.File(['should add a File'], 'filename.txt', { type: 'text/plain' })
}
- const filesAdded = await ipfs.add([tuple])
- expect(filesAdded[0].hash).to.be.eq('QmTVfLxf3qXiJgr4KwG6UBckcNvTqBp93Rwy5f7h3mHsVC')
+ const filesAdded = await all(ipfs.add([tuple]))
+ expect(filesAdded[0].cid.toString()).to.be.eq('QmTVfLxf3qXiJgr4KwG6UBckcNvTqBp93Rwy5f7h3mHsVC')
})
it('should add a Buffer', async () => {
- const filesAdded = await ipfs.add(fixtures.smallFile.data)
+ const filesAdded = await all(ipfs.add(fixtures.smallFile.data))
expect(filesAdded).to.have.length(1)
const file = filesAdded[0]
- expect(file.hash).to.equal(fixtures.smallFile.cid)
+ expect(file.cid.toString()).to.equal(fixtures.smallFile.cid)
expect(file.path).to.equal(fixtures.smallFile.cid)
// file.size counts the overhead by IPLD nodes and unixfs protobuf
expect(file.size).greaterThan(fixtures.smallFile.data.length)
})
it('should add a BIG Buffer', async () => {
- const filesAdded = await ipfs.add(fixtures.bigFile.data)
+ const filesAdded = await all(ipfs.add(fixtures.bigFile.data))
expect(filesAdded).to.have.length(1)
const file = filesAdded[0]
- expect(file.hash).to.equal(fixtures.bigFile.cid)
+ expect(file.cid.toString()).to.equal(fixtures.bigFile.cid)
expect(file.path).to.equal(fixtures.bigFile.cid)
// file.size counts the overhead by IPLD nodes and unixfs protobuf
expect(file.size).greaterThan(fixtures.bigFile.data.length)
@@ -114,11 +121,11 @@ module.exports = (common, options) => {
accumProgress = p
}
- const filesAdded = await ipfs.add(fixtures.bigFile.data, { progress: handler })
+ const filesAdded = await all(ipfs.add(fixtures.bigFile.data, { progress: handler }))
expect(filesAdded).to.have.length(1)
const file = filesAdded[0]
- expect(file.hash).to.equal(fixtures.bigFile.cid)
+ expect(file.cid.toString()).to.equal(fixtures.bigFile.cid)
expect(file.path).to.equal(fixtures.bigFile.cid)
expect(progCalled).to.be.true()
expect(accumProgress).to.equal(fixtures.bigFile.data.length)
@@ -127,11 +134,11 @@ module.exports = (common, options) => {
it('should add a Buffer as tuple', async () => {
const tuple = { path: 'testfile.txt', content: fixtures.smallFile.data }
- const filesAdded = await ipfs.add([tuple])
+ const filesAdded = await all(ipfs.add([tuple]))
expect(filesAdded).to.have.length(1)
const file = filesAdded[0]
- expect(file.hash).to.equal(fixtures.smallFile.cid)
+ expect(file.cid.toString()).to.equal(fixtures.smallFile.cid)
expect(file.path).to.equal('testfile.txt')
})
@@ -139,26 +146,26 @@ module.exports = (common, options) => {
const data = 'a string'
const expectedCid = 'QmQFRCwEpwQZ5aQMqCsCaFbdjNLLHoyZYDjr92v1F7HeqX'
- const filesAdded = await ipfs.add(data)
+ const filesAdded = await all(ipfs.add(data))
expect(filesAdded).to.be.length(1)
- const { path, size, hash } = filesAdded[0]
+ const { path, size, cid } = filesAdded[0]
expect(path).to.equal(expectedCid)
expect(size).to.equal(16)
- expect(hash).to.equal(expectedCid)
+ expect(cid.toString()).to.equal(expectedCid)
})
it('should add a TypedArray', async () => {
const data = Uint8Array.from([1, 3, 8])
const expectedCid = 'QmRyUEkVCuHC8eKNNJS9BDM9jqorUvnQJK1DM81hfngFqd'
- const filesAdded = await ipfs.add(data)
+ const filesAdded = await all(ipfs.add(data))
expect(filesAdded).to.be.length(1)
- const { path, size, hash } = filesAdded[0]
+ const { path, size, cid } = filesAdded[0]
expect(path).to.equal(expectedCid)
expect(size).to.equal(11)
- expect(hash).to.equal(expectedCid)
+ expect(cid.toString()).to.equal(expectedCid)
})
it('should add readable stream', async () => {
@@ -168,13 +175,13 @@ module.exports = (common, options) => {
rs.push(Buffer.from('some data'))
rs.push(null)
- const filesAdded = await ipfs.add(rs)
+ const filesAdded = await all(ipfs.add(rs))
expect(filesAdded).to.be.length(1)
const file = filesAdded[0]
expect(file.path).to.equal(expectedCid)
expect(file.size).to.equal(17)
- expect(file.hash).to.equal(expectedCid)
+ expect(file.cid.toString()).to.equal(expectedCid)
})
it('should add array of objects with readable stream content', async () => {
@@ -186,29 +193,13 @@ module.exports = (common, options) => {
const tuple = { path: 'data.txt', content: rs }
- const filesAdded = await ipfs.add([tuple])
+ const filesAdded = await all(ipfs.add([tuple]))
expect(filesAdded).to.be.length(1)
const file = filesAdded[0]
expect(file.path).to.equal('data.txt')
expect(file.size).to.equal(17)
- expect(file.hash).to.equal(expectedCid)
- })
-
- it('should add pull stream', async () => {
- const expectedCid = 'QmRf22bZar3WKmojipms22PkXH1MZGmvsqzQtuSvQE3uhm'
-
- const res = await ipfs.add(pull.values([Buffer.from('test')]))
- expect(res).to.have.length(1)
- expect(res[0]).to.include({ path: expectedCid, hash: expectedCid, size: 12 })
- })
-
- it('should add array of objects with pull stream content', async () => {
- const expectedCid = 'QmRf22bZar3WKmojipms22PkXH1MZGmvsqzQtuSvQE3uhm'
-
- const res = await ipfs.add([{ content: pull.values([Buffer.from('test')]) }])
- expect(res).to.have.length(1)
- expect(res[0]).to.include({ path: expectedCid, hash: expectedCid, size: 12 })
+ expect(file.cid.toString()).to.equal(expectedCid)
})
it('should add a nested directory as array of tupples', async function () {
@@ -230,11 +221,11 @@ module.exports = (common, options) => {
emptyDir('files/empty')
]
- const res = await ipfs.add(dirs)
+ const res = await all(ipfs.add(dirs))
const root = res[res.length - 1]
expect(root.path).to.equal('test-folder')
- expect(root.hash).to.equal(fixtures.directory.cid)
+ expect(root.cid.toString()).to.equal(fixtures.directory.cid)
})
it('should add a nested directory as array of tupples with progress', async function () {
@@ -267,13 +258,13 @@ module.exports = (common, options) => {
accumProgress += p
}
- const filesAdded = await ipfs.add(dirs, { progress: handler })
+ const filesAdded = await all(ipfs.add(dirs, { progress: handler }))
const root = filesAdded[filesAdded.length - 1]
expect(progCalled).to.be.true()
expect(accumProgress).to.be.at.least(total)
expect(root.path).to.equal('test-folder')
- expect(root.hash).to.equal(fixtures.directory.cid)
+ expect(root.cid.toString()).to.equal(fixtures.directory.cid)
})
it('should add files to a directory non sequentially', async function () {
@@ -289,7 +280,7 @@ module.exports = (common, options) => {
content('a/alice.txt')
]
- const filesAdded = await ipfs.add(input)
+ const filesAdded = await all(ipfs.add(input))
const toPath = ({ path }) => path
const nonSeqDirFilePaths = input.map(toPath).filter(p => p.includes('/a/'))
@@ -301,18 +292,18 @@ module.exports = (common, options) => {
it('should fail when passed invalid input', () => {
const nonValid = 138
- return expect(ipfs.add(nonValid)).to.eventually.be.rejected()
+ return expect(all(ipfs.add(nonValid))).to.eventually.be.rejected()
})
it('should wrap content in a directory', async () => {
const data = { path: 'testfile.txt', content: fixtures.smallFile.data }
- const filesAdded = await ipfs.add(data, { wrapWithDirectory: true })
+ const filesAdded = await all(ipfs.add(data, { wrapWithDirectory: true }))
expect(filesAdded).to.have.length(2)
const file = filesAdded[0]
const wrapped = filesAdded[1]
- expect(file.hash).to.equal(fixtures.smallFile.cid)
+ expect(file.cid.toString()).to.equal(fixtures.smallFile.cid)
expect(file.path).to.equal('testfile.txt')
expect(wrapped.path).to.equal('')
})
@@ -321,10 +312,12 @@ module.exports = (common, options) => {
this.slow(10 * 1000)
const content = String(Math.random() + Date.now())
- const files = await ipfs.add(Buffer.from(content), { onlyHash: true })
+ const files = await all(ipfs.add(Buffer.from(content), { onlyHash: true }))
expect(files).to.have.length(1)
- await expectTimeout(ipfs.object.get(files[0].hash), 4000)
+ await expect(ipfs.object.get(files[0].cid, { timeout: 4000 }))
+ .to.eventually.be.rejected()
+ .and.to.have.property('name').that.equals('TimeoutError')
})
it('should add with mode as string', async function () {
@@ -376,5 +369,147 @@ module.exports = (common, options) => {
nsecs: mtime[1]
})
})
+
+ it('should add a directory from the file system', async function () {
+ if (!isNode) this.skip()
+
+ const filesPath = path.join(fixturesPath, 'test-folder')
+
+ const result = await all(ipfs.add(globSource(filesPath, { recursive: true })))
+ expect(result.length).to.be.above(8)
+ })
+
+ it('should add a directory from the file system with an odd name', async function () {
+ if (!isNode) this.skip()
+
+ const filesPath = path.join(fixturesPath, 'weird name folder [v0]')
+
+ const result = await all(ipfs.add(globSource(filesPath, { recursive: true })))
+ expect(result.length).to.be.above(8)
+ })
+
+ it('should ignore a directory from the file system', async function () {
+ if (!isNode) this.skip()
+
+ const filesPath = path.join(fixturesPath, 'test-folder')
+
+ const result = await all(ipfs.add(globSource(filesPath, { recursive: true, ignore: ['files/**'] })))
+ expect(result.length).to.be.below(9)
+ })
+
+ it('should add a file from the file system', async function () {
+ if (!isNode) this.skip()
+
+ const filePath = path.join(fixturesPath, 'testfile.txt')
+
+ const result = await all(ipfs.add(globSource(filePath)))
+ expect(result.length).to.equal(1)
+ expect(result[0].path).to.equal('testfile.txt')
+ })
+
+ it('should add a hidden file in a directory from the file system', async function () {
+ if (!isNode) this.skip()
+
+ const filesPath = path.join(fixturesPath, 'hidden-files-folder')
+
+ const result = await all(ipfs.add(globSource(filesPath, { recursive: true, hidden: true })))
+ expect(result.length).to.be.above(10)
+ expect(result.map(object => object.path)).to.include('hidden-files-folder/.hiddenTest.txt')
+ expect(result.map(object => object.cid.toString())).to.include('QmdbAjVmLRdpFyi8FFvjPfhTGB2cVXvWLuK7Sbt38HXrtt')
+ })
+
+ it('should add a file from the file system with only-hash=true', async function () {
+ if (!isNode) this.skip()
+
+ this.slow(10 * 1000)
+
+ const content = String(Math.random() + Date.now())
+ const filepath = path.join(os.tmpdir(), `${content}.txt`)
+ fs.writeFileSync(filepath, content)
+
+ const out = await all(ipfs.add(globSource(filepath), { onlyHash: true }))
+
+ fs.unlinkSync(filepath)
+
+ await expect(ipfs.object.get(out[0].cid, { timeout: 500 }))
+ .to.eventually.be.rejected()
+ .and.to.have.property('name').that.equals('TimeoutError')
+ })
+
+ it('should add from a HTTP URL', async () => {
+ const text = `TEST${Date.now()}`
+ const url = echoUrl(text)
+
+ const [result, expectedResult] = await Promise.all([
+ all(ipfs.add(urlSource(url))),
+ all(ipfs.add(Buffer.from(text)))
+ ])
+
+ expect(result.err).to.not.exist()
+ expect(expectedResult.err).to.not.exist()
+ expect(result[0].cid.toString()).to.equal(expectedResult[0].cid.toString())
+ expect(result[0].size).to.equal(expectedResult[0].size)
+ expect(result[0].path).to.equal(text)
+ })
+
+ it('should add from a HTTP URL with redirection', async () => {
+ const text = `TEST${Date.now()}`
+ const url = echoUrl(text) + '?foo=bar#buzz'
+
+ const [result, expectedResult] = await Promise.all([
+ all(ipfs.add(urlSource(redirectUrl(url)))),
+ all(ipfs.add(Buffer.from(text)))
+ ])
+
+ expect(result.err).to.not.exist()
+ expect(expectedResult.err).to.not.exist()
+ expect(result[0].cid.toString()).to.equal(expectedResult[0].cid.toString())
+ expect(result[0].size).to.equal(expectedResult[0].size)
+ expect(result[0].path).to.equal(text)
+ })
+
+ it('should add from a URL with only-hash=true', async function () {
+ const text = `TEST${Date.now()}`
+ const url = echoUrl(text)
+
+ const res = await all(ipfs.add(urlSource(url), { onlyHash: true }))
+
+ await expect(ipfs.object.get(res[0].cid, { timeout: 500 }))
+ .to.eventually.be.rejected()
+ .and.to.have.property('name').that.equals('TimeoutError')
+ })
+
+ it('should add from a URL with wrap-with-directory=true', async () => {
+ const filename = `TEST${Date.now()}.txt` // also acts as data
+ const url = echoUrl(filename) + '?foo=bar#buzz'
+ const addOpts = { wrapWithDirectory: true }
+
+ const [result, expectedResult] = await Promise.all([
+ all(ipfs.add(urlSource(url), addOpts)),
+ all(ipfs.add([{ path: filename, content: Buffer.from(filename) }], addOpts))
+ ])
+ expect(result.err).to.not.exist()
+ expect(expectedResult.err).to.not.exist()
+ expect(result).to.deep.equal(expectedResult)
+ })
+
+ it('should add from a URL with wrap-with-directory=true and URL-escaped file name', async () => {
+ const filename = `320px-Domažlice,_Jiráskova_43_(${Date.now()}).jpg` // also acts as data
+ const url = echoUrl(filename) + '?foo=bar#buzz'
+ const addOpts = { wrapWithDirectory: true }
+
+ const [result, expectedResult] = await Promise.all([
+ all(ipfs.add(urlSource(url), addOpts)),
+ all(ipfs.add([{ path: filename, content: Buffer.from(filename) }], addOpts))
+ ])
+
+ expect(result.err).to.not.exist()
+ expect(expectedResult.err).to.not.exist()
+ expect(result).to.deep.equal(expectedResult)
+ })
+
+ it('should not add from an invalid url', () => {
+ return expect(all(ipfs.add(urlSource('123http://invalid')))).to.eventually.be.rejected()
+ })
})
}
diff --git a/src/bitswap/utils.js b/src/bitswap/utils.js
index 7e6ae646..bcf75781 100644
--- a/src/bitswap/utils.js
+++ b/src/bitswap/utils.js
@@ -4,16 +4,18 @@ const delay = require('delay')
async function waitForWantlistKey (ipfs, key, opts = {}) {
opts.timeout = opts.timeout || 10000
+ opts.interval = opts.interval || 100
+
const end = Date.now() + opts.timeout
while (Date.now() < end) {
const list = await ipfs.bitswap.wantlist(opts.peerId)
- if (list && list.Keys && list.Keys.some(k => k['/'] === key)) {
+ if (list.some(cid => cid.toString() === key)) {
return
}
- await delay(500)
+ await delay(opts.interval)
}
throw new Error(`Timed out waiting for ${key} in wantlist`)
diff --git a/src/bitswap/wantlist.js b/src/bitswap/wantlist.js
index 7a79ebf1..7d673258 100644
--- a/src/bitswap/wantlist.js
+++ b/src/bitswap/wantlist.js
@@ -23,7 +23,7 @@ module.exports = (common, options) => {
ipfsA = (await common.spawn()).api
ipfsB = (await common.spawn({ type: 'go' })).api
// Add key to the wantlist for ipfsB
- ipfsB.block.get(key).catch(() => {})
+ ipfsB.block.get(key).catch(() => { /* ok, expected to fail on teardown */ })
await ipfsA.swarm.connect(ipfsB.peerId.addresses[0])
})
diff --git a/src/block/rm.js b/src/block/rm.js
index 72665a5b..37cd17d9 100644
--- a/src/block/rm.js
+++ b/src/block/rm.js
@@ -3,6 +3,7 @@
const { getDescribe, getIt, expect } = require('../utils/mocha')
const hat = require('hat')
+const all = require('it-all')
/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
/**
@@ -27,17 +28,17 @@ module.exports = (common, options) => {
})
// block should be present in the local store
- const localRefs = await ipfs.refs.local()
+ const localRefs = await all(ipfs.refs.local())
expect(localRefs).to.have.property('length').that.is.greaterThan(0)
expect(localRefs.find(ref => ref.ref === cid.toString())).to.be.ok()
- const result = await ipfs.block.rm(cid)
+ const result = await all(ipfs.block.rm(cid))
expect(result).to.be.an('array').and.to.have.lengthOf(1)
- expect(result[0]).to.have.property('hash', cid.toString())
+ expect(result[0].cid.toString()).equal(cid.toString())
expect(result[0]).to.not.have.property('error')
// did we actually remove the block?
- const localRefsAfterRemove = await ipfs.refs.local()
+ const localRefsAfterRemove = await all(ipfs.refs.local())
expect(localRefsAfterRemove).to.have.property('length').that.is.greaterThan(0)
expect(localRefsAfterRemove.find(ref => ref.ref === cid.toString())).to.not.be.ok()
})
@@ -47,10 +48,10 @@ module.exports = (common, options) => {
format: 'raw',
hashAlg: 'sha2-256'
})
- const result = await ipfs.block.rm(cid.toString())
+ const result = await all(ipfs.block.rm(cid.toString()))
expect(result).to.be.an('array').and.to.have.lengthOf(1)
- expect(result[0]).to.have.property('hash', cid.toString())
+ expect(result[0].cid.toString()).to.equal(cid.toString())
expect(result[0]).to.not.have.property('error')
})
@@ -59,10 +60,10 @@ module.exports = (common, options) => {
format: 'raw',
hashAlg: 'sha2-256'
})
- const result = await ipfs.block.rm(cid.buffer)
+ const result = await all(ipfs.block.rm(cid.buffer))
expect(result).to.be.an('array').and.to.have.lengthOf(1)
- expect(result[0]).to.have.property('hash', cid.toString())
+ expect(result[0].cid.toString()).to.equal(cid.toString())
expect(result[0]).to.not.have.property('error')
})
@@ -82,12 +83,12 @@ module.exports = (common, options) => {
})
]
- const result = await ipfs.block.rm(cids)
+ const result = await all(ipfs.block.rm(cids))
expect(result).to.be.an('array').and.to.have.lengthOf(3)
result.forEach((res, index) => {
- expect(res).to.have.property('hash', cids[index].toString())
+ expect(res.cid.toString()).to.equal(cids[index].toString())
expect(res).to.not.have.property('error')
})
})
@@ -99,13 +100,14 @@ module.exports = (common, options) => {
})
// remove it
- await ipfs.block.rm(cid)
+ await all(ipfs.block.rm(cid))
// remove it again
- const result = await ipfs.block.rm(cid)
+ const result = await all(ipfs.block.rm(cid))
expect(result).to.be.an('array').and.to.have.lengthOf(1)
- expect(result[0]).to.have.property('error').and.to.include('block not found')
+ expect(result[0]).to.have.property('error')
+ expect(result[0].error.message).to.include('block not found')
})
it('should not error when force removing non-existent blocks', async () => {
@@ -115,13 +117,13 @@ module.exports = (common, options) => {
})
// remove it
- await ipfs.block.rm(cid)
+ await all(ipfs.block.rm(cid))
// remove it again
- const result = await ipfs.block.rm(cid, { force: true })
+ const result = await all(ipfs.block.rm(cid, { force: true }))
expect(result).to.be.an('array').and.to.have.lengthOf(1)
- expect(result[0]).to.have.property('hash', cid.toString())
+ expect(result[0].cid.toString()).to.equal(cid.toString())
expect(result[0]).to.not.have.property('error')
})
@@ -130,7 +132,7 @@ module.exports = (common, options) => {
format: 'raw',
hashAlg: 'sha2-256'
})
- const result = await ipfs.block.rm(cid, { quiet: true })
+ const result = await all(ipfs.block.rm(cid, { quiet: true }))
expect(result).to.be.an('array').and.to.have.lengthOf(0)
})
@@ -142,10 +144,11 @@ module.exports = (common, options) => {
})
await ipfs.pin.add(cid.toString())
- const result = await ipfs.block.rm(cid)
+ const result = await all(ipfs.block.rm(cid))
expect(result).to.be.an('array').and.to.have.lengthOf(1)
- expect(result[0]).to.have.property('error').and.to.include('pinned')
+ expect(result[0]).to.have.property('error')
+ expect(result[0].error.message).to.include('pinned')
})
})
}
diff --git a/src/block/stat.js b/src/block/stat.js
index e2354b7e..285d1d51 100644
--- a/src/block/stat.js
+++ b/src/block/stat.js
@@ -27,10 +27,8 @@ module.exports = (common, options) => {
it('should stat by CID', async () => {
const cid = new CID(hash)
-
const stats = await ipfs.block.stat(cid)
-
- expect(stats).to.have.property('key')
+ expect(stats.cid.toString()).to.equal(cid.toString())
expect(stats).to.have.property('size')
})
diff --git a/src/bootstrap/add.js b/src/bootstrap/add.js
index 510888f6..b4030c67 100644
--- a/src/bootstrap/add.js
+++ b/src/bootstrap/add.js
@@ -4,7 +4,7 @@
const { getDescribe, getIt, expect } = require('../utils/mocha')
const invalidArg = 'this/Is/So/Invalid/'
-const validIp4 = '/ip4/104.236.176.52/tcp/4001/ipfs/QmSoLnSGccFuZQJzRadHn95W2CrSFmZuTdDWP8HXaHca9z'
+const validIp4 = '/ip4/104.236.176.52/tcp/4001/p2p/QmSoLnSGccFuZQJzRadHn95W2CrSFmZuTdDWP8HXaHca9z'
/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
/**
diff --git a/src/bootstrap/rm.js b/src/bootstrap/rm.js
index 839eb499..d3047d1b 100644
--- a/src/bootstrap/rm.js
+++ b/src/bootstrap/rm.js
@@ -13,7 +13,7 @@ module.exports = (common, options) => {
const it = getIt(options)
const invalidArg = 'this/Is/So/Invalid/'
- const validIp4 = '/ip4/104.236.176.52/tcp/4001/ipfs/QmSoLnSGccFuZQJzRadHn95W2CrSFmZuTdDWP8HXaHca9z'
+ const validIp4 = '/ip4/104.236.176.52/tcp/4001/p2p/QmSoLnSGccFuZQJzRadHn95W2CrSFmZuTdDWP8HXaHca9z'
describe('.bootstrap.rm', function () {
this.timeout(100 * 1000)
diff --git a/src/files-regular/cat.js b/src/cat.js
similarity index 66%
rename from src/files-regular/cat.js
rename to src/cat.js
index f3429759..c866acda 100644
--- a/src/files-regular/cat.js
+++ b/src/cat.js
@@ -2,9 +2,10 @@
'use strict'
const { fixtures } = require('./utils')
-const bs58 = require('bs58')
const CID = require('cids')
-const { getDescribe, getIt, expect } = require('../utils/mocha')
+const concat = require('it-concat')
+const all = require('it-all')
+const { getDescribe, getIt, expect } = require('./utils/mocha')
/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
/**
@@ -25,79 +26,79 @@ module.exports = (common, options) => {
after(() => common.clean())
before(() => Promise.all([
- ipfs.add(fixtures.smallFile.data),
- ipfs.add(fixtures.bigFile.data)
+ all(ipfs.add(fixtures.smallFile.data)),
+ all(ipfs.add(fixtures.bigFile.data))
]))
it('should cat with a base58 string encoded multihash', async () => {
- const data = await ipfs.cat(fixtures.smallFile.cid)
+ const data = await concat(ipfs.cat(fixtures.smallFile.cid))
expect(data.toString()).to.contain('Plz add me!')
})
it('should cat with a Buffer multihash', async () => {
- const cid = Buffer.from(bs58.decode(fixtures.smallFile.cid))
+ const cid = new CID(fixtures.smallFile.cid).multihash
- const data = await ipfs.cat(cid)
+ const data = await concat(ipfs.cat(cid))
expect(data.toString()).to.contain('Plz add me!')
})
it('should cat with a CID object', async () => {
const cid = new CID(fixtures.smallFile.cid)
- const data = await ipfs.cat(cid)
+ const data = await concat(ipfs.cat(cid))
expect(data.toString()).to.contain('Plz add me!')
})
it('should cat a file added as CIDv0 with a CIDv1', async () => {
const input = Buffer.from(`TEST${Date.now()}`)
- const res = await ipfs.add(input, { cidVersion: 0 })
+ const res = await all(ipfs.add(input, { cidVersion: 0 }))
- const cidv0 = new CID(res[0].hash)
+ const cidv0 = res[0].cid
expect(cidv0.version).to.equal(0)
const cidv1 = cidv0.toV1()
- const output = await ipfs.cat(cidv1)
- expect(output).to.eql(input)
+ const output = await concat(ipfs.cat(cidv1))
+ expect(output.slice()).to.eql(input)
})
it('should cat a file added as CIDv1 with a CIDv0', async () => {
const input = Buffer.from(`TEST${Date.now()}`)
- const res = await ipfs.add(input, { cidVersion: 1, rawLeaves: false })
+ const res = await all(ipfs.add(input, { cidVersion: 1, rawLeaves: false }))
- const cidv1 = new CID(res[0].hash)
+ const cidv1 = res[0].cid
expect(cidv1.version).to.equal(1)
const cidv0 = cidv1.toV0()
- const output = await ipfs.cat(cidv0)
- expect(output).to.eql(input)
+ const output = await concat(ipfs.cat(cidv0))
+ expect(output.slice()).to.eql(input)
})
it('should cat a BIG file', async () => {
- const data = await ipfs.cat(fixtures.bigFile.cid)
+ const data = await concat(ipfs.cat(fixtures.bigFile.cid))
expect(data.length).to.equal(fixtures.bigFile.data.length)
- expect(data).to.eql(fixtures.bigFile.data)
+ expect(data.slice()).to.eql(fixtures.bigFile.data)
})
it('should cat with IPFS path', async () => {
const ipfsPath = '/ipfs/' + fixtures.smallFile.cid
- const data = await ipfs.cat(ipfsPath)
+ const data = await concat(ipfs.cat(ipfsPath))
expect(data.toString()).to.contain('Plz add me!')
})
it('should cat with IPFS path, nested value', async () => {
const fileToAdd = { path: 'a/testfile.txt', content: fixtures.smallFile.data }
- const filesAdded = await ipfs.add([fileToAdd])
+ const filesAdded = await all(ipfs.add([fileToAdd]))
const file = await filesAdded.find((f) => f.path === 'a')
expect(file).to.exist()
- const data = await ipfs.cat(`/ipfs/${file.hash}/testfile.txt`)
+ const data = await concat(ipfs.cat(`/ipfs/${file.cid}/testfile.txt`))
expect(data.toString()).to.contain('Plz add me!')
})
@@ -105,23 +106,23 @@ module.exports = (common, options) => {
it('should cat with IPFS path, deeply nested value', async () => {
const fileToAdd = { path: 'a/b/testfile.txt', content: fixtures.smallFile.data }
- const filesAdded = await ipfs.add([fileToAdd])
+ const filesAdded = await all(ipfs.add([fileToAdd]))
const file = filesAdded.find((f) => f.path === 'a')
expect(file).to.exist()
- const data = await ipfs.cat(`/ipfs/${file.hash}/b/testfile.txt`)
+ const data = await concat(ipfs.cat(`/ipfs/${file.cid}/b/testfile.txt`))
expect(data.toString()).to.contain('Plz add me!')
})
it('should error on invalid key', () => {
const invalidCid = 'somethingNotMultihash'
- return expect(ipfs.cat(invalidCid)).to.eventually.be.rejected()
+ return expect(concat(ipfs.cat(invalidCid))).to.eventually.be.rejected()
})
it('should error on unknown path', () => {
- return expect(ipfs.cat(fixtures.smallFile.cid + '/does-not-exist')).to.eventually.be.rejected()
+ return expect(concat(ipfs.cat(fixtures.smallFile.cid + '/does-not-exist'))).to.eventually.be.rejected()
.and.be.an.instanceOf(Error)
.and.to.have.property('message')
.to.be.oneOf([
@@ -133,7 +134,7 @@ module.exports = (common, options) => {
it('should error on dir path', async () => {
const file = { path: 'dir/testfile.txt', content: fixtures.smallFile.data }
- const filesAdded = await ipfs.add([file])
+ const filesAdded = await all(ipfs.add([file]))
expect(filesAdded.length).to.equal(2)
const files = filesAdded.filter((file) => file.path === 'dir')
@@ -141,7 +142,7 @@ module.exports = (common, options) => {
const dir = files[0]
- const err = await expect(ipfs.cat(dir.hash)).to.be.rejected()
+ const err = await expect(concat(ipfs.cat(dir.cid))).to.be.rejected()
expect(err.message).to.contain('this dag node is a directory')
})
@@ -149,7 +150,7 @@ module.exports = (common, options) => {
const offset = 1
const length = 3
- const data = await ipfs.cat(fixtures.smallFile.cid, { offset, length })
+ const data = await concat(ipfs.cat(fixtures.smallFile.cid, { offset, length }))
expect(data.toString()).to.equal('lz ')
})
})
diff --git a/src/config/get.js b/src/config/get.js
index 7f8efeca..7e1d15fd 100644
--- a/src/config/get.js
+++ b/src/config/get.js
@@ -2,7 +2,6 @@
'use strict'
const { getDescribe, getIt, expect } = require('../utils/mocha')
-const isPlainObject = require('is-plain-object')
/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
/**
@@ -25,7 +24,6 @@ module.exports = (common, options) => {
const config = await ipfs.config.get()
expect(config).to.be.an('object')
- expect(isPlainObject(config)).to.equal(true)
})
it('should retrieve a value through a key', async () => {
diff --git a/src/dag/get.js b/src/dag/get.js
index 67a25cda..adaf63d7 100644
--- a/src/dag/get.js
+++ b/src/dag/get.js
@@ -1,12 +1,11 @@
/* eslint-env mocha */
'use strict'
-const pEachSeries = require('p-each-series')
const dagPB = require('ipld-dag-pb')
const DAGNode = dagPB.DAGNode
const dagCBOR = require('ipld-dag-cbor')
const Unixfs = require('ipfs-unixfs')
-const CID = require('cids')
+const all = require('it-all')
const { getDescribe, getIt, expect } = require('../utils/mocha')
/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
@@ -47,13 +46,8 @@ module.exports = (common, options) => {
cidCbor = await dagCBOR.util.cid(dagCBOR.util.serialize(nodeCbor))
- await pEachSeries([
- { node: nodePb, multicodec: 'dag-pb', hashAlg: 'sha2-256' },
- { node: nodeCbor, multicodec: 'dag-cbor', hashAlg: 'sha2-256' }
- ], (el) => ipfs.dag.put(el.node, {
- format: el.multicodec,
- hashAlg: el.hashAlg
- }))
+ await ipfs.dag.put(nodePb, { format: 'dag-pb', hashAlg: 'sha2-256' })
+ await ipfs.dag.put(nodeCbor, { format: 'dag-cbor', hashAlg: 'sha2-256' })
})
it('should get a dag-pb node', async () => {
@@ -160,9 +154,9 @@ module.exports = (common, options) => {
it('should get a node added as CIDv1 with a CIDv0', async () => {
const input = Buffer.from(`TEST${Date.now()}`)
- const res = await ipfs.add(input, { cidVersion: 1, rawLeaves: false })
+ const res = await all(ipfs.add(input, { cidVersion: 1, rawLeaves: false }))
- const cidv1 = new CID(res[0].hash)
+ const cidv1 = res[0].cid
expect(cidv1.version).to.equal(1)
const cidv0 = cidv1.toV0()
diff --git a/src/dag/tree.js b/src/dag/tree.js
index 36a20673..7ccc63de 100644
--- a/src/dag/tree.js
+++ b/src/dag/tree.js
@@ -1,10 +1,10 @@
/* eslint-env mocha */
'use strict'
-const pEachSeries = require('p-each-series')
const dagPB = require('ipld-dag-pb')
const DAGNode = dagPB.DAGNode
const dagCBOR = require('ipld-dag-cbor')
+const all = require('it-all')
const { getDescribe, getIt, expect } = require('../utils/mocha')
/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
@@ -38,17 +38,12 @@ module.exports = (common, options) => {
}
cidCbor = await dagCBOR.util.cid(dagCBOR.util.serialize(nodeCbor))
- await pEachSeries([
- { node: nodePb, multicodec: 'dag-pb', hashAlg: 'sha2-256' },
- { node: nodeCbor, multicodec: 'dag-cbor', hashAlg: 'sha2-256' }
- ], (el) => ipfs.dag.put(el.node, {
- format: el.multicodec,
- hashAlg: el.hashAlg
- }))
+ await ipfs.dag.put(nodePb, { format: 'dag-pb', hashAlg: 'sha2-256' })
+ await ipfs.dag.put(nodeCbor, { format: 'dag-cbor', hashAlg: 'sha2-256' })
})
it('should get tree with CID', async () => {
- const paths = await ipfs.dag.tree(cidCbor)
+ const paths = await all(ipfs.dag.tree(cidCbor))
expect(paths).to.eql([
'pb',
'someData'
@@ -56,19 +51,19 @@ module.exports = (common, options) => {
})
it('should get tree with CID and path', async () => {
- const paths = await ipfs.dag.tree(cidCbor, 'someData')
+ const paths = await all(ipfs.dag.tree(cidCbor, 'someData'))
expect(paths).to.eql([])
})
it('should get tree with CID and path as String', async () => {
const cidCborStr = cidCbor.toBaseEncodedString()
- const paths = await ipfs.dag.tree(cidCborStr + '/someData')
+ const paths = await all(ipfs.dag.tree(cidCborStr + '/someData'))
expect(paths).to.eql([])
})
it('should get tree with CID recursive (across different formats)', async () => {
- const paths = await ipfs.dag.tree(cidCbor, { recursive: true })
+ const paths = await all(ipfs.dag.tree(cidCbor, { recursive: true }))
expect(paths).to.have.members([
'pb',
'someData',
@@ -78,7 +73,7 @@ module.exports = (common, options) => {
})
it('should get tree with CID and path recursive', async () => {
- const paths = await ipfs.dag.tree(cidCbor, 'pb', { recursive: true })
+ const paths = await all(ipfs.dag.tree(cidCbor, 'pb', { recursive: true }))
expect(paths).to.have.members([
'Links',
'Data'
diff --git a/src/dht/find-peer.js b/src/dht/find-peer.js
index 4237b0d4..8e6c69e5 100644
--- a/src/dht/find-peer.js
+++ b/src/dht/find-peer.js
@@ -29,9 +29,9 @@ module.exports = (common, options) => {
it('should find other peers', async () => {
const res = await nodeA.dht.findPeer(nodeB.peerId.id)
- const id = res.id.toB58String()
+ const id = res.id.toString()
const nodeAddresses = nodeB.peerId.addresses.map((addr) => addr.split('/ipfs/')[0]) // remove '/ipfs/'
- const peerAddresses = res.multiaddrs.toArray().map((ma) => ma.toString().split('/ipfs/')[0])
+ const peerAddresses = res.addrs.map(ma => ma.toString().split('/ipfs/')[0])
expect(id).to.be.eql(nodeB.peerId.id)
expect(nodeAddresses).to.include(peerAddresses[0])
diff --git a/src/dht/find-provs.js b/src/dht/find-provs.js
index 52787f6f..35d3b1d6 100644
--- a/src/dht/find-provs.js
+++ b/src/dht/find-provs.js
@@ -1,17 +1,9 @@
/* eslint-env mocha */
'use strict'
-const multihashing = require('multihashing-async')
-const CID = require('cids')
const { getDescribe, getIt, expect } = require('../utils/mocha')
-
-async function fakeCid () {
- const bytes = Buffer.from(`TEST${Date.now()}`)
-
- const mh = await multihashing(bytes, 'sha2-256')
-
- return new CID(0, 'dag-pb', mh)
-}
+const all = require('it-all')
+const { fakeCid } = require('./utils')
/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
/**
@@ -52,16 +44,16 @@ module.exports = (common, options) => {
providedCid = cids[0]
await Promise.all([
- nodeB.dht.provide(providedCid),
- nodeC.dht.provide(providedCid)
+ all(nodeB.dht.provide(providedCid)),
+ all(nodeC.dht.provide(providedCid))
])
})
it('should be able to find providers', async function () {
this.timeout(20 * 1000)
- const provs = await nodeA.dht.findProvs(providedCid)
- const providerIds = provs.map((p) => p.id.toB58String())
+ const provs = await all(nodeA.dht.findProvs(providedCid, { numProviders: 2 }))
+ const providerIds = provs.map((p) => p.id.toString())
expect(providerIds).to.have.members([
nodeB.peerId.id,
@@ -76,7 +68,7 @@ module.exports = (common, options) => {
const cidV0 = await fakeCid()
- await expect(nodeA.dht.findProvs(cidV0, options)).to.be.rejected()
+ await expect(all(nodeA.dht.findProvs(cidV0, options))).to.be.rejected()
})
})
}
diff --git a/src/dht/get.js b/src/dht/get.js
index 356a4122..9e987d3c 100644
--- a/src/dht/get.js
+++ b/src/dht/get.js
@@ -32,9 +32,10 @@ module.exports = (common, options) => {
.and.be.an.instanceOf(Error)
})
- it('should get a value after it was put on another node', async function () {
- this.timeout(80 * 1000)
-
+ // TODO: revisit this test - it puts an invalid key and so go-ipfs throws
+ // "invalid record keytype" - it needs to put a valid key and value for it to
+ // be a useful test.
+ it.skip('should get a value after it was put on another node', async () => {
const key = Buffer.from(hat())
const value = Buffer.from(hat())
diff --git a/src/dht/provide.js b/src/dht/provide.js
index 696f3aa7..54e84f57 100644
--- a/src/dht/provide.js
+++ b/src/dht/provide.js
@@ -2,6 +2,7 @@
'use strict'
const CID = require('cids')
+const all = require('it-all')
const { getDescribe, getIt, expect } = require('../utils/mocha')
/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
@@ -27,46 +28,40 @@ module.exports = (common, options) => {
after(() => common.clean())
it('should provide local CID', async () => {
- const res = await ipfs.add(Buffer.from('test'))
+ const res = await all(ipfs.add(Buffer.from('test')))
- await ipfs.dht.provide(new CID(res[0].hash))
+ await all(ipfs.dht.provide(res[0].cid))
})
it('should not provide if block not found locally', () => {
const cid = new CID('Qmd7qZS4T7xXtsNFdRoK1trfMs5zU94EpokQ9WFtxdPxsZ')
- return expect(ipfs.dht.provide(cid)).to.eventually.be.rejected
+ return expect(all(ipfs.dht.provide(cid))).to.eventually.be.rejected
.and.be.an.instanceOf(Error)
.and.have.property('message')
.that.include('not found locally')
})
it('should allow multiple CIDs to be passed', async () => {
- const res = await ipfs.add([
+ const res = await all(ipfs.add([
{ content: Buffer.from('t0') },
{ content: Buffer.from('t1') }
- ])
+ ]))
- await ipfs.dht.provide([
- new CID(res[0].hash),
- new CID(res[1].hash)
- ])
+ await all(ipfs.dht.provide(res.map(f => f.cid)))
})
it('should provide a CIDv1', async () => {
- const res = await ipfs.add(Buffer.from('test'), { cidVersion: 1 })
-
- const cid = new CID(res[0].hash)
-
- await ipfs.dht.provide(cid)
+ const res = await all(ipfs.add(Buffer.from('test'), { cidVersion: 1 }))
+ await all(ipfs.dht.provide(res[0].cid))
})
it('should error on non CID arg', () => {
- return expect(ipfs.dht.provide({})).to.eventually.be.rejected()
+ return expect(all(ipfs.dht.provide({}))).to.eventually.be.rejected()
})
it('should error on array containing non CID arg', () => {
- return expect(ipfs.dht.provide([{}])).to.eventually.be.rejected()
+ return expect(all(ipfs.dht.provide([{}]))).to.eventually.be.rejected()
})
})
}
diff --git a/src/dht/query.js b/src/dht/query.js
index f5fea5ec..b5acdee2 100644
--- a/src/dht/query.js
+++ b/src/dht/query.js
@@ -1,8 +1,8 @@
/* eslint-env mocha */
'use strict'
-const pTimeout = require('p-timeout')
const { getDescribe, getIt, expect } = require('../utils/mocha')
+const all = require('it-all')
/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
/**
@@ -32,9 +32,8 @@ module.exports = (common, options) => {
this.timeout(timeout)
try {
- const peers = await pTimeout(nodeA.dht.query(nodeB.peerId.id), timeout - 1000)
-
- expect(peers.map((p) => p.id.toB58String())).to.include(nodeB.peerId.id)
+ const peers = await all(nodeA.dht.query(nodeB.peerId.id, { timeout: timeout - 1000 }))
+ expect(peers.map(p => p.id.toString())).to.include(nodeB.peerId.id)
} catch (err) {
if (err.name === 'TimeoutError') {
// This test is meh. DHT works best with >= 20 nodes. Therefore a
diff --git a/src/dht/utils.js b/src/dht/utils.js
new file mode 100644
index 00000000..9beceb78
--- /dev/null
+++ b/src/dht/utils.js
@@ -0,0 +1,10 @@
+'use strict'
+
+const multihashing = require('multihashing-async')
+const CID = require('cids')
+
+exports.fakeCid = async (data) => {
+ const bytes = data || Buffer.from(`TEST${Date.now()}`)
+ const mh = await multihashing(bytes, 'sha2-256')
+ return new CID(0, 'dag-pb', mh)
+}
diff --git a/src/files-mfs/ls-pull-stream.js b/src/files-mfs/ls-pull-stream.js
deleted file mode 100644
index d3a56599..00000000
--- a/src/files-mfs/ls-pull-stream.js
+++ /dev/null
@@ -1,73 +0,0 @@
-/* eslint-env mocha */
-'use strict'
-
-const hat = require('hat')
-const { getDescribe, getIt, expect } = require('../utils/mocha')
-const pullToPromise = require('pull-to-promise')
-
-/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
-/**
- * @param {Factory} common
- * @param {Object} options
- */
-module.exports = (common, options) => {
- const describe = getDescribe(options)
- const it = getIt(options)
-
- describe('.files.lsPullStream', function () {
- this.timeout(40 * 1000)
-
- let ipfs
-
- before(async () => { ipfs = (await common.spawn()).api })
-
- after(() => common.clean())
-
- it('should not ls not found file/dir, expect error', () => {
- const testDir = `/test-${hat()}`
-
- return expect(pullToPromise.any(ipfs.files.lsPullStream(`${testDir}/404`))).to.eventually.be.rejected
- .and.be.an.instanceOf(Error)
- .and.have.property('message')
- .that.include('does not exist')
- })
-
- it('should ls directory', async () => {
- const testDir = `/test-${hat()}`
-
- await ipfs.files.mkdir(`${testDir}/lv1`, { parents: true })
- await ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true })
-
- const entries = await pullToPromise.any(ipfs.files.lsPullStream(testDir))
-
- expect(entries).to.have.lengthOf(2)
- expect(entries).to.have.nested.property('[0].name', 'b')
- expect(entries).to.have.nested.property('[0].type', 0)
- expect(entries).to.have.nested.property('[0].size', 0)
- expect(entries).to.have.nested.property('[0].hash', '')
- expect(entries).to.have.nested.property('[1].name', 'lv1')
- expect(entries).to.have.nested.property('[1].type', 0)
- expect(entries).to.have.nested.property('[1].size', 0)
- expect(entries).to.have.nested.property('[1].hash', '')
- })
-
- it('should ls directory with long option', async () => {
- const testDir = `/test-${hat()}`
-
- await ipfs.files.mkdir(`${testDir}/lv1`, { parents: true })
- await ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true })
-
- const entries = await pullToPromise.any(ipfs.files.lsPullStream(testDir, { long: true }))
-
- expect(entries).to.have.lengthOf(2)
- expect(entries).to.have.nested.property('[0].name', 'b')
- expect(entries).to.have.nested.property('[0].type', 0)
- expect(entries).to.have.nested.property('[0].size', 13)
- expect(entries).to.have.nested.property('[0].hash', 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T')
- expect(entries).to.have.nested.property('[1].name', 'lv1')
- expect(entries).to.have.nested.property('[1].type', 1)
- expect(entries).to.have.nested.property('[1].size', 0)
- expect(entries).to.have.nested.property('[1].hash', 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn')
- })
- })
-}
diff --git a/src/files-mfs/ls-readable-stream.js b/src/files-mfs/ls-readable-stream.js
deleted file mode 100644
index 9360b56d..00000000
--- a/src/files-mfs/ls-readable-stream.js
+++ /dev/null
@@ -1,77 +0,0 @@
-/* eslint-env mocha */
-'use strict'
-
-const hat = require('hat')
-const { getDescribe, getIt, expect } = require('../utils/mocha')
-const getStream = require('get-stream')
-
-/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
-/**
- * @param {Factory} common
- * @param {Object} options
- */
-module.exports = (common, options) => {
- const describe = getDescribe(options)
- const it = getIt(options)
-
- describe('.files.lsReadableStream', function () {
- this.timeout(40 * 1000)
-
- let ipfs
-
- before(async () => { ipfs = (await common.spawn()).api })
-
- after(() => common.clean())
-
- it('should not ls not found file/dir, expect error', () => {
- const testDir = `/test-${hat()}`
- const stream = ipfs.files.lsReadableStream(`${testDir}/404`)
-
- return expect(getStream(stream)).to.eventually.be.rejected
- .and.be.an.instanceOf(Error)
- .and.have.property('message')
- .that.include('does not exist')
- })
-
- it('should ls directory', async () => {
- const testDir = `/test-${hat()}`
-
- await ipfs.files.mkdir(`${testDir}/lv1`, { parents: true })
- await ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true })
-
- const stream = ipfs.files.lsReadableStream(testDir)
-
- const entries = await getStream.array(stream)
-
- expect(entries).to.have.lengthOf(2)
- expect(entries).to.have.nested.property('[0].name', 'b')
- expect(entries).to.have.nested.property('[0].type', 0)
- expect(entries).to.have.nested.property('[0].size', 0)
- expect(entries).to.have.nested.property('[0].hash', '')
- expect(entries).to.have.nested.property('[1].name', 'lv1')
- expect(entries).to.have.nested.property('[1].type', 0)
- expect(entries).to.have.nested.property('[1].size', 0)
- expect(entries).to.have.nested.property('[1].hash', '')
- })
-
- it('should ls directory with long option', async () => {
- const testDir = `/test-${hat()}`
-
- await ipfs.files.mkdir(`${testDir}/lv1`, { parents: true })
- await ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true })
-
- const stream = ipfs.files.lsReadableStream(testDir, { long: true })
- const entries = await getStream.array(stream)
-
- expect(entries).to.have.lengthOf(2)
- expect(entries).to.have.nested.property('[0].name', 'b')
- expect(entries).to.have.nested.property('[0].type', 0)
- expect(entries).to.have.nested.property('[0].size', 13)
- expect(entries).to.have.nested.property('[0].hash', 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T')
- expect(entries).to.have.nested.property('[1].name', 'lv1')
- expect(entries).to.have.nested.property('[1].type', 1)
- expect(entries).to.have.nested.property('[1].size', 0)
- expect(entries).to.have.nested.property('[1].hash', 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn')
- })
- })
-}
diff --git a/src/files-mfs/ls.js b/src/files-mfs/ls.js
deleted file mode 100644
index 7ee5a530..00000000
--- a/src/files-mfs/ls.js
+++ /dev/null
@@ -1,136 +0,0 @@
-/* eslint-env mocha */
-'use strict'
-
-const hat = require('hat')
-const { fixtures } = require('../files-regular/utils')
-const { getDescribe, getIt, expect } = require('../utils/mocha')
-
-/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
-/**
- * @param {Factory} common
- * @param {Object} options
- */
-module.exports = (common, options) => {
- const describe = getDescribe(options)
- const it = getIt(options)
-
- describe('.files.ls', function () {
- this.timeout(40 * 1000)
-
- let ipfs
-
- before(async () => { ipfs = (await common.spawn()).api })
-
- after(() => common.clean())
-
- it('should not ls not found file/dir, expect error', () => {
- const testDir = `/test-${hat()}`
-
- return expect(ipfs.files.ls(`${testDir}/404`)).to.eventually.be.rejected()
- })
-
- it('should ls directory', async () => {
- const testDir = `/test-${hat()}`
-
- await ipfs.files.mkdir(`${testDir}/lv1`, { parents: true })
- await ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true })
-
- const entries = await ipfs.files.ls(testDir)
-
- expect(entries).to.have.lengthOf(2)
- expect(entries).to.have.nested.property('[0].name', 'b')
- expect(entries).to.have.nested.property('[0].type', 0)
- expect(entries).to.have.nested.property('[0].size', 0)
- expect(entries).to.have.nested.property('[0].hash', '')
- expect(entries).to.have.nested.property('[1].name', 'lv1')
- expect(entries).to.have.nested.property('[1].type', 0)
- expect(entries).to.have.nested.property('[1].size', 0)
- expect(entries).to.have.nested.property('[1].hash', '')
- })
-
- it('should ls directory with long option', async () => {
- const testDir = `/test-${hat()}`
-
- await ipfs.files.mkdir(`${testDir}/lv1`, { parents: true })
- await ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true })
-
- const entries = await ipfs.files.ls(testDir, { long: true })
-
- expect(entries).to.have.lengthOf(2)
- expect(entries).to.have.nested.property('[0].name', 'b')
- expect(entries).to.have.nested.property('[0].type', 0)
- expect(entries).to.have.nested.property('[0].size', 13)
- expect(entries).to.have.nested.property('[0].hash', 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T')
- expect(entries).to.have.nested.property('[1].name', 'lv1')
- expect(entries).to.have.nested.property('[1].type', 1)
- expect(entries).to.have.nested.property('[1].size', 0)
- expect(entries).to.have.nested.property('[1].hash', 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn')
- })
-
- it('ls directory with long option should include metadata', async () => {
- const testDir = `/test-${hat()}`
-
- await ipfs.files.mkdir(`${testDir}/lv1`, {
- parents: true,
- mtime: {
- secs: 5
- }
- })
- await ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), {
- create: true,
- mtime: {
- secs: 5
- }
- })
-
- const entries = await ipfs.files.ls(testDir, { long: true })
-
- expect(entries).to.have.lengthOf(2)
- expect(entries).to.have.nested.property('[0].hash', 'QmTVnczjg445RUAEYNH1wvhVa2rnPoWMfHMxQc6W7HHoyM')
- expect(entries).to.have.nested.property('[0].mode', 0o0644)
- expect(entries).to.have.nested.deep.property('[0].mtime', {
- secs: 5,
- nsecs: 0
- })
- expect(entries).to.have.nested.property('[1].hash', 'QmXkBjmbtWUxXLa3s541UBSzPgvaAR7b8X3Amcp5D1VKTQ')
- expect(entries).to.have.nested.property('[1].mode', 0o0755)
- expect(entries).to.have.nested.deep.property('[1].mtime', {
- secs: 5,
- nsecs: 0
- })
- })
-
- it('should ls from outside of mfs', async () => {
- const testFileName = hat()
- const [{
- hash
- }] = await ipfs.add({ path: `/test/${testFileName}`, content: fixtures.smallFile.data })
- const listing = await ipfs.files.ls('/ipfs/' + hash)
- expect(listing).to.have.length(1)
- expect(listing[0].name).to.equal(hash)
- })
-
- it('should list an empty directory', async () => {
- const testDir = `/test-${hat()}`
- await ipfs.files.mkdir(testDir)
- const contents = await ipfs.files.ls(testDir)
-
- expect(contents).to.be.an('array').and.to.be.empty()
- })
-
- it('should list a file directly', async () => {
- const fileName = `single-file-${hat()}.txt`
- const filePath = `/${fileName}`
- await ipfs.files.write(filePath, Buffer.from('Hello world'), {
- create: true
- })
- const entries = await ipfs.files.ls(filePath)
-
- expect(entries).to.have.lengthOf(1)
- expect(entries).to.have.nested.property('[0].name', fileName)
- expect(entries).to.have.nested.property('[0].type', 0)
- expect(entries).to.have.nested.property('[0].size', 0)
- expect(entries).to.have.nested.property('[0].hash', '')
- })
- })
-}
diff --git a/src/files-mfs/read-pull-stream.js b/src/files-mfs/read-pull-stream.js
deleted file mode 100644
index ccd0c0f9..00000000
--- a/src/files-mfs/read-pull-stream.js
+++ /dev/null
@@ -1,46 +0,0 @@
-/* eslint-env mocha */
-'use strict'
-
-const hat = require('hat')
-const { getDescribe, getIt, expect } = require('../utils/mocha')
-const pullToPromise = require('pull-to-promise')
-
-/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
-/**
- * @param {Factory} common
- * @param {Object} options
- */
-module.exports = (common, options) => {
- const describe = getDescribe(options)
- const it = getIt(options)
-
- describe('.files.readPullStream', function () {
- this.timeout(40 * 1000)
-
- let ipfs
-
- before(async () => { ipfs = (await common.spawn()).api })
-
- after(() => common.clean())
-
- it('should not read not found, expect error', () => {
- const testDir = `/test-${hat()}`
-
- return expect(pullToPromise.any(ipfs.files.readPullStream(`${testDir}/404`))).to.eventually.be.rejected
- .and.be.an.instanceOf(Error)
- .and.have.property('message')
- .that.include('does not exist')
- })
-
- it('should read file', async () => {
- const testDir = `/test-${hat()}`
-
- await ipfs.files.mkdir(testDir)
- await ipfs.files.write(`${testDir}/a`, Buffer.from('Hello, world!'), { create: true })
-
- const bufs = await pullToPromise.any(ipfs.files.readPullStream(`${testDir}/a`))
-
- expect(bufs).to.eql([Buffer.from('Hello, world!')])
- })
- })
-}
diff --git a/src/files-mfs/read-readable-stream.js b/src/files-mfs/read-readable-stream.js
deleted file mode 100644
index c3f87101..00000000
--- a/src/files-mfs/read-readable-stream.js
+++ /dev/null
@@ -1,48 +0,0 @@
-/* eslint-env mocha */
-'use strict'
-
-const hat = require('hat')
-const { getDescribe, getIt, expect } = require('../utils/mocha')
-const getStream = require('get-stream')
-
-/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
-/**
- * @param {Factory} common
- * @param {Object} options
- */
-module.exports = (common, options) => {
- const describe = getDescribe(options)
- const it = getIt(options)
-
- describe('.files.readReadableStream', function () {
- this.timeout(40 * 1000)
-
- let ipfs
-
- before(async () => { ipfs = (await common.spawn()).api })
-
- after(() => common.clean())
-
- it('should not read not found, expect error', () => {
- const testDir = `/test-${hat()}`
- const stream = ipfs.files.readReadableStream(`${testDir}/404`)
-
- return expect(getStream(stream)).to.eventually.be.rejected
- .and.be.an.instanceOf(Error)
- .and.have.property('message')
- .that.include('does not exist')
- })
-
- it('should read file', async () => {
- const testDir = `/test-${hat()}`
-
- await ipfs.files.mkdir(testDir)
- await ipfs.files.write(`${testDir}/a`, Buffer.from('Hello, world!'), { create: true })
-
- const stream = ipfs.files.readReadableStream(`${testDir}/a`)
-
- const buf = await getStream(stream)
- expect(buf).to.eql('Hello, world!')
- })
- })
-}
diff --git a/src/files-regular/add-from-fs.js b/src/files-regular/add-from-fs.js
deleted file mode 100644
index db8a1bd2..00000000
--- a/src/files-regular/add-from-fs.js
+++ /dev/null
@@ -1,81 +0,0 @@
-/* eslint-env mocha */
-'use strict'
-
-const path = require('path')
-const expectTimeout = require('../utils/expect-timeout')
-const { getDescribe, getIt, expect } = require('../utils/mocha')
-const fs = require('fs')
-const os = require('os')
-
-/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
-/**
- * @param {Factory} common
- * @param {Object} options
- */
-module.exports = (common, options) => {
- const describe = getDescribe(options)
- const it = getIt(options)
-
- describe('.addFromFs', function () {
- this.timeout(40 * 1000)
-
- const fixturesPath = path.join(__dirname, '../../test/fixtures')
- let ipfs
-
- before(async () => { ipfs = (await common.spawn()).api })
-
- after(() => common.clean())
-
- it('should add a directory from the file system', async () => {
- const filesPath = path.join(fixturesPath, 'test-folder')
-
- const result = await ipfs.addFromFs(filesPath, { recursive: true })
- expect(result.length).to.be.above(8)
- })
-
- it('should add a directory from the file system with an odd name', async () => {
- const filesPath = path.join(fixturesPath, 'weird name folder [v0]')
-
- const result = await ipfs.addFromFs(filesPath, { recursive: true })
- expect(result.length).to.be.above(8)
- })
-
- it('should ignore a directory from the file system', async () => {
- const filesPath = path.join(fixturesPath, 'test-folder')
-
- const result = await ipfs.addFromFs(filesPath, { recursive: true, ignore: ['files/**'] })
-
- expect(result.some(file => file.path.includes('test-folder/files/'))).to.be.false()
- })
-
- it('should add a file from the file system', async () => {
- const filePath = path.join(fixturesPath, 'testfile.txt')
-
- const result = await ipfs.addFromFs(filePath)
- expect(result.length).to.equal(1)
- expect(result[0].path).to.equal('testfile.txt')
- })
-
- it('should add a hidden file in a directory from the file system', async () => {
- const filesPath = path.join(fixturesPath, 'hidden-files-folder')
-
- const result = await ipfs.addFromFs(filesPath, { recursive: true, hidden: true })
- expect(result.length).to.be.above(10)
- expect(result.map(object => object.path)).to.include('hidden-files-folder/.hiddenTest.txt')
- expect(result.map(object => object.hash)).to.include('QmdbAjVmLRdpFyi8FFvjPfhTGB2cVXvWLuK7Sbt38HXrtt')
- })
-
- it('should add a file from the file system with only-hash=true', async function () {
- this.slow(10 * 1000)
-
- const content = String(Math.random() + Date.now())
- const filepath = path.join(os.tmpdir(), `${content}.txt`)
- fs.writeFileSync(filepath, content)
-
- const out = await ipfs.addFromFs(filepath, { onlyHash: true })
-
- fs.unlinkSync(filepath)
- await expectTimeout(ipfs.object.get(out[0].hash), 4000)
- })
- })
-}
diff --git a/src/files-regular/add-from-stream.js b/src/files-regular/add-from-stream.js
deleted file mode 100644
index 5762cb70..00000000
--- a/src/files-regular/add-from-stream.js
+++ /dev/null
@@ -1,39 +0,0 @@
-/* eslint-env mocha */
-'use strict'
-
-const { Readable } = require('readable-stream')
-const { getDescribe, getIt, expect } = require('../utils/mocha')
-const { fixtures } = require('./utils')
-
-/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
-/**
- * @param {Factory} common
- * @param {Object} options
- */
-module.exports = (common, options) => {
- const describe = getDescribe(options)
- const it = getIt(options)
-
- describe('.addFromStream', function () {
- this.timeout(40 * 1000)
-
- let ipfs
-
- before(async () => { ipfs = (await common.spawn()).api })
-
- after(() => common.clean())
-
- it('should add from a stream', async () => {
- const stream = new Readable({
- read () {
- this.push(fixtures.bigFile.data)
- this.push(null)
- }
- })
-
- const result = await ipfs.addFromStream(stream)
- expect(result.length).to.equal(1)
- expect(result[0].hash).to.equal(fixtures.bigFile.cid)
- })
- })
-}
diff --git a/src/files-regular/add-from-url.js b/src/files-regular/add-from-url.js
deleted file mode 100644
index 0fa60577..00000000
--- a/src/files-regular/add-from-url.js
+++ /dev/null
@@ -1,112 +0,0 @@
-/* eslint-env mocha */
-'use strict'
-
-const pTimeout = require('p-timeout')
-const { getDescribe, getIt, expect } = require('../utils/mocha')
-const { echoUrl, redirectUrl } = require('../utils/echo-http-server')
-
-/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
-/**
- * @param {Factory} common
- * @param {Object} options
- */
-module.exports = (common, options) => {
- const describe = getDescribe(options)
- const it = getIt(options)
-
- describe('.addFromURL', function () {
- this.timeout(40 * 1000)
-
- let ipfs
-
- before(async () => { ipfs = (await common.spawn()).api })
-
- after(() => common.clean())
-
- it('should add from a HTTP URL', async () => {
- const text = `TEST${Date.now()}`
- const url = echoUrl(text)
-
- const [result, expectedResult] = await Promise.all([
- ipfs.addFromURL(url),
- ipfs.add(Buffer.from(text))
- ])
-
- expect(result.err).to.not.exist()
- expect(expectedResult.err).to.not.exist()
- expect(result[0].hash).to.equal(expectedResult[0].hash)
- expect(result[0].size).to.equal(expectedResult[0].size)
- expect(result[0].path).to.equal(text)
- })
-
- it('should add from a HTTP URL with redirection', async () => {
- const text = `TEST${Date.now()}`
- const url = echoUrl(text) + '?foo=bar#buzz'
-
- const [result, expectedResult] = await Promise.all([
- ipfs.addFromURL(redirectUrl(url)),
- ipfs.add(Buffer.from(text))
- ])
-
- expect(result.err).to.not.exist()
- expect(expectedResult.err).to.not.exist()
- expect(result[0].hash).to.equal(expectedResult[0].hash)
- expect(result[0].size).to.equal(expectedResult[0].size)
- expect(result[0].path).to.equal(text)
- })
-
- it('should add from a URL with only-hash=true', async function () {
- const text = `TEST${Date.now()}`
- const url = echoUrl(text)
-
- const res = await ipfs.addFromURL(url, { onlyHash: true })
-
- try {
- // A successful object.get for data of this size took ~14ms on my laptop
- await pTimeout(ipfs.object.get(res[0].hash), 500)
- } catch (err) {
- if (err.name === 'TimeoutError') {
- // This doesn't seem to be the right approach:
- // the test shouldn't be passing when it gets a timeout error,
- // but this is pretty much the same logic as the previous callback one
- return Promise.resolve()
- }
-
- throw err
- }
- })
-
- it('should add from a URL with wrap-with-directory=true', async () => {
- const filename = `TEST${Date.now()}.txt` // also acts as data
- const url = echoUrl(filename) + '?foo=bar#buzz'
- const addOpts = { wrapWithDirectory: true }
-
- const [result, expectedResult] = await Promise.all([
- ipfs.addFromURL(url, addOpts),
- ipfs.add([{ path: filename, content: Buffer.from(filename) }], addOpts)
- ])
- expect(result.err).to.not.exist()
- expect(expectedResult.err).to.not.exist()
- expect(result).to.deep.equal(expectedResult)
- })
-
- it('should add from a URL with wrap-with-directory=true and URL-escaped file name', async () => {
- const filename = `320px-Domažlice,_Jiráskova_43_(${Date.now()}).jpg` // also acts as data
- const url = echoUrl(filename) + '?foo=bar#buzz'
- const addOpts = { wrapWithDirectory: true }
-
- const [result, expectedResult] = await Promise.all([
- ipfs.addFromURL(url, addOpts),
- ipfs.add([{ path: filename, content: Buffer.from(filename) }], addOpts)
- ])
-
- expect(result.err).to.not.exist()
- expect(expectedResult.err).to.not.exist()
- expect(result).to.deep.equal(expectedResult)
- })
-
- it('should not add from an invalid url', () => {
- return expect(ipfs.addFromURL('123http://invalid')).to.eventually.be.rejected()
- })
- })
-}
diff --git a/src/files-regular/add-pull-stream.js b/src/files-regular/add-pull-stream.js
deleted file mode 100644
index 003f3f98..00000000
--- a/src/files-regular/add-pull-stream.js
+++ /dev/null
@@ -1,65 +0,0 @@
-/* eslint-env mocha */
-'use strict'
-
-const { fixtures } = require('./utils')
-const pull = require('pull-stream')
-const { getDescribe, getIt, expect } = require('../utils/mocha')
-const pullToPromise = require('pull-to-promise')
-
-/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
-/**
- * @param {Factory} common
- * @param {Object} options
- */
-module.exports = (common, options) => {
- const describe = getDescribe(options)
- const it = getIt(options)
-
- describe('.addPullStream', function () {
- this.timeout(40 * 1000)
-
- let ipfs
-
- before(async () => { ipfs = (await common.spawn()).api })
-
- after(() => common.clean())
-
- it('should add pull stream of valid files and dirs', async function () {
- const content = (name) => ({
- path: `test-folder/${name}`,
- content: fixtures.directory.files[name]
- })
-
- const emptyDir = (name) => ({ path: `test-folder/${name}` })
-
- const files = [
- content('pp.txt'),
- content('holmes.txt'),
- content('jungle.txt'),
- content('alice.txt'),
- emptyDir('empty-folder'),
- content('files/hello.txt'),
- content('files/ipfs.txt'),
- emptyDir('files/empty')
- ]
-
- const stream = ipfs.addPullStream()
-
- const filesAdded = await pullToPromise.any(pull(pull.values(files), stream))
- const testFolderIndex = filesAdded.length - 1
-
- expect(filesAdded).to.have.nested.property(`[${testFolderIndex}].path`, 'test-folder')
- expect(filesAdded).to.have.nested.property(`[${testFolderIndex}].hash`, fixtures.directory.cid)
- })
-
- it('should add with object chunks and pull stream content', async () => {
- const expectedCid = 'QmRf22bZar3WKmojipms22PkXH1MZGmvsqzQtuSvQE3uhm'
- const data = [{ content: pull.values([Buffer.from('test')]) }]
- const stream = ipfs.addPullStream()
-
- const res = await pullToPromise.any(pull(pull.values(data), stream))
- expect(res).to.have.property('length', 1)
- expect(res[0]).to.include({ path: expectedCid, hash: expectedCid, size: 12 })
- })
- })
-}
diff --git a/src/files-regular/add-readable-stream.js b/src/files-regular/add-readable-stream.js
deleted file mode 100644
index f2a32c87..00000000
--- a/src/files-regular/add-readable-stream.js
+++ /dev/null
@@ -1,56 +0,0 @@
-/* eslint-env mocha */
-'use strict'
-
-const { fixtures } = require('./utils')
-const { getDescribe, getIt, expect } = require('../utils/mocha')
-const getStream = require('get-stream')
-
-/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
-/**
- * @param {Factory} common
- * @param {Object} options
- */
-module.exports = (common, options) => {
- const describe = getDescribe(options)
- const it = getIt(options)
-
- describe('.addReadableStream', function () {
- this.timeout(40 * 1000)
-
- let ipfs
-
- before(async () => { ipfs = (await common.spawn()).api })
-
- after(() => common.clean())
-
- it('should add readable stream of valid files and dirs', async function () {
- const content = (name) => ({
- path: `test-folder/${name}`,
- content: fixtures.directory.files[name]
- })
-
- const emptyDir = (name) => ({ path: `test-folder/${name}` })
-
- const files = [
- content('pp.txt'),
- content('holmes.txt'),
- content('jungle.txt'),
- content('alice.txt'),
- emptyDir('empty-folder'),
- content('files/hello.txt'),
- content('files/ipfs.txt'),
- emptyDir('files/empty')
- ]
-
- const stream = ipfs.addReadableStream()
-
- files.forEach((file) => stream.write(file))
- stream.end()
-
- const filesArray = await getStream.array(stream)
- const file = filesArray[filesArray.length - 1]
-
- expect(file.hash).to.equal(fixtures.directory.cid)
- })
- })
-}
diff --git a/src/files-regular/cat-pull-stream.js b/src/files-regular/cat-pull-stream.js
deleted file mode 100644
index 70b4cedc..00000000
--- a/src/files-regular/cat-pull-stream.js
+++ /dev/null
@@ -1,49 +0,0 @@
-/* eslint-env mocha */
-'use strict'
-
-const { fixtures } = require('./utils')
-const { getDescribe, getIt, expect } = require('../utils/mocha')
-const pullToPromise = require('pull-to-promise')
-
-/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
-/**
- * @param {Factory} common
- * @param {Object} options
- */
-module.exports = (common, options) => {
- const describe = getDescribe(options)
- const it = getIt(options)
-
- describe('.catPullStream', function () {
- this.timeout(40 * 1000)
-
- let ipfs
-
- before(async () => { ipfs = (await common.spawn()).api })
-
- before(() => ipfs.add(fixtures.smallFile.data))
- after(() => common.clean())
-
- it('should return a Pull Stream for a CID', async () => {
- const stream = ipfs.catPullStream(fixtures.smallFile.cid)
-
- const data = Buffer.concat(await pullToPromise.any(stream))
-
- expect(data.length).to.equal(fixtures.smallFile.data.length)
- expect(data.toString()).to.deep.equal(fixtures.smallFile.data.toString())
- })
-
- it('should export a chunk of a file in a Pull Stream', async () => {
- const offset = 1
- const length = 3
-
- const stream = ipfs.catPullStream(fixtures.smallFile.cid, {
- offset,
- length
- })
-
- const data = Buffer.concat(await pullToPromise.any(stream))
- expect(data.toString()).to.equal('lz ')
- })
- })
-}
diff --git a/src/files-regular/cat-readable-stream.js b/src/files-regular/cat-readable-stream.js
deleted file mode 100644
index 85a935c8..00000000
--- a/src/files-regular/cat-readable-stream.js
+++ /dev/null
@@ -1,50 +0,0 @@
-/* eslint-env mocha */
-'use strict'
-
-const { fixtures } = require('./utils')
-const { getDescribe, getIt, expect } = require('../utils/mocha')
-const getStream = require('get-stream')
-
-/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
-/**
- * @param {Factory} common
- * @param {Object} options
- */
-module.exports = (common, options) => {
- const describe = getDescribe(options)
- const it = getIt(options)
-
- describe('.catReadableStream', function () {
- this.timeout(40 * 1000)
-
- let ipfs
-
- before(async () => {
- ipfs = (await common.spawn()).api
- await ipfs.add(fixtures.bigFile.data)
- await ipfs.add(fixtures.smallFile.data)
- })
-
- after(() => common.clean())
-
- it('should return a Readable Stream for a CID', async () => {
- const stream = ipfs.catReadableStream(fixtures.bigFile.cid)
- const data = await getStream.buffer(stream)
-
- expect(data).to.eql(fixtures.bigFile.data)
- })
-
- it('should export a chunk of a file in a Readable Stream', async () => {
- const offset = 1
- const length = 3
-
- const stream = ipfs.catReadableStream(fixtures.smallFile.cid, {
- offset,
- length
- })
-
- const data = await getStream.buffer(stream)
- expect(data.toString()).to.equal('lz ')
- })
- })
-}
diff --git a/src/files-regular/get-pull-stream.js b/src/files-regular/get-pull-stream.js
deleted file mode 100644
index 070ebfba..00000000
--- a/src/files-regular/get-pull-stream.js
+++ /dev/null
@@ -1,37 +0,0 @@
-/* eslint-env mocha */
-'use strict'
-
-const { fixtures } = require('./utils')
-const { getDescribe, getIt, expect } = require('../utils/mocha')
-const pullToPromise = require('pull-to-promise')
-
-/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
-/**
- * @param {Factory} common
- * @param {Object} options
- */
-module.exports = (common, options) => {
- const describe = getDescribe(options)
- const it = getIt(options)
-
- describe('.getPullStream', function () {
- this.timeout(40 * 1000)
-
- let ipfs
-
- before(async () => { ipfs = (await common.spawn()).api })
-
- before(() => ipfs.add(fixtures.smallFile.data))
-
- after(() => common.clean())
-
- it('should return a Pull Stream of Pull Streams', async () => {
- const stream = ipfs.getPullStream(fixtures.smallFile.cid)
-
- const files = await pullToPromise.any(stream)
-
- const data = Buffer.concat(await pullToPromise.any(files[0].content))
- expect(data.toString()).to.contain('Plz add me!')
- })
- })
-}
diff --git a/src/files-regular/get-readable-stream.js b/src/files-regular/get-readable-stream.js
deleted file mode 100644
index 7a66915a..00000000
--- a/src/files-regular/get-readable-stream.js
+++ /dev/null
@@ -1,50 +0,0 @@
-/* eslint-env mocha */
-'use strict'
-
-const { fixtures } = require('./utils')
-const through = require('through2')
-const { getDescribe, getIt, expect } = require('../utils/mocha')
-const getStream = require('get-stream')
-
-/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
-/**
- * @param {Factory} common
- * @param {Object} options
- */
-module.exports = (common, options) => {
- const describe = getDescribe(options)
- const it = getIt(options)
-
- describe('.getReadableStream', function () {
- this.timeout(40 * 1000)
-
- let ipfs
-
- before(async () => {
- ipfs = (await common.spawn()).api
- await ipfs.add(fixtures.smallFile.data)
- })
-
- after(() => common.clean())
-
- it('should return a Readable Stream of Readable Streams', async () => {
- const stream = ipfs.getReadableStream(fixtures.smallFile.cid)
-
- // I was not able to use the 'get-stream' module here
- // as it exceeds the timeout. I think it might be related
- // to the 'pump' module that get-stream uses
- const files = await new Promise((resolve, reject) => {
- const filesArr = []
- stream.pipe(through.obj(async (file, enc, next) => {
- const content = await getStream.buffer(file.content)
- filesArr.push({ path: file.path, content: content })
- next()
- }, () => resolve(filesArr)))
- })
-
- expect(files).to.be.length(1)
- expect(files[0].path).to.eql(fixtures.smallFile.cid)
- expect(files[0].content.toString()).to.contain('Plz add me!')
- })
- })
-}
diff --git a/src/files-regular/index.js b/src/files-regular/index.js
deleted file mode 100644
index d097ad9c..00000000
--- a/src/files-regular/index.js
+++ /dev/null
@@ -1,29 +0,0 @@
-'use strict'
-
-const { createSuite } = require('../utils/suite')
-
-const tests = {
- add: require('./add'),
- addReadableStream: require('./add-readable-stream'),
- addPullStream: require('./add-pull-stream'),
- addFromStream: require('./add-from-stream'),
- addFromURL: require('./add-from-url'),
- addFromFs: require('./add-from-fs'),
- cat: require('./cat'),
- catReadableStream: require('./cat-readable-stream'),
- catPullStream: require('./cat-pull-stream'),
- get: require('./get'),
- getReadableStream: require('./get-readable-stream'),
- getPullStream: require('./get-pull-stream'),
- ls: require('./ls'),
- lsReadableStream: require('./ls-readable-stream'),
- lsPullStream: require('./ls-pull-stream'),
- refs: require('./refs'),
- refsReadableStream: require('./refs-readable-stream'),
- refsPullStream: require('./refs-pull-stream'),
- refsLocal: require('./refs-local'),
- refsLocalPullStream: require('./refs-local-pull-stream'),
- refsLocalReadableStream: require('./refs-local-readable-stream')
-}
-
-module.exports = createSuite(tests)
diff --git a/src/files-regular/ls-pull-stream.js b/src/files-regular/ls-pull-stream.js
deleted file mode 100644
index 04f43e39..00000000
--- a/src/files-regular/ls-pull-stream.js
+++ /dev/null
@@ -1,100 +0,0 @@
-/* eslint-env mocha */
-'use strict'
-
-const { fixtures } = require('./utils')
-const { getDescribe, getIt, expect } = require('../utils/mocha')
-const pullToPromise = require('pull-to-promise')
-
-/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
-/**
- * @param {Factory} common
- * @param {Object} options
- */
-module.exports = (common, options) => {
- const describe = getDescribe(options)
- const it = getIt(options)
-
- describe('.lsPullStream', function () {
- this.timeout(40 * 1000)
-
- let ipfs
-
- before(async () => {
- ipfs = (await common.spawn()).api
- })
-
- after(() => common.clean())
-
- it('should pull stream ls with a base58 encoded CID', async function () {
- const content = (name) => ({
- path: `test-folder/${name}`,
- content: fixtures.directory.files[name]
- })
-
- const emptyDir = (name) => ({ path: `test-folder/${name}` })
-
- const dirs = [
- content('pp.txt'),
- content('holmes.txt'),
- content('jungle.txt'),
- content('alice.txt'),
- emptyDir('empty-folder'),
- content('files/hello.txt'),
- content('files/ipfs.txt'),
- emptyDir('files/empty')
- ]
-
- const res = await ipfs.add(dirs)
-
- const root = res[res.length - 1]
- expect(root.path).to.equal('test-folder')
- expect(root.hash).to.equal(fixtures.directory.cid)
-
- const cid = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP'
- const stream = ipfs.lsPullStream(cid)
-
- const output = await pullToPromise.any(stream)
- expect(output).to.have.lengthOf(6)
- expect(output).to.have.nested.property('[0].depth', 1)
- expect(output).to.have.nested.property('[0].name', 'alice.txt')
- expect(output).to.have.nested.property('[0].path', 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt')
- expect(output).to.have.nested.property('[0].size', 11685)
- expect(output).to.have.nested.property('[0].type', 'file')
-
- expect(output).to.have.nested.property('[1].depth', 1)
- expect(output).to.have.nested.property('[1].name', 'empty-folder')
- expect(output).to.have.nested.property('[1].path', 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder')
- expect(output).to.have.nested.property('[1].size', 0)
- expect(output).to.have.nested.property('[1].hash', 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn')
- expect(output).to.have.nested.property('[1].type', 'dir')
-
- expect(output).to.have.nested.property('[2].depth', 1)
- expect(output).to.have.nested.property('[2].name', 'files')
- expect(output).to.have.nested.property('[2].path', 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files')
- expect(output).to.have.nested.property('[2].size', 0)
- expect(output).to.have.nested.property('[2].hash', 'QmZ25UfTqXGz9RsEJFg7HUAuBcmfx5dQZDXQd2QEZ8Kj74')
- expect(output).to.have.nested.property('[2].type', 'dir')
-
- expect(output).to.have.nested.property('[3].depth', 1)
- expect(output).to.have.nested.property('[3].name', 'holmes.txt')
- expect(output).to.have.nested.property('[3].path', 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt')
- expect(output).to.have.nested.property('[3].size', 581878)
- expect(output).to.have.nested.property('[3].hash', 'QmR4nFjTu18TyANgC65ArNWp5Yaab1gPzQ4D8zp7Kx3vhr')
- expect(output).to.have.nested.property('[3].type', 'file')
-
- expect(output).to.have.nested.property('[4].depth', 1)
- expect(output).to.have.nested.property('[4].name', 'jungle.txt')
- expect(output).to.have.nested.property('[4].path', 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt')
- expect(output).to.have.nested.property('[4].size', 2294)
- expect(output).to.have.nested.property('[4].hash', 'QmT6orWioMiSqXXPGsUi71CKRRUmJ8YkuueV2DPV34E9y9')
- expect(output).to.have.nested.property('[4].type', 'file')
-
- expect(output).to.have.nested.property('[5].depth', 1)
- expect(output).to.have.nested.property('[5].name', 'pp.txt')
- expect(output).to.have.nested.property('[5].path', 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt')
- expect(output).to.have.nested.property('[5].size', 4540)
- expect(output).to.have.nested.property('[5].hash', 'QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn')
- expect(output).to.have.nested.property('[5].type', 'file')
- })
- })
-}
diff --git a/src/files-regular/ls-readable-stream.js b/src/files-regular/ls-readable-stream.js
deleted file mode 100644
index 37dc3d00..00000000
--- a/src/files-regular/ls-readable-stream.js
+++ /dev/null
@@ -1,100 +0,0 @@
-/* eslint-env mocha */
-'use strict'
-
-const { fixtures } = require('./utils')
-const { getDescribe, getIt, expect } = require('../utils/mocha')
-const getStream = require('get-stream')
-
-/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
-/**
- * @param {Factory} common
- * @param {Object} options
- */
-module.exports = (common, options) => {
- const describe = getDescribe(options)
- const it = getIt(options)
-
- describe('.lsReadableStream', function () {
- this.timeout(40 * 1000)
-
- let ipfs
-
- before(async () => {
- ipfs = (await common.spawn()).api
- })
-
- after(() => common.clean())
-
- it('should readable stream ls with a base58 encoded CID', async function () {
- const content = (name) => ({
- path: `test-folder/${name}`,
- content: fixtures.directory.files[name]
- })
-
- const emptyDir = (name) => ({ path: `test-folder/${name}` })
-
- const dirs = [
- content('pp.txt'),
- content('holmes.txt'),
- content('jungle.txt'),
- content('alice.txt'),
- emptyDir('empty-folder'),
- content('files/hello.txt'),
- content('files/ipfs.txt'),
- emptyDir('files/empty')
- ]
-
- const res = await ipfs.add(dirs)
-
- const root = res[res.length - 1]
- expect(root.path).to.equal('test-folder')
- expect(root.hash).to.equal(fixtures.directory.cid)
-
- const cid = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP'
- const stream = ipfs.lsReadableStream(cid)
-
- const output = await getStream.array(stream)
- expect(output).to.have.lengthOf(6)
- expect(output).to.have.nested.property('[0].depth', 1)
- expect(output).to.have.nested.property('[0].name', 'alice.txt')
- expect(output).to.have.nested.property('[0].path', 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt')
- expect(output).to.have.nested.property('[0].size', 11685)
- expect(output).to.have.nested.property('[0].type', 'file')
-
- expect(output).to.have.nested.property('[1].depth', 1)
- expect(output).to.have.nested.property('[1].name', 'empty-folder')
- expect(output).to.have.nested.property('[1].path', 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder')
- expect(output).to.have.nested.property('[1].size', 0)
- expect(output).to.have.nested.property('[1].hash', 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn')
- expect(output).to.have.nested.property('[1].type', 'dir')
-
- expect(output).to.have.nested.property('[2].depth', 1)
- expect(output).to.have.nested.property('[2].name', 'files')
- expect(output).to.have.nested.property('[2].path', 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files')
- expect(output).to.have.nested.property('[2].size', 0)
- expect(output).to.have.nested.property('[2].hash', 'QmZ25UfTqXGz9RsEJFg7HUAuBcmfx5dQZDXQd2QEZ8Kj74')
- expect(output).to.have.nested.property('[2].type', 'dir')
-
- expect(output).to.have.nested.property('[3].depth', 1)
- expect(output).to.have.nested.property('[3].name', 'holmes.txt')
- expect(output).to.have.nested.property('[3].path', 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt')
- expect(output).to.have.nested.property('[3].size', 581878)
- expect(output).to.have.nested.property('[3].hash', 'QmR4nFjTu18TyANgC65ArNWp5Yaab1gPzQ4D8zp7Kx3vhr')
- expect(output).to.have.nested.property('[3].type', 'file')
-
- expect(output).to.have.nested.property('[4].depth', 1)
- expect(output).to.have.nested.property('[4].name', 'jungle.txt')
- expect(output).to.have.nested.property('[4].path', 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt')
- expect(output).to.have.nested.property('[4].size', 2294)
- expect(output).to.have.nested.property('[4].hash', 'QmT6orWioMiSqXXPGsUi71CKRRUmJ8YkuueV2DPV34E9y9')
- expect(output).to.have.nested.property('[4].type', 'file')
-
- expect(output).to.have.nested.property('[5].depth', 1)
- expect(output).to.have.nested.property('[5].name', 'pp.txt')
- expect(output).to.have.nested.property('[5].path', 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt')
- expect(output).to.have.nested.property('[5].size', 4540)
- expect(output).to.have.nested.property('[5].hash', 'QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn')
- expect(output).to.have.nested.property('[5].type', 'file')
- })
- })
-}
diff --git a/src/files-regular/ls.js b/src/files-regular/ls.js
deleted file mode 100644
index a3d127d6..00000000
--- a/src/files-regular/ls.js
+++ /dev/null
@@ -1,197 +0,0 @@
-/* eslint-env mocha */
-'use strict'
-
-const { fixtures } = require('./utils')
-const { getDescribe, getIt, expect } = require('../utils/mocha')
-const CID = require('cids')
-
-const randomName = prefix => `${prefix}${Math.round(Math.random() * 1000)}`
-
-/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
-/**
- * @param {Factory} common
- * @param {Object} options
- */
-module.exports = (common, options) => {
- const describe = getDescribe(options)
- const it = getIt(options)
-
- describe('.ls', function () {
- this.timeout(40 * 1000)
-
- let ipfs
-
- before(async () => {
- ipfs = (await common.spawn()).api
- })
-
- after(() => common.clean())
-
- it('should ls with a base58 encoded CID', async function () {
- const content = (name) => ({
- path: `test-folder/${name}`,
- content: fixtures.directory.files[name]
- })
-
- const emptyDir = (name) => ({ path: `test-folder/${name}` })
-
- const dirs = [
- content('pp.txt'),
- content('holmes.txt'),
- content('jungle.txt'),
- content('alice.txt'),
- emptyDir('empty-folder'),
- content('files/hello.txt'),
- content('files/ipfs.txt'),
- emptyDir('files/empty')
- ]
-
- const res = await ipfs.add(dirs)
-
- const root = res[res.length - 1]
- expect(root.path).to.equal('test-folder')
- expect(root.hash).to.equal(fixtures.directory.cid)
-
- const cid = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP'
- const output = await ipfs.ls(cid)
-
- expect(output).to.have.lengthOf(6)
- expect(output).to.have.nested.property('[0].depth', 1)
- expect(output).to.have.nested.property('[0].name', 'alice.txt')
- expect(output).to.have.nested.property('[0].path', 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt')
- expect(output).to.have.nested.property('[0].size', 11685)
- expect(output).to.have.nested.property('[0].type', 'file')
-
- expect(output).to.have.nested.property('[1].depth', 1)
- expect(output).to.have.nested.property('[1].name', 'empty-folder')
- expect(output).to.have.nested.property('[1].path', 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder')
- expect(output).to.have.nested.property('[1].size', 0)
- expect(output).to.have.nested.property('[1].hash', 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn')
- expect(output).to.have.nested.property('[1].type', 'dir')
-
- expect(output).to.have.nested.property('[2].depth', 1)
- expect(output).to.have.nested.property('[2].name', 'files')
- expect(output).to.have.nested.property('[2].path', 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files')
- expect(output).to.have.nested.property('[2].size', 0)
- expect(output).to.have.nested.property('[2].hash', 'QmZ25UfTqXGz9RsEJFg7HUAuBcmfx5dQZDXQd2QEZ8Kj74')
- expect(output).to.have.nested.property('[2].type', 'dir')
-
- expect(output).to.have.nested.property('[3].depth', 1)
- expect(output).to.have.nested.property('[3].name', 'holmes.txt')
- expect(output).to.have.nested.property('[3].path', 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt')
- expect(output).to.have.nested.property('[3].size', 581878)
- expect(output).to.have.nested.property('[3].hash', 'QmR4nFjTu18TyANgC65ArNWp5Yaab1gPzQ4D8zp7Kx3vhr')
- expect(output).to.have.nested.property('[3].type', 'file')
-
- expect(output).to.have.nested.property('[4].depth', 1)
- expect(output).to.have.nested.property('[4].name', 'jungle.txt')
- expect(output).to.have.nested.property('[4].path', 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt')
- expect(output).to.have.nested.property('[4].size', 2294)
- expect(output).to.have.nested.property('[4].hash', 'QmT6orWioMiSqXXPGsUi71CKRRUmJ8YkuueV2DPV34E9y9')
- expect(output).to.have.nested.property('[4].type', 'file')
-
- expect(output).to.have.nested.property('[5].depth', 1)
- expect(output).to.have.nested.property('[5].name', 'pp.txt')
- expect(output).to.have.nested.property('[5].path', 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt')
- expect(output).to.have.nested.property('[5].size', 4540)
- expect(output).to.have.nested.property('[5].hash', 'QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn')
- expect(output).to.have.nested.property('[5].type', 'file')
- })
-
- it('should ls files added as CIDv0 with a CIDv1', async () => {
- const dir = randomName('DIR')
-
- const input = [
- { path: `${dir}/${randomName('F0')}`, content: Buffer.from(randomName('D0')) },
- { path: `${dir}/${randomName('F1')}`, content: Buffer.from(randomName('D1')) }
- ]
-
- const res = await ipfs.add(input, { cidVersion: 0 })
-
- const cidv0 = new CID(res[res.length - 1].hash)
- expect(cidv0.version).to.equal(0)
-
- const cidv1 = cidv0.toV1()
-
- const output = await ipfs.ls(cidv1)
- expect(output.length).to.equal(input.length)
-
- output.forEach(({ hash }) => {
- expect(res.find(file => file.hash === hash)).to.exist()
- })
- })
-
- it('should ls files added as CIDv1 with a CIDv0', async () => {
- const dir = randomName('DIR')
-
- const input = [
- { path: `${dir}/${randomName('F0')}`, content: Buffer.from(randomName('D0')) },
- { path: `${dir}/${randomName('F1')}`, content: Buffer.from(randomName('D1')) }
- ]
-
- const res = await ipfs.add(input, { cidVersion: 1, rawLeaves: false })
-
- const cidv1 = new CID(res[res.length - 1].hash)
- expect(cidv1.version).to.equal(1)
-
- const cidv0 = cidv1.toV1()
-
- const output = await ipfs.ls(cidv0)
- expect(output.length).to.equal(input.length)
-
- output.forEach(({ hash }) => {
- expect(res.find(file => file.hash === hash)).to.exist()
- })
- })
-
- it('should correctly handle a non existing hash', () => {
- return expect(ipfs.ls('surelynotavalidhashheh?')).to.eventually.be.rejected()
- })
-
- it('should correctly handle a non existing path', () => {
- return expect(ipfs.ls('QmRNjDeKStKGTQXnJ2NFqeQ9oW/folder_that_isnt_there')).to.eventually.be.rejected()
- })
-
- it('should ls files by path', async () => {
- const dir = randomName('DIR')
-
- const input = [
- { path: `${dir}/${randomName('F0')}`, content: Buffer.from(randomName('D0')) },
- { path: `${dir}/${randomName('F1')}`, content: Buffer.from(randomName('D1')) }
- ]
-
- const res = await ipfs.add(input)
- const output = await ipfs.ls(`/ipfs/${res[res.length - 1].hash}`)
- expect(output.length).to.equal(input.length)
-
- output.forEach(({ hash }) => {
- expect(res.find(file => file.hash === hash)).to.exist()
- })
- })
-
- it('should ls with metadata', async () => {
- const dir = randomName('DIR')
- const mtime = new Date()
- const mode = '0532'
- const expectedMode = parseInt(mode, 8)
- const expectedMtime = {
- secs: Math.floor(mtime.getTime() / 1000),
- nsecs: (mtime.getTime() - (Math.floor(mtime.getTime() / 1000) * 1000)) * 1000
- }
-
- const input = [
- { path: `${dir}/${randomName('F0')}`, content: Buffer.from(randomName('D0')), mode, mtime },
- { path: `${dir}/${randomName('F1')}`, content: Buffer.from(randomName('D1')), mode, mtime }
- ]
-
- const res = await ipfs.add(input)
- const output = await ipfs.ls(`/ipfs/${res[res.length - 1].hash}`)
-
- expect(output).to.have.lengthOf(input.length)
- expect(output).to.have.nested.deep.property('[0].mtime', expectedMtime)
- expect(output).to.have.nested.property('[0].mode', expectedMode)
- expect(output).to.have.nested.deep.property('[1].mtime', expectedMtime)
- expect(output).to.have.nested.property('[1].mode', expectedMode)
- })
- })
-}
diff --git a/src/files-regular/refs-local-pull-stream.js b/src/files-regular/refs-local-pull-stream.js
deleted file mode 100644
index 88774247..00000000
--- a/src/files-regular/refs-local-pull-stream.js
+++ /dev/null
@@ -1,13 +0,0 @@
-/* eslint-env mocha */
-'use strict'
-
-const pullToPromise = require('pull-to-promise')
-
-module.exports = (createCommon, options) => {
- const ipfsRefsLocal = (ipfs) => {
- const stream = ipfs.refs.localPullStream()
-
- return pullToPromise.any(stream)
- }
- require('./refs-local-tests')(createCommon, '.refs.localPullStream', ipfsRefsLocal, options)
-}
diff --git a/src/files-regular/refs-local-readable-stream.js b/src/files-regular/refs-local-readable-stream.js
deleted file mode 100644
index 236961d1..00000000
--- a/src/files-regular/refs-local-readable-stream.js
+++ /dev/null
@@ -1,12 +0,0 @@
-/* eslint-env mocha */
-'use strict'
-
-const getStream = require('get-stream')
-
-module.exports = (createCommon, options) => {
- const ipfsRefsLocal = (ipfs) => {
- const stream = ipfs.refs.localReadableStream()
- return getStream.array(stream)
- }
- require('./refs-local-tests')(createCommon, '.refs.localReadableStream', ipfsRefsLocal, options)
-}
diff --git a/src/files-regular/refs-local.js b/src/files-regular/refs-local.js
deleted file mode 100644
index 53737e5d..00000000
--- a/src/files-regular/refs-local.js
+++ /dev/null
@@ -1,7 +0,0 @@
-/* eslint-env mocha */
-'use strict'
-
-module.exports = (createCommon, options) => {
- const ipfsRefsLocal = (ipfs) => ipfs.refs.local()
- require('./refs-local-tests')(createCommon, '.refs.local', ipfsRefsLocal, options)
-}
diff --git a/src/files-regular/refs-pull-stream.js b/src/files-regular/refs-pull-stream.js
deleted file mode 100644
index 51885754..00000000
--- a/src/files-regular/refs-pull-stream.js
+++ /dev/null
@@ -1,13 +0,0 @@
-/* eslint-env mocha */
-'use strict'
-
-const pullToPromise = require('pull-to-promise')
-
-module.exports = (createCommon, options) => {
- const ipfsRefs = (ipfs) => (path, params) => {
- const stream = ipfs.refsPullStream(path, params)
-
- return pullToPromise.any(stream)
- }
- require('./refs-tests')(createCommon, '.refsPullStream', ipfsRefs, options)
-}
diff --git a/src/files-regular/refs-readable-stream.js b/src/files-regular/refs-readable-stream.js
deleted file mode 100644
index b49072ea..00000000
--- a/src/files-regular/refs-readable-stream.js
+++ /dev/null
@@ -1,12 +0,0 @@
-/* eslint-env mocha */
-'use strict'
-
-const getStream = require('get-stream')
-
-module.exports = (createCommon, options) => {
- const ipfsRefs = (ipfs) => (path, params) => {
- const stream = ipfs.refsReadableStream(path, params)
- return getStream.array(stream)
- }
- require('./refs-tests')(createCommon, '.refsReadableStream', ipfsRefs, options)
-}
diff --git a/src/files-regular/refs.js b/src/files-regular/refs.js
deleted file mode 100644
index 41dd8c03..00000000
--- a/src/files-regular/refs.js
+++ /dev/null
@@ -1,7 +0,0 @@
-/* eslint-env mocha */
-'use strict'
-
-module.exports = (createCommon, options) => {
- const ipfsRefs = (ipfs) => ipfs.refs.bind(ipfs)
- require('./refs-tests')(createCommon, '.refs', ipfsRefs, options)
-}
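The deletions above remove the pull-stream and readable-stream variants of the `refs` tests; the surviving API is a single async iterable. A minimal sketch of the replacement consumption pattern, assuming an `ipfs` instance and the `it-all` package:

```JavaScript
const all = require('it-all')

async function localRefs (ipfs) {
  // previously ipfs.refs.localPullStream() / ipfs.refs.localReadableStream();
  // it-all drains the async iterable into an array
  return all(ipfs.refs.local())
}
```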
diff --git a/src/files-mfs/chmod.js b/src/files/chmod.js
similarity index 100%
rename from src/files-mfs/chmod.js
rename to src/files/chmod.js
diff --git a/src/files-mfs/cp.js b/src/files/cp.js
similarity index 88%
rename from src/files-mfs/cp.js
rename to src/files/cp.js
index 3b13e042..930a2dc7 100644
--- a/src/files-mfs/cp.js
+++ b/src/files/cp.js
@@ -2,7 +2,9 @@
'use strict'
const hat = require('hat')
-const { fixtures } = require('../files-regular/utils')
+const all = require('it-all')
+const concat = require('it-concat')
+const { fixtures } = require('../utils')
const { getDescribe, getIt, expect } = require('../utils/mocha')
/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
@@ -51,13 +53,11 @@ module.exports = (common, options) => {
})
it('should copy from outside of mfs', async () => {
- const [{
- hash
- }] = await ipfs.add(fixtures.smallFile.data)
+ const [{ cid }] = await all(ipfs.add(fixtures.smallFile.data))
const testFilePath = `/${hat()}`
- await ipfs.files.cp(`/ipfs/${hash}`, testFilePath)
- const testFileData = await ipfs.files.read(testFilePath)
- expect(testFileData).to.eql(fixtures.smallFile.data)
+ await ipfs.files.cp(`/ipfs/${cid}`, testFilePath)
+ const testFileData = await concat(ipfs.files.read(testFilePath))
+ expect(testFileData.slice()).to.eql(fixtures.smallFile.data)
})
it('should respect metadata when copying files', async function () {
@@ -118,13 +118,13 @@ module.exports = (common, options) => {
}
const [{
- hash
- }] = await ipfs.add({
+ cid
+ }] = await all(ipfs.add({
content: fixtures.smallFile.data,
mode,
mtime
- })
- await ipfs.files.cp(`/ipfs/${hash}`, testDestPath)
+ }))
+ await ipfs.files.cp(`/ipfs/${cid}`, testDestPath)
const stats = await ipfs.files.stat(testDestPath)
expect(stats).to.have.deep.property('mtime', expectedMtime)
diff --git a/src/files-mfs/flush.js b/src/files/flush.js
similarity index 90%
rename from src/files-mfs/flush.js
rename to src/files/flush.js
index d7d282d1..1912f9b6 100644
--- a/src/files-mfs/flush.js
+++ b/src/files/flush.js
@@ -36,7 +36,7 @@ module.exports = (common, options) => {
const root = await ipfs.files.stat('/')
const flushed = await ipfs.files.flush()
- expect(root.hash).to.equal(flushed.toString())
+ expect(root.cid.toString()).to.equal(flushed.toString())
})
it('should flush specific dir', async () => {
@@ -47,7 +47,7 @@ module.exports = (common, options) => {
const dirStats = await ipfs.files.stat(testDir)
const flushed = await ipfs.files.flush(testDir)
- expect(dirStats.hash).to.equal(flushed.toString())
+ expect(dirStats.cid.toString()).to.equal(flushed.toString())
})
})
}
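Both assertions reflect the `hash`-string to CID-instance change: `files.stat` now exposes `cid` and `files.flush` resolves to a CID, so the two sides are stringified before comparison. A minimal sketch, assuming an `ipfs` instance:

```JavaScript
async function isFlushed (ipfs, path = '/') {
  const stats = await ipfs.files.stat(path) // stats.cid is a CID instance
  const flushed = await ipfs.files.flush(path) // also a CID instance
  return stats.cid.toString() === flushed.toString()
}
```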
diff --git a/src/files-mfs/index.js b/src/files/index.js
similarity index 66%
rename from src/files-mfs/index.js
rename to src/files/index.js
index a5ba1bd1..e6d1f831 100644
--- a/src/files-mfs/index.js
+++ b/src/files/index.js
@@ -11,11 +11,7 @@ const tests = {
rm: require('./rm'),
stat: require('./stat'),
read: require('./read'),
- readReadableStream: require('./read-readable-stream'),
- readPullStream: require('./read-pull-stream'),
ls: require('./ls'),
- lsReadableStream: require('./ls-readable-stream'),
- lsPullStream: require('./ls-pull-stream'),
flush: require('./flush'),
touch: require('./touch')
}
diff --git a/src/files/ls.js b/src/files/ls.js
new file mode 100644
index 00000000..dda48168
--- /dev/null
+++ b/src/files/ls.js
@@ -0,0 +1,118 @@
+/* eslint-env mocha */
+'use strict'
+
+const hat = require('hat')
+const all = require('it-all')
+const { fixtures } = require('../utils')
+const { getDescribe, getIt, expect } = require('../utils/mocha')
+
+/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
+/**
+ * @param {Factory} common
+ * @param {Object} options
+ */
+module.exports = (common, options) => {
+ const describe = getDescribe(options)
+ const it = getIt(options)
+
+ describe('.files.ls', function () {
+ this.timeout(40 * 1000)
+
+ let ipfs
+
+ before(async () => { ipfs = (await common.spawn()).api })
+
+ after(() => common.clean())
+
+    it('should fail to ls a file/dir that does not exist', () => {
+ const testDir = `/test-${hat()}`
+
+ return expect(all(ipfs.files.ls(`${testDir}/404`))).to.eventually.be.rejected()
+ })
+
+ it('should ls directory', async () => {
+ const testDir = `/test-${hat()}`
+
+ await ipfs.files.mkdir(`${testDir}/lv1`, { parents: true })
+ await ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true })
+
+ const entries = await all(ipfs.files.ls(testDir))
+
+ expect(entries).to.have.lengthOf(2)
+ expect(entries[0].name).to.equal('b')
+ expect(entries[0].type).to.equal(0)
+ expect(entries[0].size).to.equal(13)
+ expect(entries[0].cid.toString()).to.equal('QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T')
+ expect(entries[1].name).to.equal('lv1')
+ expect(entries[1].type).to.equal(1)
+ expect(entries[1].size).to.equal(0)
+ expect(entries[1].cid.toString()).to.equal('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn')
+ })
+
+ it('should ls directory and include metadata', async () => {
+ const testDir = `/test-${hat()}`
+
+ await ipfs.files.mkdir(`${testDir}/lv1`, {
+ parents: true,
+ mtime: {
+ secs: 5
+ }
+ })
+ await ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), {
+ create: true,
+ mtime: {
+ secs: 5
+ }
+ })
+
+ const entries = await all(ipfs.files.ls(testDir, { long: true }))
+
+ expect(entries).to.have.lengthOf(2)
+ expect(entries[0].cid.toString()).to.equal('QmTVnczjg445RUAEYNH1wvhVa2rnPoWMfHMxQc6W7HHoyM')
+ expect(entries[0].mode).to.equal(0o0644)
+ expect(entries[0].mtime).to.deep.equal({
+ secs: 5,
+ nsecs: 0
+ })
+ expect(entries[1].cid.toString()).to.equal('QmXkBjmbtWUxXLa3s541UBSzPgvaAR7b8X3Amcp5D1VKTQ')
+ expect(entries[1].mode).to.equal(0o0755)
+ expect(entries[1].mtime).to.deep.equal({
+ secs: 5,
+ nsecs: 0
+ })
+ })
+
+ it('should ls from outside of mfs', async () => {
+ const testFileName = hat()
+ const [{
+ cid
+ }] = await all(ipfs.add({ path: `/test/${testFileName}`, content: fixtures.smallFile.data }))
+ const listing = await all(ipfs.files.ls('/ipfs/' + cid))
+ expect(listing).to.have.length(1)
+ expect(listing[0].name).to.equal(cid.toString())
+ })
+
+ it('should list an empty directory', async () => {
+ const testDir = `/test-${hat()}`
+ await ipfs.files.mkdir(testDir)
+ const contents = await all(ipfs.files.ls(testDir))
+
+ expect(contents).to.be.an('array').and.to.be.empty()
+ })
+
+ it('should list a file directly', async () => {
+ const fileName = `single-file-${hat()}.txt`
+ const filePath = `/${fileName}`
+ await ipfs.files.write(filePath, Buffer.from('Hello world'), {
+ create: true
+ })
+ const entries = await all(ipfs.files.ls(filePath))
+
+ expect(entries).to.have.lengthOf(1)
+ expect(entries[0].name).to.equal(fileName)
+ expect(entries[0].type).to.equal(0)
+ expect(entries[0].size).to.equal(11)
+ expect(entries[0].cid.toString()).to.equal('Qmetpc7cZmN25Wcc6R27cGCAvCDqCS5GjHG4v7xABEfpmJ')
+ })
+ })
+}
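`files.ls` likewise returns an async iterable now, and entries carry a `cid` CID instance where the old API exposed a `hash` string. A minimal sketch of draining it with `it-all`, assuming an `ipfs` instance:

```JavaScript
const all = require('it-all')

async function listDir (ipfs, path) {
  // collect all entries, stringifying the CID for display
  const entries = await all(ipfs.files.ls(path))
  return entries.map(entry => ({ name: entry.name, cid: entry.cid.toString() }))
}
```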
diff --git a/src/files-mfs/mkdir.js b/src/files/mkdir.js
similarity index 100%
rename from src/files-mfs/mkdir.js
rename to src/files/mkdir.js
diff --git a/src/files-mfs/mv.js b/src/files/mv.js
similarity index 100%
rename from src/files-mfs/mv.js
rename to src/files/mv.js
diff --git a/src/files-mfs/read.js b/src/files/read.js
similarity index 72%
rename from src/files-mfs/read.js
rename to src/files/read.js
index 46467768..ed97418b 100644
--- a/src/files-mfs/read.js
+++ b/src/files/read.js
@@ -2,7 +2,9 @@
'use strict'
const hat = require('hat')
-const { fixtures } = require('../files-regular/utils')
+const concat = require('it-concat')
+const all = require('it-all')
+const { fixtures } = require('../utils')
const { getDescribe, getIt, expect } = require('../utils/mocha')
/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
@@ -38,17 +40,15 @@ module.exports = (common, options) => {
await ipfs.files.mkdir(testDir)
await ipfs.files.write(`${testDir}/a`, Buffer.from('Hello, world!'), { create: true })
- const buf = await ipfs.files.read(`${testDir}/a`)
+ const buf = await concat(ipfs.files.read(`${testDir}/a`))
- expect(buf).to.eql(Buffer.from('Hello, world!'))
+ expect(buf.slice()).to.eql(Buffer.from('Hello, world!'))
})
it('should read from outside of mfs', async () => {
- const [{
- hash
- }] = await ipfs.add(fixtures.smallFile.data)
- const testFileData = await ipfs.files.read(`/ipfs/${hash}`)
- expect(testFileData).to.eql(fixtures.smallFile.data)
+ const [{ cid }] = await all(ipfs.add(fixtures.smallFile.data))
+ const testFileData = await concat(ipfs.files.read(`/ipfs/${cid}`))
+ expect(testFileData.slice()).to.eql(fixtures.smallFile.data)
})
})
}
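`files.read` now returns an async iterable of buffers; `it-concat` collects them into a BufferList, and the `.slice()` calls above convert that BufferList into a plain Buffer before comparing. A minimal sketch, assuming an `ipfs` instance:

```JavaScript
const concat = require('it-concat')

async function readToBuffer (ipfs, path) {
  const bufferList = await concat(ipfs.files.read(path))
  return bufferList.slice() // BufferList -> Buffer
}
```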
diff --git a/src/files-mfs/rm.js b/src/files/rm.js
similarity index 90%
rename from src/files-mfs/rm.js
rename to src/files/rm.js
index 9a91e0d3..21223fae 100644
--- a/src/files-mfs/rm.js
+++ b/src/files/rm.js
@@ -2,6 +2,7 @@
'use strict'
const hat = require('hat')
+const all = require('it-all')
const { getDescribe, getIt, expect } = require('../utils/mocha')
/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
@@ -36,7 +37,7 @@ module.exports = (common, options) => {
await ipfs.files.rm(`${testDir}/c`)
- const contents = await ipfs.files.ls(testDir)
+ const contents = await all(ipfs.files.ls(testDir))
expect(contents).to.be.an('array').and.to.be.empty()
})
@@ -47,7 +48,7 @@ module.exports = (common, options) => {
await ipfs.files.rm(`${testDir}/lv1/lv2`, { recursive: true })
- const lv1Contents = await ipfs.files.ls(`${testDir}/lv1`)
+ const lv1Contents = await all(ipfs.files.ls(`${testDir}/lv1`))
expect(lv1Contents).to.be.an('array').and.to.be.empty()
})
})
diff --git a/src/files-mfs/stat.js b/src/files/stat.js
similarity index 91%
rename from src/files-mfs/stat.js
rename to src/files/stat.js
index 2f03eb5b..51f32f22 100644
--- a/src/files-mfs/stat.js
+++ b/src/files/stat.js
@@ -2,7 +2,8 @@
'use strict'
const hat = require('hat')
-const { fixtures } = require('../files-regular/utils')
+const all = require('it-all')
+const { fixtures } = require('../utils')
const { getDescribe, getIt, expect } = require('../utils/mocha')
/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
@@ -24,7 +25,8 @@ module.exports = (common, options) => {
args: common.opts.type === 'go' ? [] : ['--enable-sharding-experiment']
})).api
})
- before(async () => { await ipfs.add(fixtures.smallFile.data) })
+
+ before(async () => { await all(ipfs.add(fixtures.smallFile.data)) })
after(() => common.clean())
@@ -41,12 +43,13 @@ module.exports = (common, options) => {
await ipfs.files.write(`${testDir}/b`, Buffer.from('Hello, world!'), { create: true })
const stat = await ipfs.files.stat(`${testDir}/b`)
+ stat.cid = stat.cid.toString()
expect(stat).to.include({
type: 'file',
blocks: 1,
size: 13,
- hash: 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T',
+ cid: 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T',
cumulativeSize: 71,
withLocality: false
})
@@ -96,6 +99,7 @@ module.exports = (common, options) => {
await ipfs.files.write(`${testDir}/a`, Buffer.from('Hello, world!'), { create: true })
const stat = await ipfs.files.stat(testDir)
+ stat.cid = stat.cid.toString()
expect(stat).to.include({
type: 'directory',
@@ -185,12 +189,13 @@ module.exports = (common, options) => {
// TODO enable this test when this feature gets released on go-ipfs
it.skip('should stat withLocal file', async function () {
const stat = await ipfs.files.stat('/test/b', { withLocal: true })
+ stat.cid = stat.cid.toString()
expect(stat).to.eql({
type: 'file',
blocks: 1,
size: 13,
- hash: 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T',
+ cid: 'QmcZojhwragQr5qhTeFAmELik623Z21e3jBTpJXoQ9si1T',
cumulativeSize: 71,
withLocality: true,
local: true,
@@ -201,12 +206,13 @@ module.exports = (common, options) => {
// TODO enable this test when this feature gets released on go-ipfs
it.skip('should stat withLocal dir', async function () {
const stat = await ipfs.files.stat('/test', { withLocal: true })
+ stat.cid = stat.cid.toString()
expect(stat).to.eql({
type: 'directory',
blocks: 2,
size: 0,
- hash: 'QmVrkkNurBCeJvPRohW5JTvJG4AxGrFg7FnmsZZUS6nJto',
+ cid: 'QmVrkkNurBCeJvPRohW5JTvJG4AxGrFg7FnmsZZUS6nJto',
cumulativeSize: 216,
withLocality: true,
local: true,
@@ -216,12 +222,13 @@ module.exports = (common, options) => {
it('should stat outside of mfs', async () => {
const stat = await ipfs.files.stat('/ipfs/' + fixtures.smallFile.cid)
+ stat.cid = stat.cid.toString()
expect(stat).to.include({
type: 'file',
blocks: 0,
size: 12,
- hash: fixtures.smallFile.cid,
+ cid: fixtures.smallFile.cid,
cumulativeSize: 20,
withLocality: false
})
diff --git a/src/files-mfs/touch.js b/src/files/touch.js
similarity index 100%
rename from src/files-mfs/touch.js
rename to src/files/touch.js
diff --git a/src/files-mfs/write.js b/src/files/write.js
similarity index 100%
rename from src/files-mfs/write.js
rename to src/files/write.js
diff --git a/src/files-regular/get.js b/src/get.js
similarity index 65%
rename from src/files-regular/get.js
rename to src/get.js
index 8f15d0a9..609359b6 100644
--- a/src/files-regular/get.js
+++ b/src/get.js
@@ -2,9 +2,10 @@
'use strict'
const { fixtures } = require('./utils')
-const bs58 = require('bs58')
const CID = require('cids')
-const { getDescribe, getIt, expect } = require('../utils/mocha')
+const all = require('it-all')
+const concat = require('it-concat')
+const { getDescribe, getIt, expect } = require('./utils/mocha')
/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
/**
@@ -22,62 +23,63 @@ module.exports = (common, options) => {
before(async () => {
ipfs = (await common.spawn()).api
- await ipfs.add(fixtures.smallFile.data)
- await ipfs.add(fixtures.bigFile.data)
+ await all(ipfs.add(fixtures.smallFile.data))
+ await all(ipfs.add(fixtures.bigFile.data))
})
after(() => common.clean())
it('should get with a base58 encoded multihash', async () => {
- const files = await ipfs.get(fixtures.smallFile.cid)
+ const files = await all(ipfs.get(fixtures.smallFile.cid))
expect(files).to.be.length(1)
expect(files[0].path).to.eql(fixtures.smallFile.cid)
- expect(files[0].content.toString('utf8')).to.contain('Plz add me!')
+ expect((await concat(files[0].content)).toString()).to.contain('Plz add me!')
})
it('should get with a Buffer multihash', async () => {
- const cidBuf = Buffer.from(bs58.decode(fixtures.smallFile.cid))
+ const cidBuf = new CID(fixtures.smallFile.cid).multihash
- const files = await ipfs.get(cidBuf)
+ const files = await all(ipfs.get(cidBuf))
expect(files).to.be.length(1)
expect(files[0].path).to.eql(fixtures.smallFile.cid)
- expect(files[0].content.toString('utf8')).to.contain('Plz add me!')
+ expect((await concat(files[0].content)).toString()).to.contain('Plz add me!')
})
it('should get a file added as CIDv0 with a CIDv1', async () => {
const input = Buffer.from(`TEST${Date.now()}`)
- const res = await ipfs.add(input, { cidVersion: 0 })
+ const res = await all(ipfs.add(input, { cidVersion: 0 }))
- const cidv0 = new CID(res[0].hash)
+ const cidv0 = res[0].cid
expect(cidv0.version).to.equal(0)
const cidv1 = cidv0.toV1()
- const output = await ipfs.get(cidv1)
- expect(output[0].content).to.eql(input)
+ const output = await all(ipfs.get(cidv1))
+ expect((await concat(output[0].content)).slice()).to.eql(input)
})
it('should get a file added as CIDv1 with a CIDv0', async () => {
const input = Buffer.from(`TEST${Date.now()}`)
- const res = await ipfs.add(input, { cidVersion: 1, rawLeaves: false })
+ const res = await all(ipfs.add(input, { cidVersion: 1, rawLeaves: false }))
- const cidv1 = new CID(res[0].hash)
+ const cidv1 = res[0].cid
expect(cidv1.version).to.equal(1)
const cidv0 = cidv1.toV0()
- const output = await ipfs.get(cidv0)
- expect(output[0].content).to.eql(input)
+ const output = await all(ipfs.get(cidv0))
+ expect((await concat(output[0].content)).slice()).to.eql(input)
})
it('should get a BIG file', async () => {
- const files = await ipfs.get(fixtures.bigFile.cid)
- expect(files.length).to.equal(1)
- expect(files[0].path).to.equal(fixtures.bigFile.cid)
- expect(files[0].content.length).to.eql(fixtures.bigFile.data.length)
- expect(files[0].content).to.eql(fixtures.bigFile.data)
+ for await (const file of ipfs.get(fixtures.bigFile.cid)) {
+ expect(file.path).to.equal(fixtures.bigFile.cid)
+ const content = await concat(file.content)
+ expect(content.length).to.eql(fixtures.bigFile.data.length)
+ expect(content.slice()).to.eql(fixtures.bigFile.data)
+ }
})
it('should get a directory', async function () {
@@ -99,13 +101,18 @@ module.exports = (common, options) => {
emptyDir('files/empty')
]
- const res = await ipfs.add(dirs)
+ const res = await all(ipfs.add(dirs))
const root = res[res.length - 1]
expect(root.path).to.equal('test-folder')
- expect(root.hash).to.equal(fixtures.directory.cid)
+ expect(root.cid.toString()).to.equal(fixtures.directory.cid)
- let files = await ipfs.get(fixtures.directory.cid)
+ let files = await all((async function * () {
+ for await (let { path, content } of ipfs.get(fixtures.directory.cid)) {
+ content = content ? (await concat(content)).toString() : null
+ yield { path, content }
+ }
+ })())
files = files.sort((a, b) => {
if (a.path > b.path) return 1
@@ -129,13 +136,7 @@ module.exports = (common, options) => {
])
// Check contents
- const contents = files.map((file) => {
- return file.content
- ? file.content.toString()
- : null
- })
-
- expect(contents).to.include.members([
+ expect(files.map(f => f.content)).to.include.members([
fixtures.directory.files['alice.txt'].toString(),
fixtures.directory.files['files/hello.txt'].toString(),
fixtures.directory.files['files/ipfs.txt'].toString(),
@@ -151,13 +152,13 @@ module.exports = (common, options) => {
content: fixtures.smallFile.data
}
- const filesAdded = await ipfs.add(file)
+ const filesAdded = await all(ipfs.add(file))
filesAdded.forEach(async (file) => {
if (file.path === 'a') {
- const files = await ipfs.get(`/ipfs/${file.hash}/testfile.txt`)
+ const files = await all(ipfs.get(`/ipfs/${file.cid}/testfile.txt`))
expect(files).to.be.length(1)
- expect(files[0].content.toString('utf8')).to.contain('Plz add me!')
+ expect((await concat(files[0].content)).toString()).to.contain('Plz add me!')
}
})
})
@@ -168,13 +169,13 @@ module.exports = (common, options) => {
content: fixtures.smallFile.data
}
- const filesAdded = await ipfs.add([file])
+ const filesAdded = await all(ipfs.add([file]))
filesAdded.forEach(async (file) => {
if (file.path === 'a') {
- const files = await ipfs.get(`/ipfs/${file.hash}/testfile.txt`)
+ const files = await all(ipfs.get(`/ipfs/${file.cid}/testfile.txt`))
expect(files).to.be.length(1)
- expect(files[0].content.toString('utf8')).to.contain('Plz add me!')
+ expect((await concat(files[0].content)).toString()).to.contain('Plz add me!')
}
})
})
@@ -182,7 +183,7 @@ module.exports = (common, options) => {
it('should error on invalid key', async () => {
const invalidCid = 'somethingNotMultihash'
- const err = await expect(ipfs.get(invalidCid)).to.be.rejected()
+ const err = await expect(all(ipfs.get(invalidCid))).to.be.rejected()
switch (err.toString()) {
case 'Error: invalid ipfs ref path':
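`ipfs.get` now yields `{ path, content }` entries where `content` is itself an async iterable of buffers (and absent for directories, as the test's `content ? … : null` guard suggests). A minimal sketch of the same walk the directory test performs inline, assuming an `ipfs` instance:

```JavaScript
const concat = require('it-concat')

async function getTree (ipfs, cid) {
  const files = []
  for await (const file of ipfs.get(cid)) {
    // directories have no content iterable to drain
    const content = file.content ? (await concat(file.content)).toString() : null
    files.push({ path: file.path, content })
  }
  return files
}
```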
diff --git a/src/index.js b/src/index.js
index 2c353fae..3e8bd774 100644
--- a/src/index.js
+++ b/src/index.js
@@ -1,7 +1,17 @@
'use strict'
-exports.filesRegular = require('./files-regular')
-exports.filesMFS = require('./files-mfs')
+const { createSuite } = require('./utils/suite')
+
+exports.root = createSuite({
+ add: require('./add'),
+ cat: require('./cat'),
+ get: require('./get'),
+ ls: require('./ls'),
+ refs: require('./refs'),
+ refsLocal: require('./refs-local')
+})
+
+exports.files = require('./files')
exports.bitswap = require('./bitswap')
exports.block = require('./block')
diff --git a/src/key/list.js b/src/key/list.js
index 7cf88bef..792ec92a 100644
--- a/src/key/list.js
+++ b/src/key/list.js
@@ -1,7 +1,6 @@
/* eslint-env mocha */
'use strict'
-const pTimes = require('p-times')
const hat = require('hat')
const { getDescribe, getIt, expect } = require('../utils/mocha')
@@ -26,7 +25,7 @@ module.exports = (common, options) => {
it('should list all the keys', async function () {
this.timeout(60 * 1000)
- const keys = await pTimes(3, () => ipfs.key.gen(hat(), { type: 'rsa', size: 2048 }), { concurrency: 1 })
+ const keys = await Promise.all([1, 2, 3].map(() => ipfs.key.gen(hat(), { type: 'rsa', size: 2048 })))
const res = await ipfs.key.list()
expect(res).to.exist()
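Note the swap is not strictly behaviour-preserving: `pTimes(3, …, { concurrency: 1 })` generated the keys one at a time, while `Promise.all` starts all three RSA keygens concurrently. If sequential generation ever matters again, a plain loop recovers it; a minimal sketch, assuming `ipfs` and `hat` are in scope as in the test above:

```JavaScript
async function genKeysSequentially (ipfs, hat, count = 3) {
  const keys = []
  for (let i = 0; i < count; i++) {
    // awaiting inside the loop preserves the old { concurrency: 1 } ordering
    keys.push(await ipfs.key.gen(hat(), { type: 'rsa', size: 2048 }))
  }
  return keys
}
```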
diff --git a/src/ls.js b/src/ls.js
new file mode 100644
index 00000000..feebb44a
--- /dev/null
+++ b/src/ls.js
@@ -0,0 +1,198 @@
+/* eslint-env mocha */
+'use strict'
+
+const { fixtures } = require('./utils')
+const { getDescribe, getIt, expect } = require('./utils/mocha')
+const all = require('it-all')
+
+const randomName = prefix => `${prefix}${Math.round(Math.random() * 1000)}`
+
+/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
+/**
+ * @param {Factory} common
+ * @param {Object} options
+ */
+module.exports = (common, options) => {
+ const describe = getDescribe(options)
+ const it = getIt(options)
+
+ describe('.ls', function () {
+ this.timeout(40 * 1000)
+
+ let ipfs
+
+ before(async () => {
+ ipfs = (await common.spawn()).api
+ })
+
+ after(() => common.clean())
+
+ it('should ls with a base58 encoded CID', async function () {
+ const content = (name) => ({
+ path: `test-folder/${name}`,
+ content: fixtures.directory.files[name]
+ })
+
+ const emptyDir = (name) => ({ path: `test-folder/${name}` })
+
+ const dirs = [
+ content('pp.txt'),
+ content('holmes.txt'),
+ content('jungle.txt'),
+ content('alice.txt'),
+ emptyDir('empty-folder'),
+ content('files/hello.txt'),
+ content('files/ipfs.txt'),
+ emptyDir('files/empty')
+ ]
+
+ const res = await all(ipfs.add(dirs))
+
+ const root = res[res.length - 1]
+ expect(root.path).to.equal('test-folder')
+ expect(root.cid.toString()).to.equal(fixtures.directory.cid)
+
+ const cid = 'QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP'
+ const output = await all(ipfs.ls(cid))
+
+ expect(output).to.have.lengthOf(6)
+ expect(output[0].depth).to.equal(1)
+ expect(output[0].name).to.equal('alice.txt')
+ expect(output[0].path).to.equal('QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/alice.txt')
+ expect(output[0].size).to.equal(11685)
+ expect(output[0].cid.toString()).to.equal('QmZyUEQVuRK3XV7L9Dk26pg6RVSgaYkiSTEdnT2kZZdwoi')
+ expect(output[0].type).to.equal('file')
+
+ expect(output[1].depth).to.equal(1)
+ expect(output[1].name).to.equal('empty-folder')
+ expect(output[1].path).to.equal('QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/empty-folder')
+ expect(output[1].size).to.equal(0)
+ expect(output[1].cid.toString()).to.equal('QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn')
+ expect(output[1].type).to.equal('dir')
+
+ expect(output[2].depth).to.equal(1)
+ expect(output[2].name).to.equal('files')
+ expect(output[2].path).to.equal('QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/files')
+ expect(output[2].size).to.equal(0)
+ expect(output[2].cid.toString()).to.equal('QmZ25UfTqXGz9RsEJFg7HUAuBcmfx5dQZDXQd2QEZ8Kj74')
+ expect(output[2].type).to.equal('dir')
+
+ expect(output[3].depth).to.equal(1)
+ expect(output[3].name).to.equal('holmes.txt')
+ expect(output[3].path).to.equal('QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/holmes.txt')
+ expect(output[3].size).to.equal(581878)
+ expect(output[3].cid.toString()).to.equal('QmR4nFjTu18TyANgC65ArNWp5Yaab1gPzQ4D8zp7Kx3vhr')
+ expect(output[3].type).to.equal('file')
+
+ expect(output[4].depth).to.equal(1)
+ expect(output[4].name).to.equal('jungle.txt')
+ expect(output[4].path).to.equal('QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/jungle.txt')
+ expect(output[4].size).to.equal(2294)
+ expect(output[4].cid.toString()).to.equal('QmT6orWioMiSqXXPGsUi71CKRRUmJ8YkuueV2DPV34E9y9')
+ expect(output[4].type).to.equal('file')
+
+ expect(output[5].depth).to.equal(1)
+ expect(output[5].name).to.equal('pp.txt')
+ expect(output[5].path).to.equal('QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP/pp.txt')
+ expect(output[5].size).to.equal(4540)
+ expect(output[5].cid.toString()).to.equal('QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn')
+ expect(output[5].type).to.equal('file')
+ })
+
+ it('should ls files added as CIDv0 with a CIDv1', async () => {
+ const dir = randomName('DIR')
+
+ const input = [
+ { path: `${dir}/${randomName('F0')}`, content: Buffer.from(randomName('D0')) },
+ { path: `${dir}/${randomName('F1')}`, content: Buffer.from(randomName('D1')) }
+ ]
+
+ const res = await all(ipfs.add(input, { cidVersion: 0 }))
+
+ const cidv0 = res[res.length - 1].cid
+ expect(cidv0.version).to.equal(0)
+
+ const cidv1 = cidv0.toV1()
+
+ const output = await all(ipfs.ls(cidv1))
+ expect(output.length).to.equal(input.length)
+
+ output.forEach(({ cid }) => {
+ expect(res.find(file => file.cid.toString() === cid.toString())).to.exist()
+ })
+ })
+
+ it('should ls files added as CIDv1 with a CIDv0', async () => {
+ const dir = randomName('DIR')
+
+ const input = [
+ { path: `${dir}/${randomName('F0')}`, content: Buffer.from(randomName('D0')) },
+ { path: `${dir}/${randomName('F1')}`, content: Buffer.from(randomName('D1')) }
+ ]
+
+ const res = await all(ipfs.add(input, { cidVersion: 1, rawLeaves: false }))
+
+ const cidv1 = res[res.length - 1].cid
+ expect(cidv1.version).to.equal(1)
+
+      const cidv0 = cidv1.toV0()
+
+ const output = await all(ipfs.ls(cidv0))
+ expect(output.length).to.equal(input.length)
+
+ output.forEach(({ cid }) => {
+ expect(res.find(file => file.cid.toString() === cid.toString())).to.exist()
+ })
+ })
+
+    it('should correctly handle a non-existent hash', () => {
+ return expect(all(ipfs.ls('surelynotavalidhashheh?'))).to.eventually.be.rejected()
+ })
+
+    it('should correctly handle a non-existent path', () => {
+ return expect(all(ipfs.ls('QmRNjDeKStKGTQXnJ2NFqeQ9oW/folder_that_isnt_there'))).to.eventually.be.rejected()
+ })
+
+ it('should ls files by path', async () => {
+ const dir = randomName('DIR')
+
+ const input = [
+ { path: `${dir}/${randomName('F0')}`, content: Buffer.from(randomName('D0')) },
+ { path: `${dir}/${randomName('F1')}`, content: Buffer.from(randomName('D1')) }
+ ]
+
+ const res = await all(ipfs.add(input))
+ const output = await all(ipfs.ls(`/ipfs/${res[res.length - 1].cid}`))
+ expect(output.length).to.equal(input.length)
+
+ output.forEach(({ cid }) => {
+ expect(res.find(file => file.cid.toString() === cid.toString())).to.exist()
+ })
+ })
+
+ it('should ls with metadata', async () => {
+ const dir = randomName('DIR')
+ const mtime = new Date()
+ const mode = '0532'
+ const expectedMode = parseInt(mode, 8)
+ const expectedMtime = {
+ secs: Math.floor(mtime.getTime() / 1000),
+ nsecs: (mtime.getTime() - (Math.floor(mtime.getTime() / 1000) * 1000)) * 1000
+ }
+
+ const input = [
+ { path: `${dir}/${randomName('F0')}`, content: Buffer.from(randomName('D0')), mode, mtime },
+ { path: `${dir}/${randomName('F1')}`, content: Buffer.from(randomName('D1')), mode, mtime }
+ ]
+
+ const res = await all(ipfs.add(input))
+ const output = await all(ipfs.ls(`/ipfs/${res[res.length - 1].cid}`))
+
+ expect(output).to.have.lengthOf(input.length)
+ expect(output[0].mtime).to.deep.equal(expectedMtime)
+ expect(output[0].mode).to.equal(expectedMode)
+ expect(output[1].mtime).to.deep.equal(expectedMtime)
+ expect(output[1].mode).to.equal(expectedMode)
+ })
+ })
+}
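These tests lean on the CID version round-trip from the `cids` package; a minimal sketch of the conversions involved:

```JavaScript
const CID = require('cids')

const cidv0 = new CID('QmVvjDy7yF7hdnqE8Hrf4MHo5ABDtb5AbX6hWbD3Y42bXP')
const cidv1 = cidv0.toV1() // same multihash, CIDv1 encoding

console.log(cidv1.toString()) // base32-encoded by default for v1 CIDs
console.log(cidv1.toV0().equals(cidv0)) // true
```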
diff --git a/src/miscellaneous/resolve.js b/src/miscellaneous/resolve.js
index ccf4851b..8674425f 100644
--- a/src/miscellaneous/resolve.js
+++ b/src/miscellaneous/resolve.js
@@ -6,6 +6,7 @@ const loadFixture = require('aegir/fixtures')
const hat = require('hat')
const multibase = require('multibase')
const { getDescribe, getIt, expect } = require('../utils/mocha')
+const all = require('it-all')
/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
/**
@@ -29,27 +30,27 @@ module.exports = (common, options) => {
it('should resolve an IPFS hash', async () => {
const content = loadFixture('test/fixtures/testfile.txt', 'interface-ipfs-core')
- const [{ hash }] = await ipfs.add(content)
- const path = await ipfs.resolve(`/ipfs/${hash}`)
- expect(path).to.equal(`/ipfs/${hash}`)
+ const [{ cid }] = await all(ipfs.add(content))
+ const path = await ipfs.resolve(`/ipfs/${cid}`)
+ expect(path).to.equal(`/ipfs/${cid}`)
})
it('should resolve an IPFS hash and return a base64url encoded CID in path', async () => {
- const [{ hash }] = await ipfs.add(Buffer.from('base64url encoded'))
- const path = await ipfs.resolve(`/ipfs/${hash}`, { cidBase: 'base64url' })
- const [,, cid] = path.split('/')
+ const [{ cid }] = await all(ipfs.add(Buffer.from('base64url encoded')))
+ const path = await ipfs.resolve(`/ipfs/${cid}`, { cidBase: 'base64url' })
+ const [,, cidStr] = path.split('/')
- expect(multibase.isEncoded(cid)).to.equal('base64url')
+ expect(multibase.isEncoded(cidStr)).to.equal('base64url')
})
// Test resolve turns /ipfs/QmRootHash/path/to/file into /ipfs/QmFileHash
it('should resolve an IPFS path link', async () => {
const path = 'path/to/testfile.txt'
const content = loadFixture('test/fixtures/testfile.txt', 'interface-ipfs-core')
- const [{ hash: fileHash }, , , { hash: rootHash }] = await ipfs.add([{ path, content }], { wrapWithDirectory: true })
- const resolve = await ipfs.resolve(`/ipfs/${rootHash}/${path}`)
+ const [{ cid: fileCid }, , , { cid: rootCid }] = await all(ipfs.add([{ path, content }], { wrapWithDirectory: true }))
+ const resolve = await ipfs.resolve(`/ipfs/${rootCid}/${path}`)
- expect(resolve).to.equal(`/ipfs/${fileHash}`)
+ expect(resolve).to.equal(`/ipfs/${fileCid}`)
})
it('should resolve up to the last node', async () => {
@@ -83,7 +84,7 @@ module.exports = (common, options) => {
this.timeout(20 * 1000)
const node = (await common.spawn({ type: 'go' })).api
await ipfs.swarm.connect(node.peerId.addresses[0])
- const [{ path }] = await ipfs.add(Buffer.from('should resolve a record recursive === true'))
+ const [{ path }] = await all(ipfs.add(Buffer.from('should resolve a record recursive === true')))
const { id: keyId } = await ipfs.key.gen('key-name', { type: 'rsa', size: 2048 })
await ipfs.name.publish(path, { allowOffline: true })
diff --git a/src/miscellaneous/stop.js b/src/miscellaneous/stop.js
index b6f3bd3b..c8dc16d4 100644
--- a/src/miscellaneous/stop.js
+++ b/src/miscellaneous/stop.js
@@ -18,10 +18,14 @@ module.exports = (common, options) => {
it('should stop the node', async () => {
const ipfs = await common.spawn()
+      // Should succeed because the node is started
+ await ipfs.api.swarm.peers()
+
+ // Stop the node and try the call again
await ipfs.stop()
- // Trying to stop an already stopped node should return an error
- // as the node can't respond to requests anymore
- return expect(ipfs.api.stop()).to.eventually.be.rejected()
+
+ // Trying to use an API that requires a started node should return an error
+ return expect(ipfs.api.swarm.peers()).to.eventually.be.rejected()
})
})
}
diff --git a/src/name-pubsub/cancel.js b/src/name-pubsub/cancel.js
index 30cf5e81..28d13f85 100644
--- a/src/name-pubsub/cancel.js
+++ b/src/name-pubsub/cancel.js
@@ -2,8 +2,7 @@
'use strict'
const PeerId = require('peer-id')
-const { promisify } = require('es6-promisify')
-
+const all = require('it-all')
const { getDescribe, getIt, expect } = require('../utils/mocha')
/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
@@ -38,14 +37,14 @@ module.exports = (common, options) => {
it('should cancel a subscription correctly returning true', async function () {
this.timeout(300 * 1000)
- const peerId = await promisify(PeerId.create.bind(PeerId))({ bits: 512 })
+ const peerId = await PeerId.create({ bits: 512 })
const id = peerId.toB58String()
const ipnsPath = `/ipns/${id}`
const subs = await ipfs.name.pubsub.subs()
expect(subs).to.be.an('array').that.does.not.include(ipnsPath)
- await expect(ipfs.name.resolve(id)).to.be.rejected()
+ await expect(all(ipfs.name.resolve(id))).to.be.rejected()
const subs1 = await ipfs.name.pubsub.subs()
const cancel = await ipfs.name.pubsub.cancel(ipnsPath)
diff --git a/src/name-pubsub/subs.js b/src/name-pubsub/subs.js
index 9b4764ff..87edcfe7 100644
--- a/src/name-pubsub/subs.js
+++ b/src/name-pubsub/subs.js
@@ -1,6 +1,7 @@
/* eslint-env mocha */
'use strict'
+const all = require('it-all')
const { getDescribe, getIt, expect } = require('../utils/mocha')
/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
@@ -36,7 +37,7 @@ module.exports = (common, options) => {
const subs = await ipfs.name.pubsub.subs()
expect(subs).to.eql([]) // initially empty
- await expect(ipfs.name.resolve(id)).to.be.rejected()
+ await expect(all(ipfs.name.resolve(id))).to.be.rejected()
const res = await ipfs.name.pubsub.subs()
expect(res).to.be.an('array').that.does.include(`/ipns/${id}`)
diff --git a/src/name/publish.js b/src/name/publish.js
index c40abf3a..51adc515 100644
--- a/src/name/publish.js
+++ b/src/name/publish.js
@@ -5,6 +5,8 @@ const hat = require('hat')
const { fixture } = require('./utils')
const { getDescribe, getIt, expect } = require('../utils/mocha')
+const all = require('it-all')
+const last = require('it-last')
/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
/**
@@ -23,7 +25,7 @@ module.exports = (common, options) => {
before(async () => {
ipfs = (await common.spawn()).api
nodeId = ipfs.peerId.id
- await ipfs.add(fixture.data, { pin: false })
+ await all(ipfs.add(fixture.data, { pin: false }))
})
after(() => common.clean())
@@ -40,10 +42,9 @@ module.exports = (common, options) => {
})
it('should publish correctly with the lifetime option and resolve', async () => {
- const [{ path }] = await ipfs.add(Buffer.from('should publish correctly with the lifetime option and resolve'))
+ const [{ path }] = await all(ipfs.add(Buffer.from('should publish correctly with the lifetime option and resolve')))
await ipfs.name.publish(path, { allowOffline: true, resolve: false, lifetime: '2h' })
-
- return expect(await ipfs.name.resolve(`/ipns/${nodeId}`)).to.eq(`/ipfs/${path}`)
+ expect(await last(ipfs.name.resolve(`/ipns/${nodeId}`))).to.eq(`/ipfs/${path}`)
})
it('should publish correctly when the file was not added but resolve is disabled', async function () {
@@ -78,8 +79,8 @@ module.exports = (common, options) => {
}
const key = await ipfs.key.gen(keyName, { type: 'rsa', size: 2048 })
-
const res = await ipfs.name.publish(value, options)
+
expect(res).to.exist()
expect(res.name).to.equal(key.id)
expect(res.value).to.equal(`/ipfs/${value}`)
diff --git a/src/name/resolve.js b/src/name/resolve.js
index d83d6845..334e6e32 100644
--- a/src/name/resolve.js
+++ b/src/name/resolve.js
@@ -4,6 +4,8 @@
const { getDescribe, getIt, expect } = require('../utils/mocha')
const delay = require('delay')
const CID = require('cids')
+const all = require('it-all')
+const last = require('it-last')
/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
/**
@@ -28,84 +30,80 @@ module.exports = (common, options) => {
it('should resolve a record default options', async function () {
this.timeout(20 * 1000)
- const [{ path }] = await ipfs.add(Buffer.from('should resolve a record default options'))
-
+ const [{ path }] = await all(ipfs.add(Buffer.from('should resolve a record default options')))
const { id: keyId } = await ipfs.key.gen('key-name-default', { type: 'rsa', size: 2048 })
await ipfs.name.publish(path, { allowOffline: true })
await ipfs.name.publish(`/ipns/${nodeId}`, { allowOffline: true, key: 'key-name-default' })
- return expect(await ipfs.name.resolve(`/ipns/${keyId}`))
+ expect(await last(ipfs.name.resolve(`/ipns/${keyId}`)))
.to.eq(`/ipfs/${path}`)
})
it('should resolve a record from peerid as cidv1 in base32', async function () {
this.timeout(20 * 1000)
- const [{ path }] = await ipfs.add(Buffer.from('should resolve a record from cidv1b32'))
+ const [{ path }] = await all(ipfs.add(Buffer.from('should resolve a record from cidv1b32')))
const { id: peerId } = await ipfs.id()
await ipfs.name.publish(path, { allowOffline: true })
// Represent Peer ID as CIDv1 Base32
// https://github.com/libp2p/specs/blob/master/RFC/0001-text-peerid-cid.md
const keyCid = new CID(peerId).toV1().toString('base32')
- const resolvedPath = await ipfs.name.resolve(`/ipns/${keyCid}`)
+ const resolvedPath = await last(ipfs.name.resolve(`/ipns/${keyCid}`))
- return expect(resolvedPath).to.equal(`/ipfs/${path}`)
+ expect(resolvedPath).to.equal(`/ipfs/${path}`)
})
it('should resolve a record recursive === false', async () => {
- const [{ path }] = await ipfs.add(Buffer.from('should resolve a record recursive === false'))
+ const [{ path }] = await all(ipfs.add(Buffer.from('should resolve a record recursive === false')))
await ipfs.name.publish(path, { allowOffline: true })
- return expect(await ipfs.name.resolve(`/ipns/${nodeId}`, { recursive: false }))
+ expect(await last(ipfs.name.resolve(`/ipns/${nodeId}`, { recursive: false })))
.to.eq(`/ipfs/${path}`)
})
it('should resolve a record recursive === true', async function () {
this.timeout(20 * 1000)
- const [{ path }] = await ipfs.add(Buffer.from('should resolve a record recursive === true'))
-
+ const [{ path }] = await all(ipfs.add(Buffer.from('should resolve a record recursive === true')))
const { id: keyId } = await ipfs.key.gen('key-name', { type: 'rsa', size: 2048 })
await ipfs.name.publish(path, { allowOffline: true })
await ipfs.name.publish(`/ipns/${nodeId}`, { allowOffline: true, key: 'key-name' })
- return expect(await ipfs.name.resolve(`/ipns/${keyId}`, { recursive: true }))
+ expect(await last(ipfs.name.resolve(`/ipns/${keyId}`, { recursive: true })))
.to.eq(`/ipfs/${path}`)
})
it('should resolve a record default options with remainder', async function () {
this.timeout(20 * 1000)
- const [{ path }] = await ipfs.add(Buffer.from('should resolve a record default options with remainder'))
-
+ const [{ path }] = await all(ipfs.add(Buffer.from('should resolve a record default options with remainder')))
const { id: keyId } = await ipfs.key.gen('key-name-remainder-default', { type: 'rsa', size: 2048 })
await ipfs.name.publish(path, { allowOffline: true })
await ipfs.name.publish(`/ipns/${nodeId}`, { allowOffline: true, key: 'key-name-remainder-default' })
- return expect(await ipfs.name.resolve(`/ipns/${keyId}/remainder/file.txt`))
+ expect(await last(ipfs.name.resolve(`/ipns/${keyId}/remainder/file.txt`)))
.to.eq(`/ipfs/${path}/remainder/file.txt`)
})
it('should resolve a record recursive === false with remainder', async () => {
- const [{ path }] = await ipfs.add(Buffer.from('should resolve a record recursive = false with remainder'))
+ const [{ path }] = await all(ipfs.add(Buffer.from('should resolve a record recursive = false with remainder')))
await ipfs.name.publish(path, { allowOffline: true })
- return expect(await ipfs.name.resolve(`/ipns/${nodeId}/remainder/file.txt`, { recursive: false }))
+ expect(await last(ipfs.name.resolve(`/ipns/${nodeId}/remainder/file.txt`, { recursive: false })))
.to.eq(`/ipfs/${path}/remainder/file.txt`)
})
it('should resolve a record recursive === true with remainder', async function () {
this.timeout(20 * 1000)
- const [{ path }] = await ipfs.add(Buffer.from('should resolve a record recursive = true with remainder'))
-
+ const [{ path }] = await all(ipfs.add(Buffer.from('should resolve a record recursive = true with remainder')))
const { id: keyId } = await ipfs.key.gen('key-name-remainder', { type: 'rsa', size: 2048 })
await ipfs.name.publish(path, { allowOffline: true })
await ipfs.name.publish(`/ipns/${nodeId}`, { allowOffline: true, key: 'key-name-remainder' })
- return expect(await ipfs.name.resolve(`/ipns/${keyId}/remainder/file.txt`, { recursive: true }))
+ expect(await last(ipfs.name.resolve(`/ipns/${keyId}/remainder/file.txt`, { recursive: true })))
.to.eq(`/ipfs/${path}/remainder/file.txt`)
})
@@ -117,13 +115,13 @@ module.exports = (common, options) => {
}
// we add new data instead of re-using fixture to make sure lifetime handling doesn't break
- const [{ path }] = await ipfs.add(Buffer.from('should not get the entry if its validity time expired'))
+ const [{ path }] = await all(ipfs.add(Buffer.from('should not get the entry if its validity time expired')))
await ipfs.name.publish(path, publishOptions)
await delay(500)
// go only has 1 possible error https://github.com/ipfs/go-ipfs/blob/master/namesys/interface.go#L51
// so here we just expect an Error and don't match the error type to expiration
try {
- await ipfs.name.resolve(nodeId)
+ await last(ipfs.name.resolve(nodeId))
} catch (error) {
expect(error).to.exist()
}
@@ -141,45 +139,45 @@ module.exports = (common, options) => {
after(() => common.clean())
it('should resolve /ipns/ipfs.io', async () => {
- return expect(await ipfs.name.resolve('/ipns/ipfs.io'))
+ expect(await last(ipfs.name.resolve('/ipns/ipfs.io')))
.to.match(/\/ipfs\/.+$/)
})
it('should resolve /ipns/ipfs.io recursive === false', async () => {
- return expect(await ipfs.name.resolve('/ipns/ipfs.io', { recursive: false }))
+ expect(await last(ipfs.name.resolve('/ipns/ipfs.io', { recursive: false })))
.to.match(/\/ipns\/.+$/)
})
it('should resolve /ipns/ipfs.io recursive === true', async () => {
- return expect(await ipfs.name.resolve('/ipns/ipfs.io', { recursive: true }))
+ expect(await last(ipfs.name.resolve('/ipns/ipfs.io', { recursive: true })))
.to.match(/\/ipfs\/.+$/)
})
it('should resolve /ipns/ipfs.io with remainder', async () => {
- return expect(await ipfs.name.resolve('/ipns/ipfs.io/images/ipfs-logo.svg'))
+ expect(await last(ipfs.name.resolve('/ipns/ipfs.io/images/ipfs-logo.svg')))
.to.match(/\/ipfs\/.+\/images\/ipfs-logo.svg$/)
})
it('should resolve /ipns/ipfs.io with remainder recursive === false', async () => {
- return expect(await ipfs.name.resolve('/ipns/ipfs.io/images/ipfs-logo.svg', { recursive: false }))
+ expect(await last(ipfs.name.resolve('/ipns/ipfs.io/images/ipfs-logo.svg', { recursive: false })))
.to.match(/\/ipns\/.+\/images\/ipfs-logo.svg$/)
})
- it('should resolve /ipns/ipfs.io with remainder recursive === true', async () => {
- return expect(await ipfs.name.resolve('/ipns/ipfs.io/images/ipfs-logo.svg', { recursive: true }))
+ it('should resolve /ipns/ipfs.io with remainder recursive === true', async () => {
+ expect(await last(ipfs.name.resolve('/ipns/ipfs.io/images/ipfs-logo.svg', { recursive: true })))
.to.match(/\/ipfs\/.+\/images\/ipfs-logo.svg$/)
})
it('should fail to resolve /ipns/ipfs.a', async () => {
try {
- await ipfs.name.resolve('ipfs.a')
+ await last(ipfs.name.resolve('ipfs.a'))
} catch (error) {
expect(error).to.exist()
}
})
it('should resolve ipns path with hamt-shard recursive === true', async () => {
- return expect(await ipfs.name.resolve('/ipns/tr.wikipedia-on-ipfs.org/wiki/Anasayfa.html', { recursive: true }))
+ expect(await last(ipfs.name.resolve('/ipns/tr.wikipedia-on-ipfs.org/wiki/Anasayfa.html', { recursive: true })))
.to.match(/\/ipfs\/.+$/)
})
})
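`name.resolve` now returns an async iterable that can yield intermediate paths, which is why the assertions above take `it-last` of the iterable rather than awaiting a single string. A minimal sketch, assuming an `ipfs` instance:

```JavaScript
const last = require('it-last')

async function resolveName (ipfs, name) {
  // any intermediate /ipns/... hops are yielded first; keep the final path
  return last(ipfs.name.resolve(name, { recursive: true }))
}
```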
diff --git a/src/object/data.js b/src/object/data.js
index 4224b36d..50ca22b0 100644
--- a/src/object/data.js
+++ b/src/object/data.js
@@ -1,7 +1,6 @@
/* eslint-env mocha */
'use strict'
-const bs58 = require('bs58')
const hat = require('hat')
const { getDescribe, getIt, expect } = require('../utils/mocha')
@@ -33,29 +32,7 @@ module.exports = (common, options) => {
const nodeCid = await ipfs.object.put(testObj)
- let data = await ipfs.object.data(nodeCid)
- // because js-ipfs-api can't infer
- // if the returned Data is Buffer or String
- if (typeof data === 'string') {
- data = Buffer.from(data)
- }
- expect(testObj.Data).to.deep.equal(data)
- })
-
- it('should get data by base58 encoded multihash', async () => {
- const testObj = {
- Data: Buffer.from(hat()),
- Links: []
- }
-
- const nodeCid = await ipfs.object.put(testObj)
-
- let data = await ipfs.object.data(bs58.encode(nodeCid.buffer), { enc: 'base58' })
- // because js-ipfs-api can't infer
- // if the returned Data is Buffer or String
- if (typeof data === 'string') {
- data = Buffer.from(data)
- }
+ const data = await ipfs.object.data(nodeCid)
expect(testObj.Data).to.deep.equal(data)
})
@@ -67,12 +44,7 @@ module.exports = (common, options) => {
const nodeCid = await ipfs.object.put(testObj)
- let data = await ipfs.object.data(bs58.encode(nodeCid.buffer).toString(), { enc: 'base58' })
- // because js-ipfs-api can't infer if the
- // returned Data is Buffer or String
- if (typeof data === 'string') {
- data = Buffer.from(data)
- }
+ const data = await ipfs.object.data(nodeCid.toV0().toString(), { enc: 'base58' })
expect(testObj.Data).to.eql(data)
})
diff --git a/src/object/get.js b/src/object/get.js
index 5e7ecdf4..e0beb72f 100644
--- a/src/object/get.js
+++ b/src/object/get.js
@@ -8,6 +8,7 @@ const { getDescribe, getIt, expect } = require('../utils/mocha')
const UnixFs = require('ipfs-unixfs')
const crypto = require('crypto')
const { asDAGLink } = require('./utils')
+const all = require('it-all')
/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
/**
@@ -132,12 +133,12 @@ module.exports = (common, options) => {
// has to be big enough to span several DAGNodes
const data = crypto.randomBytes(1024 * 3000)
- const result = await ipfs.add({
+ const result = await all(ipfs.add({
path: '',
content: data
- })
+ }))
- const node = await ipfs.object.get(result[0].hash)
+ const node = await ipfs.object.get(result[0].cid)
const meta = UnixFs.unmarshal(node.Data)
expect(meta.fileSize()).to.equal(data.length)
diff --git a/src/object/links.js b/src/object/links.js
index a3b3c45e..61ccd72e 100644
--- a/src/object/links.js
+++ b/src/object/links.js
@@ -6,7 +6,7 @@ const DAGNode = dagPB.DAGNode
const hat = require('hat')
const { getDescribe, getIt, expect } = require('../utils/mocha')
const { asDAGLink } = require('./utils')
-const CID = require('cids')
+const all = require('it-all')
/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
/**
@@ -87,17 +87,17 @@ module.exports = (common, options) => {
it('should get links from CBOR object', async () => {
const hashes = []
- const res1 = await ipfs.add(Buffer.from('test data'))
- hashes.push(res1[0].hash)
+ const res1 = await all(ipfs.add(Buffer.from('test data')))
+ hashes.push(res1[0].cid)
- const res2 = await ipfs.add(Buffer.from('more test data'))
- hashes.push(res2[0].hash)
+ const res2 = await all(ipfs.add(Buffer.from('more test data')))
+ hashes.push(res2[0].cid)
const obj = {
some: 'data',
- mylink: new CID(hashes[0]),
+ mylink: hashes[0],
myobj: {
- anotherLink: new CID(hashes[1])
+ anotherLink: hashes[1]
}
}
const cid = await ipfs.dag.put(obj)
@@ -111,8 +111,8 @@ module.exports = (common, options) => {
// expect(names).includes('myobj/anotherLink')
const cids = [links[0].Hash.toString(), links[1].Hash.toString()]
- expect(cids).includes(hashes[0])
- expect(cids).includes(hashes[1])
+ expect(cids).includes(hashes[0].toString())
+ expect(cids).includes(hashes[1].toString())
})
it('returns error for request without argument', () => {
diff --git a/src/object/stat.js b/src/object/stat.js
index 25c60d27..b49ba3af 100644
--- a/src/object/stat.js
+++ b/src/object/stat.js
@@ -60,7 +60,12 @@ module.exports = (common, options) => {
const err = await expect(ipfs.object.stat(badCid, { timeout: `${timeout}s` })).to.be.rejected()
const timeForRequest = (new Date() - startTime) / 1000
- expect(err).to.have.property('message', 'failed to get block for QmNggDXca24S6cMPEYHZjeuc4QRmofkRrAEqVL3MzzzzzZ: context deadline exceeded')
+ if (err.code) {
+ expect(err.code).to.equal('ERR_TIMEOUT')
+ } else {
+ expect(err.message).to.equal('failed to get block for QmNggDXca24S6cMPEYHZjeuc4QRmofkRrAEqVL3MzzzzzZ: context deadline exceeded')
+ }
+
expect(timeForRequest).to.not.lessThan(timeout)
expect(timeForRequest).to.not.greaterThan(timeout + 1)
})
diff --git a/src/object/utils.js b/src/object/utils.js
index f426dfd9..e16547e0 100644
--- a/src/object/utils.js
+++ b/src/object/utils.js
@@ -1,28 +1,15 @@
'use strict'
-const { promisify } = require('es6-promisify')
const dagPB = require('ipld-dag-pb')
-const { DAGNode, DAGLink } = dagPB
-const calculateCid = (node) => dagPB.util.cid(node.serialize(), { cidVersion: 0 })
-
-const createDAGNode = promisify((data, links, cb) => {
- cb(null, new DAGNode(data, links))
-})
-
-const addLinkToDAGNode = promisify((parent, link, cb) => {
- cb(null, new DAGNode(parent.Data, parent.Links.concat(link)))
-})
+const calculateCid = node => dagPB.util.cid(node.serialize(), { cidVersion: 0 })
const asDAGLink = async (node, name = '') => {
const cid = await calculateCid(node)
-
- return new DAGLink(name, node.size, cid)
+ return new dagPB.DAGLink(name, node.size, cid)
}
module.exports = {
calculateCid,
- createDAGNode,
- addLinkToDAGNode,
asDAGLink
}
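The promisified helpers were dropped because `DAGNode` and `DAGLink` construction is synchronous in the `ipld-dag-pb` version used here; only CID calculation stays async. A minimal sketch of the remaining pattern, under that assumption:

```JavaScript
const dagPB = require('ipld-dag-pb')

// building nodes needs no callbacks or promisify
const node = new dagPB.DAGNode(Buffer.from('some data'), [])

const asDAGLink = async (node, name = '') => {
  // only the CID calculation is asynchronous
  const cid = await dagPB.util.cid(node.serialize(), { cidVersion: 0 })
  return new dagPB.DAGLink(name, node.size, cid)
}
```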
diff --git a/src/pin/add.js b/src/pin/add.js
index 1f2e9417..5e4cb2f3 100644
--- a/src/pin/add.js
+++ b/src/pin/add.js
@@ -3,6 +3,7 @@
const { fixtures } = require('./utils')
const { getDescribe, getIt, expect } = require('../utils/mocha')
+const all = require('it-all')
/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
/**
@@ -20,7 +21,7 @@ module.exports = (common, options) => {
before(async () => {
ipfs = (await common.spawn()).api
await Promise.all(fixtures.files.map(file => {
- return ipfs.add(file.data, { pin: false })
+ return all(ipfs.add(file.data, { pin: false }))
}))
})
@@ -28,9 +29,7 @@ module.exports = (common, options) => {
it('should add a pin', async () => {
const pinset = await ipfs.pin.add(fixtures.files[0].cid, { recursive: false })
- expect(pinset).to.deep.include({
- hash: fixtures.files[0].cid
- })
+ expect(pinset.map(p => p.cid.toString())).to.include(fixtures.files[0].cid)
})
})
}
diff --git a/src/pin/ls.js b/src/pin/ls.js
index 780c3e3d..ffdfe798 100644
--- a/src/pin/ls.js
+++ b/src/pin/ls.js
@@ -3,6 +3,7 @@
const { fixtures } = require('./utils')
const { getDescribe, getIt, expect } = require('../utils/mocha')
+const all = require('it-all')
/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
/**
@@ -22,13 +23,13 @@ module.exports = (common, options) => {
ipfs = (await common.spawn()).api
// two files wrapped in directories, only root CID pinned recursively
const dir = fixtures.directory.files.map((file) => ({ path: file.path, content: file.data }))
- await ipfs.add(dir, { pin: false, cidVersion: 0 })
+ await all(ipfs.add(dir, { pin: false, cidVersion: 0 }))
await ipfs.pin.add(fixtures.directory.cid, { recursive: true })
// a file (CID pinned recursively)
- await ipfs.add(fixtures.files[0].data, { pin: false, cidVersion: 0 })
+ await all(ipfs.add(fixtures.files[0].data, { pin: false, cidVersion: 0 }))
await ipfs.pin.add(fixtures.files[0].cid, { recursive: true })
// a single CID (pinned directly)
- await ipfs.add(fixtures.files[1].data, { pin: false, cidVersion: 0 })
+ await all(ipfs.add(fixtures.files[1].data, { pin: false, cidVersion: 0 }))
await ipfs.pin.add(fixtures.files[1].cid, { recursive: false })
})
@@ -36,118 +37,119 @@ module.exports = (common, options) => {
// 1st, because ipfs.add pins automatically
it('should list all recursive pins', async () => {
- const pinset = await ipfs.pin.ls({ type: 'recursive' })
+ const pinset = (await all(ipfs.pin.ls({ type: 'recursive' })))
+ .map(p => ({ ...p, cid: p.cid.toString() }))
+
expect(pinset).to.deep.include({
type: 'recursive',
- hash: fixtures.files[0].cid
+ cid: fixtures.files[0].cid
})
expect(pinset).to.deep.include({
type: 'recursive',
- hash: fixtures.directory.cid
+ cid: fixtures.directory.cid
})
})
it('should list all indirect pins', async () => {
- const pinset = await ipfs.pin.ls({ type: 'indirect' })
+ const pinset = (await all(ipfs.pin.ls({ type: 'indirect' })))
+ .map(p => ({ ...p, cid: p.cid.toString() }))
+
expect(pinset).to.not.deep.include({
type: 'recursive',
- hash: fixtures.files[0].cid
+ cid: fixtures.files[0].cid
})
expect(pinset).to.not.deep.include({
type: 'direct',
- hash: fixtures.files[1].cid
+ cid: fixtures.files[1].cid
})
expect(pinset).to.not.deep.include({
type: 'recursive',
- hash: fixtures.directory.cid
+ cid: fixtures.directory.cid
})
expect(pinset).to.deep.include({
type: 'indirect',
- hash: fixtures.directory.files[0].cid
+ cid: fixtures.directory.files[0].cid
})
expect(pinset).to.deep.include({
type: 'indirect',
- hash: fixtures.directory.files[1].cid
+ cid: fixtures.directory.files[1].cid
})
})
it('should list all types of pins', async () => {
- const pinset = await ipfs.pin.ls()
+ const pinset = (await all(ipfs.pin.ls()))
+ .map(p => ({ ...p, cid: p.cid.toString() }))
+
expect(pinset).to.not.be.empty()
// check the three "roots"
expect(pinset).to.deep.include({
type: 'recursive',
- hash: fixtures.directory.cid
+ cid: fixtures.directory.cid
})
expect(pinset).to.deep.include({
type: 'recursive',
- hash: fixtures.files[0].cid
+ cid: fixtures.files[0].cid
})
expect(pinset).to.deep.include({
type: 'direct',
- hash: fixtures.files[1].cid
+ cid: fixtures.files[1].cid
})
expect(pinset).to.deep.include({
type: 'indirect',
- hash: fixtures.directory.files[0].cid
+ cid: fixtures.directory.files[0].cid
})
expect(pinset).to.deep.include({
type: 'indirect',
- hash: fixtures.directory.files[1].cid
+ cid: fixtures.directory.files[1].cid
})
})
it('should list all direct pins', async () => {
- const pinset = await ipfs.pin.ls({ type: 'direct' })
+ const pinset = await all(ipfs.pin.ls({ type: 'direct' }))
expect(pinset).to.have.lengthOf(1)
- expect(pinset).to.deep.include({
- type: 'direct',
- hash: fixtures.files[1].cid
- })
+ expect(pinset[0].type).to.equal('direct')
+ expect(pinset[0].cid.toString()).to.equal(fixtures.files[1].cid)
})
it('should list pins for a specific hash', async () => {
- const pinset = await ipfs.pin.ls(fixtures.files[0].cid)
- expect(pinset).to.deep.equal([{
- type: 'recursive',
- hash: fixtures.files[0].cid
- }])
+ const pinset = await all(ipfs.pin.ls(fixtures.files[0].cid))
+ expect(pinset).to.have.lengthOf(1)
+ expect(pinset[0].type).to.equal('recursive')
+ expect(pinset[0].cid.toString()).to.equal(fixtures.files[0].cid)
})
it('should throw an error on missing direct pins for existing path', () => {
// ipfs.txt is an indirect pin, so lookup for direct one should throw an error
- return expect(ipfs.pin.ls(`/ipfs/${fixtures.directory.cid}/files/ipfs.txt`, { type: 'direct' }))
+ return expect(all(ipfs.pin.ls(`/ipfs/${fixtures.directory.cid}/files/ipfs.txt`, { type: 'direct' })))
.to.eventually.be.rejected
.and.be.an.instanceOf(Error)
.and.to.have.property('message', `path '/ipfs/${fixtures.directory.cid}/files/ipfs.txt' is not pinned`)
})
it('should throw an error on missing link for a specific path', () => {
- return expect(ipfs.pin.ls(`/ipfs/${fixtures.directory.cid}/I-DONT-EXIST.txt`, { type: 'direct' }))
+ return expect(all(ipfs.pin.ls(`/ipfs/${fixtures.directory.cid}/I-DONT-EXIST.txt`, { type: 'direct' })))
.to.eventually.be.rejected
.and.be.an.instanceOf(Error)
.and.to.have.property('message', `no link named "I-DONT-EXIST.txt" under ${fixtures.directory.cid}`)
})
it('should list indirect pins for a specific path', async () => {
- const pinset = await ipfs.pin.ls(`/ipfs/${fixtures.directory.cid}/files/ipfs.txt`, { type: 'indirect' })
- expect(pinset).to.deep.include({
- type: `indirect through ${fixtures.directory.cid}`,
- hash: fixtures.directory.files[1].cid
- })
+ const pinset = await all(ipfs.pin.ls(`/ipfs/${fixtures.directory.cid}/files/ipfs.txt`, { type: 'indirect' }))
+ expect(pinset).to.have.lengthOf(1)
+ expect(pinset[0].type).to.equal(`indirect through ${fixtures.directory.cid}`)
+ expect(pinset[0].cid.toString()).to.equal(fixtures.directory.files[1].cid)
})
it('should list recursive pins for a specific hash', async () => {
- const pinset = await ipfs.pin.ls(fixtures.files[0].cid, { type: 'recursive' })
- expect(pinset).to.deep.equal([{
- type: 'recursive',
- hash: fixtures.files[0].cid
- }])
+ const pinset = await all(ipfs.pin.ls(fixtures.files[0].cid, { type: 'recursive' }))
+ expect(pinset).to.have.lengthOf(1)
+ expect(pinset[0].type).to.equal('recursive')
+ expect(pinset[0].cid.toString()).to.equal(fixtures.files[0].cid)
})
it('should list pins for multiple CIDs', async () => {
- const pinset = await ipfs.pin.ls([fixtures.files[0].cid, fixtures.files[1].cid])
- const cids = pinset.map(({ hash }) => hash)
+ const pinset = await all(ipfs.pin.ls([fixtures.files[0].cid, fixtures.files[1].cid]))
+ const cids = pinset.map(p => p.cid.toString())
expect(cids).to.include(fixtures.files[0].cid)
expect(cids).to.include(fixtures.files[1].cid)
})
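`pin.ls` now streams `{ type, cid }` entries where `cid` is a CID instance, which is why every assertion above stringifies before comparing. A sketch of the normalisation step the tests repeat:

```JavaScript
// Sketch: drain pin.ls and normalise CID instances to strings
const all = require('it-all')

async function listPins (ipfs, type) {
  const pinset = await all(ipfs.pin.ls(type ? { type } : {}))
  return pinset.map(p => ({ type: p.type, cid: p.cid.toString() }))
}
```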
diff --git a/src/pin/rm.js b/src/pin/rm.js
index 28ba073c..75653816 100644
--- a/src/pin/rm.js
+++ b/src/pin/rm.js
@@ -3,6 +3,7 @@
const { fixtures } = require('./utils')
const { getDescribe, getIt, expect } = require('../utils/mocha')
+const all = require('it-all')
/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
/**
@@ -19,9 +20,9 @@ module.exports = (common, options) => {
let ipfs
before(async () => {
ipfs = (await common.spawn()).api
- await ipfs.add(fixtures.files[0].data, { pin: false })
+ await all(ipfs.add(fixtures.files[0].data, { pin: false }))
await ipfs.pin.add(fixtures.files[0].cid, { recursive: true })
- await ipfs.add(fixtures.files[1].data, { pin: false })
+ await all(ipfs.add(fixtures.files[1].data, { pin: false }))
await ipfs.pin.add(fixtures.files[1].cid, { recursive: false })
})
@@ -29,27 +30,21 @@ module.exports = (common, options) => {
it('should remove a recursive pin', async () => {
const removedPinset = await ipfs.pin.rm(fixtures.files[0].cid, { recursive: true })
- expect(removedPinset).to.deep.equal([{
- hash: fixtures.files[0].cid
- }])
+ expect(removedPinset.map(p => p.cid.toString())).to.deep.equal([fixtures.files[0].cid])
- const pinset = await ipfs.pin.ls({ type: 'recursive' })
- expect(pinset).to.not.deep.include({
- hash: fixtures.files[0].cid,
+ const pinset = await all(ipfs.pin.ls({ type: 'recursive' }))
+ expect(pinset.map(p => ({ ...p, cid: p.cid.toString() }))).to.not.deep.include({
+ cid: fixtures.files[0].cid,
type: 'recursive'
})
})
it('should remove a direct pin', async () => {
const removedPinset = await ipfs.pin.rm(fixtures.files[1].cid, { recursive: false })
- expect(removedPinset).to.deep.equal([{
- hash: fixtures.files[1].cid
- }])
+ expect(removedPinset.map(p => p.cid.toString())).to.deep.equal([fixtures.files[1].cid])
- const pinset = await ipfs.pin.ls({ type: 'direct' })
- expect(pinset).to.not.deep.include({
- hash: fixtures.files[1].cid
- })
+ const pinset = await all(ipfs.pin.ls({ type: 'direct' }))
+ expect(pinset.map(p => p.cid.toString())).to.not.include(fixtures.files[1].cid)
})
})
}
diff --git a/src/ping/index.js b/src/ping/index.js
index a33bbddc..1994eced 100644
--- a/src/ping/index.js
+++ b/src/ping/index.js
@@ -2,9 +2,7 @@
const { createSuite } = require('../utils/suite')
const tests = {
- ping: require('./ping'),
- pingPullStream: require('./ping-pull-stream'),
- pingReadableStream: require('./ping-readable-stream')
+ ping: require('./ping')
}
module.exports = createSuite(tests)
diff --git a/src/ping/ping-pull-stream.js b/src/ping/ping-pull-stream.js
deleted file mode 100644
index 23c48077..00000000
--- a/src/ping/ping-pull-stream.js
+++ /dev/null
@@ -1,65 +0,0 @@
-/* eslint-env mocha */
-'use strict'
-
-const pullToPromise = require('pull-to-promise')
-const { getDescribe, getIt, expect } = require('../utils/mocha')
-const { isPong } = require('./utils.js')
-
-/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
-/**
- * @param {Factory} common
- * @param {Object} options
- */
-module.exports = (common, options) => {
- const describe = getDescribe(options)
- const it = getIt(options)
-
- describe('.pingPullStream', function () {
- this.timeout(60 * 1000)
-
- let ipfsA
- let ipfsB
-
- before(async () => {
- ipfsA = (await common.spawn()).api
- ipfsB = (await common.spawn({ type: 'js' })).api
- await ipfsA.swarm.connect(ipfsB.peerId.addresses[0])
- })
-
- after(() => common.clean())
-
- it('should send the specified number of packets over pull stream', async () => {
- const count = 3
-
- const results = await pullToPromise.any(ipfsA.pingPullStream(ipfsB.peerId.id, { count }))
-
- const packetNum = results.reduce((acc, result) => {
- expect(result.success).to.be.true()
-
- if (isPong(result)) {
- acc++
- }
-
- return acc
- }, 0)
-
- expect(packetNum).to.equal(count)
- })
-
- it('should fail when pinging an unknown peer over pull stream', () => {
- const unknownPeerId = 'QmUmaEnH1uMmvckMZbh3yShaasvELPW4ZLPWnB4entMTEn'
- const count = 2
-
- return expect(pullToPromise.any(ipfsA.pingPullStream(unknownPeerId, { count })))
- .to.eventually.be.rejected()
- })
-
- it('should fail when pinging an invalid peer id over pull stream', () => {
- const invalidPeerId = 'not a peer ID'
- const count = 2
-
- return expect(pullToPromise.any(ipfsA.pingPullStream(invalidPeerId, { count })))
- .to.eventually.be.rejected()
- })
- })
-}
diff --git a/src/ping/ping-readable-stream.js b/src/ping/ping-readable-stream.js
deleted file mode 100644
index 75b3ff84..00000000
--- a/src/ping/ping-readable-stream.js
+++ /dev/null
@@ -1,96 +0,0 @@
-/* eslint-env mocha */
-'use strict'
-
-const pump = require('pump')
-const { Writable } = require('stream')
-const { getDescribe, getIt, expect } = require('../utils/mocha')
-const { isPong } = require('./utils.js')
-
-/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
-/**
- * @param {Factory} common
- * @param {Object} options
- */
-module.exports = (common, options) => {
- const describe = getDescribe(options)
- const it = getIt(options)
-
- describe('.pingReadableStream', function () {
- this.timeout(60 * 1000)
-
- let ipfsA
- let ipfsB
-
- before(async () => {
- ipfsA = (await common.spawn()).api
- ipfsB = (await common.spawn({ type: 'js' })).api
- await ipfsA.swarm.connect(ipfsB.peerId.addresses[0])
- })
-
- after(() => common.clean())
-
- it('should send the specified number of packets over readable stream', () => {
- let packetNum = 0
- const count = 3
-
- return new Promise((resolve, reject) => {
- pump(
- ipfsA.pingReadableStream(ipfsB.peerId.id, { count }),
- new Writable({
- objectMode: true,
- write (res, enc, cb) {
- expect(res.success).to.be.true()
- // It's a pong
- if (isPong(res)) {
- packetNum++
- }
-
- cb()
- }
- }),
- (err) => {
- expect(err).to.not.exist()
- expect(packetNum).to.equal(count)
- resolve()
- }
- )
- })
- })
-
- it('should fail when pinging peer that is not available over readable stream', () => {
- const unknownPeerId = 'QmUmaEnH1uMmvckMZbh3yShaasvELPW4ZLPWnB4entMTEn'
-
- return new Promise((resolve, reject) => {
- pump(
- ipfsA.pingReadableStream(unknownPeerId, {}),
- new Writable({
- objectMode: true,
- write: (res, enc, cb) => cb()
- }),
- (err) => {
- expect(err).to.exist()
- resolve()
- }
- )
- })
- })
-
- it('should fail when pinging an invalid peer id over readable stream', () => {
- const invalidPeerId = 'not a peer ID'
-
- return new Promise((resolve, reject) => {
- pump(
- ipfsA.pingReadableStream(invalidPeerId, {}),
- new Writable({
- objectMode: true,
- write: (chunk, enc, cb) => cb()
- }),
- (err) => {
- expect(err).to.exist()
- resolve()
- }
- )
- })
- })
- })
-}
diff --git a/src/ping/ping.js b/src/ping/ping.js
index 00e42b6c..12a77d6c 100644
--- a/src/ping/ping.js
+++ b/src/ping/ping.js
@@ -3,6 +3,7 @@
const { getDescribe, getIt, expect } = require('../utils/mocha')
const { expectIsPingResponse, isPong } = require('./utils')
+const all = require('it-all')
/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
/**
@@ -29,7 +30,7 @@ module.exports = (common, options) => {
it('should send the specified number of packets', async () => {
const count = 3
- const responses = await ipfsA.ping(ipfsB.peerId.id, { count })
+ const responses = await all(ipfsA.ping(ipfsB.peerId.id, { count }))
responses.forEach(expectIsPingResponse)
const pongs = responses.filter(isPong)
@@ -40,14 +41,14 @@ module.exports = (common, options) => {
const notAvailablePeerId = 'QmUmaEnH1uMmvckMZbh3yShaasvELPW4ZLPWnB4entMTEn'
const count = 2
- return expect(ipfsA.ping(notAvailablePeerId, { count })).to.eventually.be.rejected()
+ return expect(all(ipfsA.ping(notAvailablePeerId, { count }))).to.eventually.be.rejected()
})
it('should fail when pinging an invalid peer Id', () => {
const invalidPeerId = 'not a peer ID'
const count = 2
- return expect(ipfsA.ping(invalidPeerId, { count })).to.eventually.be.rejected()
+ return expect(all(ipfsA.ping(invalidPeerId, { count }))).to.eventually.be.rejected()
})
})
}
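`ping` is an async iterable as well, so failures for unknown or invalid peers only surface once the stream is drained, hence the `all(...)` wrapper inside the rejection assertions. A sketch of the happy path, reusing the suite's `isPong` predicate:

```JavaScript
// Sketch: count pong responses from a connected peer
const all = require('it-all')
const { isPong } = require('./utils')

async function countPongs (ipfsA, peerId, count) {
  const responses = await all(ipfsA.ping(peerId, { count }))
  return responses.filter(isPong).length // expected to equal `count`
}
```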
diff --git a/src/pubsub/subscribe.js b/src/pubsub/subscribe.js
index f2b91faa..0df59523 100644
--- a/src/pubsub/subscribe.js
+++ b/src/pubsub/subscribe.js
@@ -2,7 +2,7 @@
'use strict'
const pushable = require('it-pushable')
-const { collect } = require('streaming-iterables')
+const all = require('it-all')
const { waitForPeers, getTopic } = require('./utils')
const { getDescribe, getIt, expect } = require('../utils/mocha')
const delay = require('delay')
@@ -108,10 +108,10 @@ module.exports = (common, options) => {
await ipfs1.pubsub.publish(topic, Buffer.from('hello'))
- const [handler1Msg] = await collect(msgStream1)
+ const [handler1Msg] = await all(msgStream1)
expect(handler1Msg.data.toString()).to.eql('hello')
- const [handler2Msg] = await collect(msgStream2)
+ const [handler2Msg] = await all(msgStream2)
expect(handler2Msg.data.toString()).to.eql('hello')
await ipfs1.pubsub.unsubscribe(topic, handler1)
@@ -181,11 +181,11 @@ module.exports = (common, options) => {
await ipfs2.pubsub.publish(topic, Buffer.from(expectedString))
- const [sub1Msg] = await collect(msgStream1)
+ const [sub1Msg] = await all(msgStream1)
expect(sub1Msg.data.toString()).to.be.eql(expectedString)
expect(sub1Msg.from).to.eql(ipfs2.peerId.id)
- const [sub2Msg] = await collect(msgStream2)
+ const [sub2Msg] = await all(msgStream2)
expect(sub2Msg.data.toString()).to.be.eql(expectedString)
expect(sub2Msg.from).to.eql(ipfs2.peerId.id)
})
@@ -215,11 +215,11 @@ module.exports = (common, options) => {
await ipfs2.pubsub.publish(topic, buffer)
- const [sub1Msg] = await collect(msgStream1)
+ const [sub1Msg] = await all(msgStream1)
expect(sub1Msg.data.toString('hex')).to.be.eql(expectedHex)
expect(sub1Msg.from).to.eql(ipfs2.peerId.id)
- const [sub2Msg] = await collect(msgStream2)
+ const [sub2Msg] = await all(msgStream2)
expect(sub2Msg.data.toString('hex')).to.be.eql(expectedHex)
expect(sub2Msg.from).to.eql(ipfs2.peerId.id)
})
@@ -253,12 +253,12 @@ module.exports = (common, options) => {
outbox.forEach(msg => ipfs2.pubsub.publish(topic, Buffer.from(msg)))
- const sub1Msgs = await collect(msgStream1)
+ const sub1Msgs = await all(msgStream1)
sub1Msgs.forEach(msg => expect(msg.from).to.eql(ipfs2.peerId.id))
const inbox1 = sub1Msgs.map(msg => msg.data.toString())
expect(inbox1.sort()).to.eql(outbox.sort())
- const sub2Msgs = await collect(msgStream2)
+ const sub2Msgs = await all(msgStream2)
sub2Msgs.forEach(msg => expect(msg.from).to.eql(ipfs2.peerId.id))
const inbox2 = sub2Msgs.map(msg => msg.data.toString())
expect(inbox2.sort()).to.eql(outbox.sort())
@@ -292,7 +292,7 @@ module.exports = (common, options) => {
await ipfs2.pubsub.publish(topic, msgData)
}
- const msgs = await collect(msgStream)
+ const msgs = await all(msgStream)
const duration = new Date().getTime() - startTime
const opsPerSec = Math.floor(count / (duration / 1000))
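Only the collector changes here (`it-all` replaces `collect` from `streaming-iterables`), but the surrounding pattern is worth spelling out: subscribe handlers push incoming messages into an `it-pushable` queue, and the test awaits the queue as an async iterable. A minimal sketch, with the message-count bookkeeping as an assumption:

```JavaScript
// Sketch: bridge a pubsub handler to an async iterable and await N messages
const pushable = require('it-pushable')
const all = require('it-all')

async function collectMessages (ipfs, topic, expected) {
  const msgStream = pushable()
  let received = 0
  await ipfs.pubsub.subscribe(topic, msg => {
    msgStream.push(msg)
    if (++received === expected) msgStream.end() // close the iterable
  })
  return all(msgStream) // resolves with all pushed messages once ended
}
```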
diff --git a/src/pubsub/unsubscribe.js b/src/pubsub/unsubscribe.js
index f58f9308..3f83014c 100644
--- a/src/pubsub/unsubscribe.js
+++ b/src/pubsub/unsubscribe.js
@@ -46,7 +46,7 @@ module.exports = (common, options) => {
expect(topics).to.eql([])
})
- it(`should subscribe ${count} handlers and unsunscribe once with no reference to the handlers`, async () => {
+ it(`should subscribe ${count} handlers and unsubscribe once with no reference to the handlers`, async () => {
const someTopic = getTopic()
for (let i = 0; i < count; i++) {
await ipfs.pubsub.subscribe(someTopic, (msg) => {})
diff --git a/src/files-regular/refs-local-tests.js b/src/refs-local.js
similarity index 73%
rename from src/files-regular/refs-local-tests.js
rename to src/refs-local.js
index 663a8e26..fa8b9bb8 100644
--- a/src/files-regular/refs-local-tests.js
+++ b/src/refs-local.js
@@ -2,21 +2,20 @@
'use strict'
const { fixtures } = require('./utils')
-const { getDescribe, getIt, expect } = require('../utils/mocha')
+const { getDescribe, getIt, expect } = require('./utils/mocha')
+const all = require('it-all')
/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
/**
* @param {Factory} common
- * @param {*} suiteName
- * @param {*} ipfsRefsLocal
* @param {Object} options
*/
-module.exports = (common, suiteName, ipfsRefsLocal, options) => {
+module.exports = (common, options) => {
const describe = getDescribe(options)
const it = getIt(options)
- describe(suiteName, function () {
- this.timeout(40 * 1000)
+ describe('.refs.local', function () {
+ this.timeout(60 * 1000)
let ipfs
@@ -37,9 +36,9 @@ module.exports = (common, suiteName, ipfsRefsLocal, options) => {
content('holmes.txt')
]
- await ipfs.add(dirs)
+ await all(ipfs.add(dirs))
- const refs = await ipfsRefsLocal(ipfs)
+ const refs = await all(ipfs.refs.local())
const cids = refs.map(r => r.ref)
expect(cids).to.include('QmVwdDCY4SPGVFnNCiZnX5CtzwWDn6kAM98JXzKxE3kCmn')
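After the rename, the suite calls `ipfs.refs.local()` directly instead of receiving the call as a parameter; it yields `{ ref }` objects. A short sketch:

```JavaScript
// Sketch: list the CIDs of all locally stored blocks
const all = require('it-all')

async function localRefs (ipfs) {
  const refs = await all(ipfs.refs.local())
  return refs.map(r => r.ref) // each ref is a block CID as a string
}
```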
diff --git a/src/files-regular/refs-tests.js b/src/refs.js
similarity index 83%
rename from src/files-regular/refs-tests.js
rename to src/refs.js
index d974614f..eaa24670 100644
--- a/src/files-regular/refs-tests.js
+++ b/src/refs.js
@@ -1,25 +1,22 @@
/* eslint-env mocha */
'use strict'
-const pMapSeries = require('p-map-series')
-const pTimeout = require('p-timeout')
-const { getDescribe, getIt, expect } = require('../utils/mocha')
+const { getDescribe, getIt, expect } = require('./utils/mocha')
const loadFixture = require('aegir/fixtures')
const CID = require('cids')
+const all = require('it-all')
/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
/**
* @param {Factory} common
- * @param {*} suiteName
- * @param {*} ipfsRefs
* @param {Object} options
*/
-module.exports = (common, suiteName, ipfsRefs, options) => {
+module.exports = (common, options) => {
const describe = getDescribe(options)
const it = getIt(options)
- describe(suiteName, function () {
- this.timeout(40 * 1000)
+ describe('.refs', function () {
+ this.timeout(60 * 1000)
let ipfs, pbRootCb, dagRootCid
@@ -49,28 +46,28 @@ module.exports = (common, suiteName, ipfsRefs, options) => {
const p = (path ? path(pbRootCb) : pbRootCb)
if (expectTimeout) {
- return expect(pTimeout(ipfsRefs(ipfs)(p, params), expectTimeout)).to.eventually.be.rejected
+ return expect(all(ipfs.refs(p, params))).to.eventually.be.rejected
.and.be.an.instanceOf(Error)
.and.to.have.property('name')
.to.eql('TimeoutError')
}
if (expectError) {
- return expect(ipfsRefs(ipfs)(p, params)).to.be.eventually.rejected.and.be.an.instanceOf(Error)
+ return expect(all(ipfs.refs(p, params))).to.be.eventually.rejected.and.be.an.instanceOf(Error)
}
- const refs = await ipfsRefs(ipfs)(p, params)
+ const refs = await all(ipfs.refs(p, params))
// Check there was no error and the refs match what was expected
expect(refs.map(r => r.ref)).to.eql(expected)
})
}
- it('dag refs test', async function () {
+ it('should get refs with cbor links', async function () {
this.timeout(20 * 1000)
// Call out to IPFS
- const refs = await ipfsRefs(ipfs)(`/ipfs/${dagRootCid}`, { recursive: true })
+ const refs = await all(ipfs.refs(`/ipfs/${dagRootCid}`, { recursive: true }))
// Check the refs match what was expected
expect(refs.map(r => r.ref).sort()).to.eql([
'QmPDqvcuA4AkhBLBuh2y49yhUB98rCnxPxa3eVNC1kAbSC',
@@ -113,7 +110,7 @@ function getMockObjects () {
function getRefsTests () {
return {
- 'prints added files': {
+ 'should print added files': {
params: {},
expected: [
'QmYEJ7qQNZUvBnv4SZ3rEbksagaan3sGvnUq948vSG8Z34',
@@ -123,7 +120,7 @@ function getRefsTests () {
]
},
- 'prints files in edges format': {
+ 'should print files in edges format': {
params: { edges: true },
expected: [
'Qmd5MhNjx3NSZm3L2QKG1TFvqkTRbtZwGJinqEfqpfHH7s -> QmYEJ7qQNZUvBnv4SZ3rEbksagaan3sGvnUq948vSG8Z34',
@@ -133,7 +130,7 @@ function getRefsTests () {
]
},
- 'prints files in custom format': {
+ 'should print files in custom format': {
      params: { format: '<linkname>: <src> => <dst>' },
expected: [
'animals: Qmd5MhNjx3NSZm3L2QKG1TFvqkTRbtZwGJinqEfqpfHH7s => QmYEJ7qQNZUvBnv4SZ3rEbksagaan3sGvnUq948vSG8Z34',
@@ -143,7 +140,7 @@ function getRefsTests () {
]
},
-    'follows a path, /<subdir>': {
+    'should follow a path, /<subdir>': {
path: (cid) => `/ipfs/${cid}/animals`,
      params: { format: '<linkname>' },
expected: [
@@ -152,7 +149,7 @@ function getRefsTests () {
]
},
-    'follows a path, /<subdir>/<subdir>': {
+    'should follow a path, /<subdir>/<subdir>': {
path: (cid) => `/ipfs/${cid}/animals/land`,
      params: { format: '<linkname>' },
expected: [
@@ -162,7 +159,7 @@ function getRefsTests () {
]
},
-    'follows a path with recursion, /<subdir>': {
+    'should follow a path with recursion, /<subdir>': {
path: (cid) => `/ipfs/${cid}/animals`,
      params: { format: '<linkname>', recursive: true },
expected: [
@@ -176,7 +173,7 @@ function getRefsTests () {
]
},
- 'recursively follows folders, -r': {
+    'should recursively follow folders, -r': {
      params: { format: '<linkname>', recursive: true },
expected: [
'animals',
@@ -194,7 +191,7 @@ function getRefsTests () {
]
},
- 'recursive with unique option': {
+ 'should get refs with recursive and unique option': {
      params: { format: '<linkname>', recursive: true, unique: true },
expected: [
'animals',
@@ -211,7 +208,7 @@ function getRefsTests () {
]
},
- 'max depth of 1': {
+ 'should get refs with max depth of 1': {
      params: { format: '<linkname>', recursive: true, maxDepth: 1 },
expected: [
'animals',
@@ -221,7 +218,7 @@ function getRefsTests () {
]
},
- 'max depth of 2': {
+ 'should get refs with max depth of 2': {
      params: { format: '<linkname>', recursive: true, maxDepth: 2 },
expected: [
'animals',
@@ -234,7 +231,7 @@ function getRefsTests () {
]
},
- 'max depth of 3': {
+ 'should get refs with max depth of 3': {
      params: { format: '<linkname>', recursive: true, maxDepth: 3 },
expected: [
'animals',
@@ -252,12 +249,12 @@ function getRefsTests () {
]
},
- 'max depth of 0': {
+ 'should get refs with max depth of 0': {
params: { recursive: true, maxDepth: 0 },
expected: []
},
-    'follows a path with max depth 1, /<subdir>': {
+    'should follow a path with max depth 1, /<subdir>': {
path: (cid) => `/ipfs/${cid}/animals`,
      params: { format: '<linkname>', recursive: true, maxDepth: 1 },
expected: [
@@ -266,7 +263,7 @@ function getRefsTests () {
]
},
-    'follows a path with max depth 2, /<subdir>': {
+    'should follow a path with max depth 2, /<subdir>': {
path: (cid) => `/ipfs/${cid}/animals`,
      params: { format: '<linkname>', recursive: true, maxDepth: 2 },
expected: [
@@ -280,7 +277,7 @@ function getRefsTests () {
]
},
- 'prints refs for multiple paths': {
+ 'should print refs for multiple paths': {
path: (cid) => [`/ipfs/${cid}/animals`, `/ipfs/${cid}/fruits`],
      params: { format: '<linkname>', recursive: true },
expected: [
@@ -295,14 +292,15 @@ function getRefsTests () {
]
},
- 'cannot specify edges and format': {
+ 'should not be able to specify edges and format': {
      params: { format: '<linkname>', edges: true },
expectError: true
},
- 'prints nothing for non-existent hashes': {
+ 'should print nothing for non-existent hashes': {
path: () => 'QmYmW4HiZhotsoSqnv2o1oSssvkRM8b9RweBoH7ao5nki2',
- expectTimeout: 4000
+ params: { timeout: 2000 },
+ expectTimeout: true
}
}
}
@@ -322,8 +320,8 @@ function loadPbContent (ipfs, node) {
function loadDagContent (ipfs, node) {
const store = {
putData: async (data) => {
- const res = await ipfs.add(data)
- return res[0].hash
+ const res = await all(ipfs.add(data))
+ return res[0].cid
},
putLinks: (links) => {
const obj = {}
@@ -352,10 +350,12 @@ async function loadContent (ipfs, store, node) {
return 0
})
- const res = await pMapSeries(sorted, async ([name, child]) => {
- const cid = await loadContent(ipfs, store, child)
- return { name, cid: cid && cid.toString() }
- })
+ const res = await all((async function * () {
+ for (const [name, child] of sorted) {
+ const cid = await loadContent(ipfs, store, child)
+ yield { name, cid: cid && cid.toString() }
+ }
+ })())
return store.putLinks(res)
}
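The table of cases above exercises `ipfs.refs` options in combination. A sketch of a single call using the format placeholders from the custom-format test, with option values chosen purely for illustration:

```JavaScript
// Sketch: recursive, de-duplicated refs with a custom output format
const all = require('it-all')

async function describeRefs (ipfs, cid) {
  return all(ipfs.refs(`/ipfs/${cid}`, {
    recursive: true,
    unique: true,
    maxDepth: 2,
    format: '<linkname>: <src> => <dst>' // placeholders as in the tests above
  }))
}
```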
diff --git a/src/repo/gc.js b/src/repo/gc.js
index 861d062a..da496625 100644
--- a/src/repo/gc.js
+++ b/src/repo/gc.js
@@ -3,6 +3,7 @@
const { getDescribe, getIt, expect } = require('../utils/mocha')
const { DAGNode } = require('ipld-dag-pb')
+const all = require('it-all')
/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
/**
@@ -23,205 +24,205 @@ module.exports = (common, options) => {
after(() => common.clean())
it('should run garbage collection', async () => {
- const res = await ipfs.add(Buffer.from('apples'))
+ const res = await all(ipfs.add(Buffer.from('apples')))
- const pinset = await ipfs.pin.ls()
- expect(pinset.map((obj) => obj.hash)).includes(res[0].hash)
+ const pinset = await all(ipfs.pin.ls())
+ expect(pinset.map(obj => obj.cid.toString())).includes(res[0].cid.toString())
- await ipfs.pin.rm(res[0].hash)
- await ipfs.repo.gc()
+ await ipfs.pin.rm(res[0].cid)
+ await all(ipfs.repo.gc())
- const finalPinset = await ipfs.pin.ls()
- expect(finalPinset.map((obj) => obj.hash)).not.includes(res[0].hash)
+ const finalPinset = await all(ipfs.pin.ls())
+ expect(finalPinset.map(obj => obj.cid.toString())).not.includes(res[0].cid.toString())
})
it('should clean up unpinned data', async () => {
// Get initial list of local blocks
- const refsBeforeAdd = await ipfs.refs.local()
+ const refsBeforeAdd = await all(ipfs.refs.local())
// Add some data. Note: this will implicitly pin the data, which causes
// some blocks to be added for the data itself and for the pinning
// information that refers to the blocks
- const addRes = await ipfs.add(Buffer.from('apples'))
- const hash = addRes[0].hash
+ const addRes = await all(ipfs.add(Buffer.from('apples')))
+ const cid = addRes[0].cid
// Get the list of local blocks after the add, should be bigger than
// the initial list and contain hash
- const refsAfterAdd = await ipfs.refs.local()
+ const refsAfterAdd = await all(ipfs.refs.local())
expect(refsAfterAdd.length).to.be.gt(refsBeforeAdd.length)
- expect(refsAfterAdd.map(r => r.ref)).includes(hash)
+ expect(refsAfterAdd.map(r => r.ref)).includes(cid.toString())
// Run garbage collection
- await ipfs.repo.gc()
+ await all(ipfs.repo.gc())
// Get the list of local blocks after GC, should still contain the hash,
// because the file is still pinned
- const refsAfterGc = await ipfs.refs.local()
- expect(refsAfterGc.map(r => r.ref)).includes(hash)
+ const refsAfterGc = await all(ipfs.refs.local())
+ expect(refsAfterGc.map(r => r.ref)).includes(cid.toString())
// Unpin the data
- await ipfs.pin.rm(hash)
+ await ipfs.pin.rm(cid)
// Run garbage collection
- await ipfs.repo.gc()
+ await all(ipfs.repo.gc())
// The list of local blocks should no longer contain the hash
- const refsAfterUnpinAndGc = await ipfs.refs.local()
- expect(refsAfterUnpinAndGc.map(r => r.ref)).not.includes(hash)
+ const refsAfterUnpinAndGc = await all(ipfs.refs.local())
+ expect(refsAfterUnpinAndGc.map(r => r.ref)).not.includes(cid.toString())
})
it('should clean up removed MFS files', async () => {
// Get initial list of local blocks
- const refsBeforeAdd = await ipfs.refs.local()
+ const refsBeforeAdd = await all(ipfs.refs.local())
// Add a file to MFS
await ipfs.files.write('/test', Buffer.from('oranges'), { create: true })
const stats = await ipfs.files.stat('/test')
expect(stats.type).to.equal('file')
- const hash = stats.hash
+ const hash = stats.cid.toString()
// Get the list of local blocks after the add, should be bigger than
// the initial list and contain hash
- const refsAfterAdd = await ipfs.refs.local()
+ const refsAfterAdd = await all(ipfs.refs.local())
expect(refsAfterAdd.length).to.be.gt(refsBeforeAdd.length)
expect(refsAfterAdd.map(r => r.ref)).includes(hash)
// Run garbage collection
- await ipfs.repo.gc()
+ await all(ipfs.repo.gc())
// Get the list of local blocks after GC, should still contain the hash,
// because the file is in MFS
- const refsAfterGc = await ipfs.refs.local()
+ const refsAfterGc = await all(ipfs.refs.local())
expect(refsAfterGc.map(r => r.ref)).includes(hash)
// Remove the file
await ipfs.files.rm('/test')
// Run garbage collection
- await ipfs.repo.gc()
+ await all(ipfs.repo.gc())
// The list of local blocks should no longer contain the hash
- const refsAfterUnpinAndGc = await ipfs.refs.local()
+ const refsAfterUnpinAndGc = await all(ipfs.refs.local())
expect(refsAfterUnpinAndGc.map(r => r.ref)).not.includes(hash)
})
it('should clean up block only after unpinned and removed from MFS', async () => {
// Get initial list of local blocks
- const refsBeforeAdd = await ipfs.refs.local()
+ const refsBeforeAdd = await all(ipfs.refs.local())
// Add a file to MFS
await ipfs.files.write('/test', Buffer.from('peaches'), { create: true })
const stats = await ipfs.files.stat('/test')
expect(stats.type).to.equal('file')
- const mfsFileHash = stats.hash
+ const mfsFileCid = stats.cid
// Get the CID of the data in the file
- const block = await ipfs.block.get(mfsFileHash)
+ const block = await ipfs.block.get(mfsFileCid)
// Add the data to IPFS (which implicitly pins the data)
- const addRes = await ipfs.add(block.data)
- const dataHash = addRes[0].hash
+ const addRes = await all(ipfs.add(block.data))
+ const dataCid = addRes[0].cid
// Get the list of local blocks after the add, should be bigger than
// the initial list and contain the data hash
- const refsAfterAdd = await ipfs.refs.local()
+ const refsAfterAdd = await all(ipfs.refs.local())
expect(refsAfterAdd.length).to.be.gt(refsBeforeAdd.length)
const hashesAfterAdd = refsAfterAdd.map(r => r.ref)
- expect(hashesAfterAdd).includes(dataHash)
+ expect(hashesAfterAdd).includes(dataCid.toString())
// Run garbage collection
- await ipfs.repo.gc()
+ await all(ipfs.repo.gc())
// Get the list of local blocks after GC, should still contain the hash,
// because the file is pinned and in MFS
- const refsAfterGc = await ipfs.refs.local()
+ const refsAfterGc = await all(ipfs.refs.local())
const hashesAfterGc = refsAfterGc.map(r => r.ref)
- expect(hashesAfterGc).includes(dataHash)
+ expect(hashesAfterGc).includes(dataCid.toString())
// Remove the file
await ipfs.files.rm('/test')
// Run garbage collection
- await ipfs.repo.gc()
+ await all(ipfs.repo.gc())
// Get the list of local blocks after GC, should still contain the hash,
// because the file is still pinned
- const refsAfterRmAndGc = await ipfs.refs.local()
+ const refsAfterRmAndGc = await all(ipfs.refs.local())
const hashesAfterRmAndGc = refsAfterRmAndGc.map(r => r.ref)
- expect(hashesAfterRmAndGc).not.includes(mfsFileHash)
- expect(hashesAfterRmAndGc).includes(dataHash)
+ expect(hashesAfterRmAndGc).not.includes(mfsFileCid.toString())
+ expect(hashesAfterRmAndGc).includes(dataCid.toString())
// Unpin the data
- await ipfs.pin.rm(dataHash)
+ await ipfs.pin.rm(dataCid)
// Run garbage collection
- await ipfs.repo.gc()
+ await all(ipfs.repo.gc())
// The list of local blocks should no longer contain the hashes
- const refsAfterUnpinAndGc = await ipfs.refs.local()
+ const refsAfterUnpinAndGc = await all(ipfs.refs.local())
const hashesAfterUnpinAndGc = refsAfterUnpinAndGc.map(r => r.ref)
- expect(hashesAfterUnpinAndGc).not.includes(mfsFileHash)
- expect(hashesAfterUnpinAndGc).not.includes(dataHash)
+ expect(hashesAfterUnpinAndGc).not.includes(mfsFileCid.toString())
+ expect(hashesAfterUnpinAndGc).not.includes(dataCid.toString())
})
it('should clean up indirectly pinned data after recursive pin removal', async () => {
// Get initial list of local blocks
- const refsBeforeAdd = await ipfs.refs.local()
+ const refsBeforeAdd = await all(ipfs.refs.local())
// Add some data
- const addRes = await ipfs.add(Buffer.from('pears'))
- const dataHash = addRes[0].hash
+ const addRes = await all(ipfs.add(Buffer.from('pears')))
+ const dataCid = addRes[0].cid
// Unpin the data
- await ipfs.pin.rm(dataHash)
+ await ipfs.pin.rm(dataCid)
// Create a link to the data from an object
const obj = await new DAGNode(Buffer.from('fruit'), [{
Name: 'p',
- Hash: dataHash,
+ Hash: dataCid,
TSize: addRes[0].size
}])
// Put the object into IPFS
- const objHash = (await ipfs.object.put(obj)).toString()
+ const objCid = await ipfs.object.put(obj)
// Putting an object doesn't pin it
- expect((await ipfs.pin.ls()).map(p => p.hash)).not.includes(objHash)
+ expect((await all(ipfs.pin.ls())).map(p => p.cid.toString())).not.includes(objCid.toString())
// Get the list of local blocks after the add, should be bigger than
// the initial list and contain data and object hash
- const refsAfterAdd = await ipfs.refs.local()
+ const refsAfterAdd = await all(ipfs.refs.local())
expect(refsAfterAdd.length).to.be.gt(refsBeforeAdd.length)
const hashesAfterAdd = refsAfterAdd.map(r => r.ref)
- expect(hashesAfterAdd).includes(objHash)
- expect(hashesAfterAdd).includes(dataHash)
+ expect(hashesAfterAdd).includes(objCid.toString())
+ expect(hashesAfterAdd).includes(dataCid.toString())
// Recursively pin the object
- await ipfs.pin.add(objHash, { recursive: true })
+ await ipfs.pin.add(objCid, { recursive: true })
// The data should now be indirectly pinned
- const pins = await ipfs.pin.ls()
- expect(pins.find(p => p.hash === dataHash).type).to.eql('indirect')
+ const pins = await all(ipfs.pin.ls())
+ expect(pins.find(p => p.cid.toString() === dataCid.toString()).type).to.eql('indirect')
// Run garbage collection
- await ipfs.repo.gc()
+ await all(ipfs.repo.gc())
// Get the list of local blocks after GC, should still contain the data
// hash, because the data is still (indirectly) pinned
- const refsAfterGc = await ipfs.refs.local()
- expect(refsAfterGc.map(r => r.ref)).includes(dataHash)
+ const refsAfterGc = await all(ipfs.refs.local())
+ expect(refsAfterGc.map(r => r.ref)).includes(dataCid.toString())
// Recursively unpin the object
- await ipfs.pin.rm(objHash)
+ await ipfs.pin.rm(objCid.toString())
// Run garbage collection
- await ipfs.repo.gc()
+ await all(ipfs.repo.gc())
// The list of local blocks should no longer contain the hashes
- const refsAfterUnpinAndGc = await ipfs.refs.local()
+ const refsAfterUnpinAndGc = await all(ipfs.refs.local())
const hashesAfterUnpinAndGc = refsAfterUnpinAndGc.map(r => r.ref)
- expect(hashesAfterUnpinAndGc).not.includes(objHash)
- expect(hashesAfterUnpinAndGc).not.includes(dataHash)
+ expect(hashesAfterUnpinAndGc).not.includes(objCid.toString())
+ expect(hashesAfterUnpinAndGc).not.includes(dataCid.toString())
})
})
}
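`repo.gc` also becomes an async iterable of removal results, so the tests drain it with `all(...)` even when the output is unused. A sketch of a helper combining the calls above:

```JavaScript
// Sketch: run GC to completion, then report which refs remain
const all = require('it-all')

async function gcAndListRefs (ipfs) {
  await all(ipfs.repo.gc()) // drain the stream so the whole GC pass runs
  const refs = await all(ipfs.refs.local())
  return refs.map(r => r.ref)
}
```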
diff --git a/src/stats/bw-pull-stream.js b/src/stats/bw-pull-stream.js
deleted file mode 100644
index f2329f32..00000000
--- a/src/stats/bw-pull-stream.js
+++ /dev/null
@@ -1,33 +0,0 @@
-/* eslint-env mocha */
-'use strict'
-
-const { expectIsBandwidth } = require('./utils')
-const pullToPromise = require('pull-to-promise')
-const { getDescribe, getIt } = require('../utils/mocha')
-
-/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
-/**
- * @param {Factory} common
- * @param {Object} options
- */
-module.exports = (common, options) => {
- const describe = getDescribe(options)
- const it = getIt(options)
-
- describe('.stats.bwPullStream', () => {
- let ipfs
-
- before(async () => {
- ipfs = (await common.spawn()).api
- })
-
- after(() => common.clean())
-
- it('should get bandwidth stats over pull stream', async () => {
- const stream = ipfs.stats.bwPullStream()
-
- const data = await pullToPromise.any(stream)
- expectIsBandwidth(null, data[0])
- })
- })
-}
diff --git a/src/stats/bw-readable-stream.js b/src/stats/bw-readable-stream.js
deleted file mode 100644
index 2e6ed767..00000000
--- a/src/stats/bw-readable-stream.js
+++ /dev/null
@@ -1,34 +0,0 @@
-/* eslint-env mocha */
-'use strict'
-
-const { expectIsBandwidth } = require('./utils')
-const { getDescribe, getIt } = require('../utils/mocha')
-const getStream = require('get-stream')
-
-/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
-/**
- * @param {Factory} common
- * @param {Object} options
- */
-module.exports = (common, options) => {
- const describe = getDescribe(options)
- const it = getIt(options)
-
- describe('.stats.bwReadableStream', () => {
- let ipfs
-
- before(async () => {
- ipfs = (await common.spawn()).api
- })
-
- after(() => common.clean())
-
- it('should get bandwidth stats over readable stream', async () => {
- const stream = ipfs.stats.bwReadableStream()
-
- const [data] = await getStream.array(stream)
-
- expectIsBandwidth(null, data)
- })
- })
-}
diff --git a/src/stats/bw.js b/src/stats/bw.js
index 178eb5f9..740fd7af 100644
--- a/src/stats/bw.js
+++ b/src/stats/bw.js
@@ -3,6 +3,7 @@
const { expectIsBandwidth } = require('./utils')
const { getDescribe, getIt } = require('../utils/mocha')
+const last = require('it-last')
/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
/**
@@ -23,7 +24,7 @@ module.exports = (common, options) => {
after(() => common.clean())
it('should get bandwidth stats ', async () => {
- const res = await ipfs.stats.bw()
+ const res = await last(ipfs.stats.bw())
expectIsBandwidth(null, res)
})
})
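Without polling options `stats.bw` yields a single bandwidth snapshot, so `it-last` suffices to grab the final (here, only) value:

```JavaScript
// Sketch: take the one snapshot stats.bw yields by default
const last = require('it-last')

async function bandwidth (ipfs) {
  // shape checked by the suite's expectIsBandwidth helper
  return last(ipfs.stats.bw())
}
```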
diff --git a/src/stats/index.js b/src/stats/index.js
index e07efd47..17aa13ba 100644
--- a/src/stats/index.js
+++ b/src/stats/index.js
@@ -4,8 +4,6 @@ const { createSuite } = require('../utils/suite')
const tests = {
bitswap: require('./bitswap'),
bw: require('./bw'),
- bwPullStream: require('./bw-pull-stream'),
- bwReadableStream: require('./bw-readable-stream'),
repo: require('./repo')
}
diff --git a/src/swarm/addrs.js b/src/swarm/addrs.js
index f7b7eeb4..86032250 100644
--- a/src/swarm/addrs.js
+++ b/src/swarm/addrs.js
@@ -1,7 +1,8 @@
/* eslint-env mocha */
'use strict'
-const PeerInfo = require('peer-info')
+const CID = require('cids')
+const Multiaddr = require('multiaddr')
const { getDescribe, getIt, expect } = require('../utils/mocha')
/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
@@ -31,7 +32,10 @@ module.exports = (common, options) => {
const peerInfos = await ipfsA.swarm.addrs()
expect(peerInfos).to.not.be.empty()
expect(peerInfos).to.be.an('array')
- peerInfos.forEach(m => expect(PeerInfo.isPeerInfo(m)).to.be.true())
+ peerInfos.forEach(m => {
+ expect(CID.isCID(m.id)).to.be.true()
+ m.addrs.forEach(addr => expect(Multiaddr.isMultiaddr(addr)).to.be.true())
+ })
})
})
}
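With `peer-info` gone, `swarm.addrs` results are plain objects: an `id` satisfying `CID.isCID` plus an `addrs` array of multiaddrs. A sketch of the shape check the test performs:

```JavaScript
// Sketch: validate the new swarm.addrs result shape
const CID = require('cids')
const Multiaddr = require('multiaddr')

async function checkAddrs (ipfs) {
  const peerInfos = await ipfs.swarm.addrs()
  for (const peer of peerInfos) {
    if (!CID.isCID(peer.id)) throw new Error('id should be a CID')
    peer.addrs.forEach(addr => {
      if (!Multiaddr.isMultiaddr(addr)) throw new Error('expected a multiaddr')
    })
  }
}
```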
diff --git a/src/swarm/local-addrs.js b/src/swarm/local-addrs.js
index 6ae57ce3..3a484fce 100644
--- a/src/swarm/local-addrs.js
+++ b/src/swarm/local-addrs.js
@@ -25,7 +25,8 @@ module.exports = (common, options) => {
it('should list local addresses the node is listening on', async () => {
const multiaddrs = await ipfs.swarm.localAddrs()
- expect(multiaddrs).to.have.length.above(0)
+      // js-ipfs in the browser will have zero addresses
+ expect(Array.isArray(multiaddrs)).to.be.true()
})
})
}
diff --git a/src/swarm/peers.js b/src/swarm/peers.js
index 9fa23e24..90a12e9d 100644
--- a/src/swarm/peers.js
+++ b/src/swarm/peers.js
@@ -2,8 +2,9 @@
'use strict'
const multiaddr = require('multiaddr')
-const PeerId = require('peer-id')
+const CID = require('cids')
const delay = require('delay')
+const { isNode } = require('ipfs-utils/src/env')
const { getDescribe, getIt, expect } = require('../utils/mocha')
/** @typedef { import("ipfsd-ctl/src/factory") } Factory */
@@ -41,7 +42,7 @@ module.exports = (common, options) => {
expect(peer).to.have.a.property('addr')
expect(multiaddr.isMultiaddr(peer.addr)).to.equal(true)
expect(peer).to.have.a.property('peer')
- expect(PeerId.isPeerId(peer.peer)).to.equal(true)
+ expect(CID.isCID(peer.peer)).to.equal(true)
expect(peer).to.not.have.a.property('latency')
/* TODO: These assertions must be uncommented as soon as
@@ -98,10 +99,10 @@ module.exports = (common, options) => {
it('should list peers only once even if they have multiple addresses', async () => {
// TODO: Change to port 0, needs: https://github.com/ipfs/interface-ipfs-core/issues/152
- const configA = getConfig([
+ const configA = getConfig(isNode ? [ // browser nodes cannot listen
'/ip4/127.0.0.1/tcp/16543',
'/ip4/127.0.0.1/tcp/16544'
- ])
+ ] : [])
const configB = getConfig([
'/ip4/127.0.0.1/tcp/26545/ws',
'/ip4/127.0.0.1/tcp/26546/ws'
diff --git a/src/utils/expect-timeout.js b/src/utils/expect-timeout.js
deleted file mode 100644
index 51c73307..00000000
--- a/src/utils/expect-timeout.js
+++ /dev/null
@@ -1,16 +0,0 @@
-'use strict'
-
-/**
- * Resolve if @param promise hangs for at least @param ms, throw otherwise
- * @param {Promise} promise promise that you expect to hang
- * @param {Number} ms millis to wait
- * @return {Promise}
- */
-module.exports = (promise, ms) => {
- return Promise.race([
- promise.then((out) => {
- throw new Error('Expected Promise to timeout but it was successful.')
- }),
- new Promise((resolve, reject) => setTimeout(resolve, ms))
- ])
-}
diff --git a/src/files-regular/utils.js b/src/utils/index.js
similarity index 100%
rename from src/files-regular/utils.js
rename to src/utils/index.js
diff --git a/src/utils/suite.js b/src/utils/suite.js
index b2ad5268..ce940298 100644
--- a/src/utils/suite.js
+++ b/src/utils/suite.js
@@ -31,4 +31,4 @@ function createSuite (tests, parent) {
return Object.assign(suite, tests)
}
-module.exports.createSuite = createSuite
+exports.createSuite = createSuite