diff --git a/.travis.yml b/.travis.yml index 1368eb6eb..21dbade12 100644 --- a/.travis.yml +++ b/.travis.yml @@ -12,6 +12,7 @@ node_js: os: - linux - osx + - windows script: npx nyc -s npm run test:node -- --bail after_success: npx nyc report --reporter=text-lcov > coverage.lcov && npx codecov @@ -21,13 +22,8 @@ jobs: - name: electron-renderer fast_finish: true - - include: - - os: windows - filter_secrets: false - cache: false - + include: - stage: check script: - npx aegir build --bundlesize @@ -45,12 +41,12 @@ jobs: addons: firefox: latest script: npx aegir test -t browser -t webworker -- --browsers FirefoxHeadless - + - stage: test name: electron-main script: - xvfb-run npx aegir test -t electron-main -- --bail - + - stage: test name: electron-renderer script: diff --git a/CHANGELOG.md b/CHANGELOG.md index 3ad9df13d..c45dc7126 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,20 @@ + +# [34.0.0](https://github.com/ipfs/js-ipfs-http-client/compare/v33.1.1...v34.0.0) (2019-08-29) + + +### Bug Fixes + +* **package:** update err-code to version 2.0.0 ([#1053](https://github.com/ipfs/js-ipfs-http-client/issues/1053)) ([3515070](https://github.com/ipfs/js-ipfs-http-client/commit/3515070)) + + +### Features + +* browser pubsub ([#1059](https://github.com/ipfs/js-ipfs-http-client/issues/1059)) ([3764d06](https://github.com/ipfs/js-ipfs-http-client/commit/3764d06)) +* expose pin and preload arguments ([#1079](https://github.com/ipfs/js-ipfs-http-client/issues/1079)) ([e3ed6e9](https://github.com/ipfs/js-ipfs-http-client/commit/e3ed6e9)) +* support adding files via async iterator ([#1078](https://github.com/ipfs/js-ipfs-http-client/issues/1078)) ([377042b](https://github.com/ipfs/js-ipfs-http-client/commit/377042b)) + + + ## [33.1.1](https://github.com/ipfs/js-ipfs-http-client/compare/v33.1.0...v33.1.1) (2019-07-26) diff --git a/README.md b/README.md index c93113485..def4b05e5 100644 --- a/README.md +++ b/README.md @@ -203,7 +203,7 @@ const ipfs = ipfsClient({ - [`ipfs.lsPullStream(ipfsPath)`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#lspullstream) - [`ipfs.lsReadableStream(ipfsPath)`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#lsreadablestream) - [MFS (mutable file system) specific](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#mutable-file-system) - + _Explore the Mutable File System through interactive coding challenges in our [ProtoSchool tutorial](https://proto.school/#/mutable-file-system/)._ - [`ipfs.files.cp([from, to], [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filescp) - [`ipfs.files.flush([path], [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/FILES.md#filesflush) @@ -233,7 +233,7 @@ const ipfs = ipfsClient({ #### Graph - [dag](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/DAG.md) - + _Explore the DAG API through interactive coding challenges in our [ProtoSchool tutorial](https://proto.school/#/basics)._ - [`ipfs.dag.get(cid, [path], [options], [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/DAG.md#dagget) - [`ipfs.dag.put(dagNode, [options], [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/DAG.md#dagput) @@ -339,28 +339,6 @@ const ipfs = ipfsClient({ - [`ipfs.key.rename(oldName, newName, [callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/KEY.md#keyrename) - [`ipfs.key.rm(name, 
[callback])`](https://github.com/ipfs/interface-ipfs-core/blob/master/SPEC/KEY.md#keyrm) -#### Pubsub Caveat - -**Currently, the [PubSub API only works in Node.js environment](https://github.com/ipfs/js-ipfs-http-client/issues/518)** - -We currently don't support pubsub when run in the browser, and we test it with separate set of tests to make sure if it's being used in the browser, pubsub errors. - -More info: https://github.com/ipfs/js-ipfs-http-client/issues/518 - -This means: -- You can use pubsub from js-ipfs-http-client in Node.js -- You can use pubsub from js-ipfs-http-client in Electron - (when js-ipfs-http-client is ran in the main process of Electron) -- You can't use pubsub from js-ipfs-http-client in the browser -- You can't use pubsub from js-ipfs-http-client in Electron's - renderer process -- You can use pubsub from js-ipfs in the browsers -- You can use pubsub from js-ipfs in Node.js -- You can use pubsub from js-ipfs in Electron - (in both the main process and the renderer process) -- See https://github.com/ipfs/js-ipfs for details on - pubsub in js-ipfs - #### Instance utils - `ipfs.getEndpointConfig()` diff --git a/examples/browser-pubsub/.gitignore b/examples/browser-pubsub/.gitignore new file mode 100644 index 000000000..0e804e3a5 --- /dev/null +++ b/examples/browser-pubsub/.gitignore @@ -0,0 +1 @@ +bundle.js diff --git a/examples/browser-pubsub/README.md b/examples/browser-pubsub/README.md new file mode 100644 index 000000000..abe6a21f9 --- /dev/null +++ b/examples/browser-pubsub/README.md @@ -0,0 +1,94 @@ +# Pubsub in the browser + +> Use pubsub in the browser! + +This example is a demo web application that allows you to connect to an IPFS node, subscribe to a pubsub topic and send/receive messages. We'll start two IPFS nodes and two browsers and use the `ipfs-http-client` to instruct each node to listen to a pubsub topic and send/receive pubsub messages to/from each other. We're aiming for something like this: + +``` + +-----------+ +-----------+ + | +-------------------> | + | js-ipfs | pubsub | go-ipfs | + | <-------------------+ | + +-----^-----+ +-----^-----+ + | | + | HTTP API | HTTP API + | | ++-------------------+ +-------------------+ ++-------------------+ +-------------------+ +| | | | +| | | | +| Browser 1 | | Browser 2 | +| | | | +| | | | +| | | | ++-------------------+ +-------------------+ +``` + +## 1. Get started + +With Node.js and git installed, clone the repo and install the project dependencies: + +```sh +git clone https://github.com/ipfs/js-ipfs-http-client.git +cd js-ipfs-http-client +npm install # Installs ipfs-http-client dependencies +cd examples/browser-pubsub +npm install # Installs browser-pubsub app dependencies +``` + +Start the example application: + +```sh +npm start +``` + +You should see something similar to the following in your terminal and the web app should now be available if you navigate to http://127.0.0.1:8888 using your browser: + +```sh +Starting up http-server, serving ./ +Available on: + http://127.0.0.1:8888 +``` + +## 2. Start two IPFS nodes + +To demonstrate pubsub we need two nodes running so pubsub messages can be passed between them. + +Right now the easiest way to do this is to install and start a `js-ipfs` and `go-ipfs` node. There are other ways to do this, see [this document on running multiple nodes](https://github.com/ipfs/js-ipfs/tree/master/examples/running-multiple-nodes) for details. 
+
+### Install and start the JS IPFS node
+
+```sh
+npm install -g ipfs
+jsipfs init
+# Configure CORS to allow ipfs-http-client to access this IPFS node
+jsipfs config --json API.HTTPHeaders.Access-Control-Allow-Origin '["http://127.0.0.1:8888"]'
+# Start the IPFS node, enabling pubsub
+jsipfs daemon --enable-pubsub-experiment
+```
+
+### Install and start the Go IPFS node
+
+Head over to https://dist.ipfs.io/#go-ipfs and hit the "Download go-ipfs" button. Extract the archive and read the instructions to install.
+
+After installation:
+
+```sh
+ipfs init
+# Configure CORS to allow ipfs-http-client to access this IPFS node
+ipfs config --json API.HTTPHeaders.Access-Control-Allow-Origin '["http://127.0.0.1:8888"]'
+# Start the IPFS node, enabling pubsub
+ipfs daemon --enable-pubsub-experiment
+```
+
+## 3. Open two browsers and connect to each node
+
+Now, open up **two** browser windows. This could be two tabs in the same browser or two completely different browsers; it doesn't matter. Navigate to http://127.0.0.1:8888 in both.
+
+In the "API ADDR" field enter `/ip4/127.0.0.1/tcp/5001` in one browser and `/ip4/127.0.0.1/tcp/5002` in the other and hit the "Connect" button.
+
+This connects each browser to an IPFS node. Now, from the comfort of our browser, we can instruct each node to listen to a pubsub topic and send/receive pubsub messages to each other.
+
+> N.B. Since our two IPFS nodes are running on the same network they should have already found each other by mDNS, so you probably won't need to use the "CONNECT TO PEER" field. If you find your pubsub messages aren't getting through, check the output from your `jsipfs daemon` command and find the first address listed under "Swarm listening on" - it'll look like `/ip4/127.0.0.1/tcp/4002/ipfs/Qm...`. Paste this address into the "CONNECT TO PEER" field for the browser that is connected to your go-ipfs node and hit "Connect".
+
+Finally, use the "SUBSCRIBE TO PUBSUB TOPIC" and "SEND MESSAGE" fields to do some pubsub-ing; you should see messages sent from one browser appear in the log of the other (provided they're both subscribed to the same topic).
diff --git a/examples/browser-pubsub/index.html b/examples/browser-pubsub/index.html
new file mode 100644
index 000000000..d5d84d564
--- /dev/null
+++ b/examples/browser-pubsub/index.html
@@ -0,0 +1,42 @@
+<!DOCTYPE html>
+<html>
+<head>
+  <meta charset="utf-8">
+  <title>Pubsub in the browser</title>
+</head>
+<body>
+  <h1>Pubsub</h1>
+
+  <div>
+    <label for="api-url">API Addr</label>
+    <input id="api-url" type="text">
+    <button id="node-connect" type="button">Connect</button>
+  </div>
+
+  <div>
+    <label for="peer-addr">Connect to peer</label>
+    <input id="peer-addr" type="text">
+    <button id="peer-connect" type="button">Connect</button>
+  </div>
+
+  <div>
+    <label for="topic">Subscribe to pubsub topic</label>
+    <input id="topic" type="text">
+    <button id="subscribe" type="button">Subscribe</button>
+  </div>
+
+  <div>
+    <label for="message">Send pubsub message</label>
+    <input id="message" type="text">
+    <button id="send" type="button">Send</button>
+  </div>
+
+  <h2>Console</h2>
+  <div id="console"></div>
+
+  <script src="bundle.js"></script>
+</body>
+</html>
+ + + diff --git a/examples/browser-pubsub/index.js b/examples/browser-pubsub/index.js new file mode 100644 index 000000000..eae1dab17 --- /dev/null +++ b/examples/browser-pubsub/index.js @@ -0,0 +1,135 @@ +'use strict' + +const IpfsHttpClient = require('ipfs-http-client') +const { sleep, Logger, onEnterPress, catchAndLog } = require('./util') + +async function main () { + const apiUrlInput = document.getElementById('api-url') + const nodeConnectBtn = document.getElementById('node-connect') + + const peerAddrInput = document.getElementById('peer-addr') + const peerConnectBtn = document.getElementById('peer-connect') + + const topicInput = document.getElementById('topic') + const subscribeBtn = document.getElementById('subscribe') + + const messageInput = document.getElementById('message') + const sendBtn = document.getElementById('send') + + let log = Logger(document.getElementById('console')) + let ipfs + let topic + let peerId + + async function reset () { + if (ipfs && topic) { + log(`Unsubscribing from topic ${topic}`) + await ipfs.pubsub.unsubscribe(topic) + } + log = Logger(document.getElementById('console')) + topicInput.value = '' + topic = null + peerId = null + ipfs = null + } + + async function nodeConnect (url) { + await reset() + log(`Connecting to ${url}`) + ipfs = IpfsHttpClient(url) + const { id, agentVersion } = await ipfs.id() + peerId = id + log(`Success!`) + log(`Version ${agentVersion}`) + log(`Peer ID ${id}`) + } + + async function peerConnect (addr) { + if (!addr) throw new Error('Missing peer multiaddr') + if (!ipfs) throw new Error('Connect to a node first') + log(`Connecting to peer ${addr}`) + await ipfs.swarm.connect(addr) + log(`Success!`) + log('Listing swarm peers...') + await sleep() + const peers = await ipfs.swarm.peers() + peers.forEach(peer => { + const fullAddr = `${peer.addr}/ipfs/${peer.peer.toB58String()}` + log(`${fullAddr}`) + }) + log(`(${peers.length} peers total)`) + } + + async function subscribe (nextTopic) { + if (!nextTopic) throw new Error('Missing topic name') + if (!ipfs) throw new Error('Connect to a node first') + + const lastTopic = topic + + if (topic) { + topic = null + log(`Unsubscribing from topic ${lastTopic}`) + await ipfs.pubsub.unsubscribe(lastTopic) + } + + log(`Subscribing to ${nextTopic}...`) + + await ipfs.pubsub.subscribe(nextTopic, msg => { + const from = msg.from + const seqno = msg.seqno.toString('hex') + if (from === peerId) return log(`Ignoring message ${seqno} from self`) + log(`Message ${seqno} from ${from}:`) + try { + log(JSON.stringify(msg.data.toString(), null, 2)) + } catch (_) { + log(msg.data.toString('hex')) + } + }, { + onError: (err, fatal) => { + if (fatal) { + console.error(err) + log(`${err.message}`) + topic = null + log('Resubscribing in 5s...') + setTimeout(catchAndLog(() => subscribe(nextTopic), log), 5000) + } else { + console.warn(err) + } + } + }) + + topic = nextTopic + log(`Success!`) + } + + async function send (msg) { + if (!msg) throw new Error('Missing message') + if (!topic) throw new Error('Subscribe to a topic first') + if (!ipfs) throw new Error('Connect to a node first') + + log(`Sending message to ${topic}...`) + await ipfs.pubsub.publish(topic, msg) + log(`Success!`) + } + + const onNodeConnectClick = catchAndLog(() => nodeConnect(apiUrlInput.value), log) + apiUrlInput.addEventListener('keydown', onEnterPress(onNodeConnectClick)) + nodeConnectBtn.addEventListener('click', onNodeConnectClick) + + const onPeerConnectClick = catchAndLog(() => peerConnect(peerAddrInput.value), log) + 
peerAddrInput.addEventListener('keydown', onEnterPress(onPeerConnectClick)) + peerConnectBtn.addEventListener('click', onPeerConnectClick) + + const onSubscribeClick = catchAndLog(() => subscribe(topicInput.value), log) + topicInput.addEventListener('keydown', onEnterPress(onSubscribeClick)) + subscribeBtn.addEventListener('click', onSubscribeClick) + + const onSendClick = catchAndLog(async () => { + await send(messageInput.value) + messageInput.value = '' + }, log) + messageInput.addEventListener('keydown', onEnterPress(onSendClick)) + sendBtn.addEventListener('click', onSendClick) +} + +main() diff --git a/examples/browser-pubsub/package.json b/examples/browser-pubsub/package.json new file mode 100644 index 000000000..c4c6e2727 --- /dev/null +++ b/examples/browser-pubsub/package.json @@ -0,0 +1,20 @@ +{ + "name": "browser-pubsub-example", + "version": "0.0.0", + "description": "An example demonstrating pubsub in the browser", + "private": true, + "main": "index.js", + "scripts": { + "start": "npm run build && npm run serve", + "build": "browserify index.js > bundle.js", + "serve": "http-server -a 127.0.0.1 -p 8888", + "test": "echo \"Error: no test specified\" && exit 1" + }, + "author": "Alan Shaw", + "license": "MIT", + "dependencies": { + "browserify": "^16.5.0", + "http-server": "^0.11.1", + "ipfs-http-client": "../../" + } +} diff --git a/examples/browser-pubsub/util.js b/examples/browser-pubsub/util.js new file mode 100644 index 000000000..e6aada61f --- /dev/null +++ b/examples/browser-pubsub/util.js @@ -0,0 +1,31 @@ +exports.sleep = (ms = 1000) => new Promise(resolve => setTimeout(resolve, ms)) + +exports.Logger = outEl => { + outEl.innerHTML = '' + return message => { + const container = document.createElement('div') + container.innerHTML = message + outEl.appendChild(container) + outEl.scrollTop = outEl.scrollHeight + } +} + +exports.onEnterPress = fn => { + return e => { + if (event.which == 13 || event.keyCode == 13) { + e.preventDefault() + fn() + } + } +} + +exports.catchAndLog = (fn, log) => { + return async (...args) => { + try { + await fn(...args) + } catch (err) { + console.error(err) + log(`${err.message}`) + } + } +} diff --git a/greenkeeper.json b/greenkeeper.json index ae2649cc9..2d593daf5 100644 --- a/greenkeeper.json +++ b/greenkeeper.json @@ -4,15 +4,6 @@ "packages": [ "package.json" ] - }, - "examples": { - "packages": [ - "examples/bundle-browserify/package.json", - "examples/bundle-webpack/package.json", - "examples/name-api/package.json", - "examples/sub-module/package.json", - "examples/upload-file-via-browser/package.json" - ] } } } diff --git a/package.json b/package.json index ece2c88e9..b055227e5 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "ipfs-http-client", - "version": "33.1.1", + "version": "34.0.0", "description": "A client library for the IPFS HTTP API", "keywords": [ "ipfs" @@ -17,7 +17,8 @@ "browser": { "glob": false, "fs": false, - "stream": "readable-stream" + "stream": "readable-stream", + "ky-universal": "ky/umd" }, "repository": "github:ipfs/js-ipfs-http-client", "scripts": { @@ -33,17 +34,19 @@ "coverage": "npx nyc -r html npm run test:node -- --bail" }, "dependencies": { + "abort-controller": "^3.0.0", "async": "^2.6.1", "bignumber.js": "^9.0.0", "bl": "^3.0.0", "bs58": "^4.0.1", - "buffer": "^5.2.1", + "buffer": "^5.4.2", "cids": "~0.7.1", "concat-stream": "github:hugomrdias/concat-stream#feat/smaller", "debug": "^4.1.0", "detect-node": "^2.0.4", "end-of-stream": "^1.4.1", - "err-code": "^1.1.2", + "err-code": 
"^2.0.0", + "explain-error": "^1.0.4", "flatmap": "0.0.3", "glob": "^7.1.3", "ipfs-block": "~0.8.1", @@ -56,9 +59,12 @@ "is-stream": "^2.0.0", "iso-stream-http": "~0.1.2", "iso-url": "~0.4.6", + "iterable-ndjson": "^1.1.0", "just-kebab-case": "^1.1.0", "just-map-keys": "^1.1.0", "kind-of": "^6.0.2", + "ky": "^0.11.2", + "ky-universal": "^0.2.2", "lru-cache": "^5.1.1", "multiaddr": "^6.0.6", "multibase": "~0.6.0", @@ -66,8 +72,9 @@ "multihashes": "~0.4.14", "ndjson": "github:hugomrdias/ndjson#feat/readable-stream3", "once": "^1.4.0", - "peer-id": "~0.12.2", + "peer-id": "~0.12.3", "peer-info": "~0.15.1", + "promise-nodeify": "^3.0.1", "promisify-es6": "^1.0.3", "pull-defer": "~0.2.3", "pull-stream": "^3.6.9", @@ -80,13 +87,13 @@ "through2": "^3.0.1" }, "devDependencies": { - "aegir": "^19.0.3", + "aegir": "^20.0.0", "browser-process-platform": "~0.1.1", "chai": "^4.2.0", "cross-env": "^5.2.0", "dirty-chai": "^2.0.1", - "go-ipfs-dep": "~0.4.21", - "interface-ipfs-core": "^0.109.0", + "go-ipfs-dep": "^0.4.22", + "interface-ipfs-core": "^0.111.0", "ipfsd-ctl": "~0.43.0", "nock": "^10.0.2", "stream-equal": "^1.1.1" @@ -176,6 +183,7 @@ "dmitriy ryajov ", "elsehow ", "ethers ", + "greenkeeper[bot] <23040076+greenkeeper[bot]@users.noreply.github.com>", "greenkeeper[bot] ", "haad ", "kumavis ", diff --git a/src/dag/get.js b/src/dag/get.js index d0a1dac13..771c38bf4 100644 --- a/src/dag/get.js +++ b/src/dag/get.js @@ -42,8 +42,8 @@ module.exports = (send) => { }, cb) }, (resolved, cb) => { - block(send).get(new CID(resolved['Cid']['/']), (err, ipfsBlock) => { - cb(err, ipfsBlock, resolved['RemPath']) + block(send).get(new CID(resolved.Cid['/']), (err, ipfsBlock) => { + cb(err, ipfsBlock, resolved.RemPath) }) }, (ipfsBlock, path, cb) => { diff --git a/src/dag/put.js b/src/dag/put.js index 42d92b285..6cfca99c0 100644 --- a/src/dag/put.js +++ b/src/dag/put.js @@ -77,8 +77,8 @@ module.exports = (send) => { if (err) { return callback(err) } - if (result['Cid']) { - return callback(null, new CID(result['Cid']['/'])) + if (result.Cid) { + return callback(null, new CID(result.Cid['/'])) } else { return callback(result) } diff --git a/src/dht/findprovs.js b/src/dht/findprovs.js index 41ef4aaeb..695ef04df 100644 --- a/src/dht/findprovs.js +++ b/src/dht/findprovs.js @@ -27,7 +27,7 @@ module.exports = (send) => { res = [res] } - let responses = [] + const responses = [] res.forEach(result => { // 4 = Provider if (result.Type !== 4) return diff --git a/src/dht/get.js b/src/dht/get.js index 279ad51bf..ab64fa892 100644 --- a/src/dht/get.js +++ b/src/dht/get.js @@ -35,8 +35,7 @@ module.exports = (send) => { if (res.Type === 5) { done(null, res.Extra) } else { - let error = new Error('key was not found (type 6)') - done(error) + done(new Error('key was not found (type 6)')) } } diff --git a/src/files-regular/add-async-iterator.js b/src/files-regular/add-async-iterator.js new file mode 100644 index 000000000..3fa2b23ed --- /dev/null +++ b/src/files-regular/add-async-iterator.js @@ -0,0 +1,23 @@ +'use strict' + +const SendFilesStream = require('../utils/send-files-stream') +const FileResultStreamConverter = require('../utils/file-result-stream-converter') + +module.exports = (send) => { + return async function * (source, options) { + options = options || {} + options.converter = FileResultStreamConverter + + const stream = SendFilesStream(send, 'add')(options) + + for await (const entry of source) { + stream.write(entry) + } + + stream.end() + + for await (const entry of stream) { + yield entry + } + } +} diff --git 
a/src/files-regular/index.js b/src/files-regular/index.js index 059d7ea1c..e5b49e495 100644 --- a/src/files-regular/index.js +++ b/src/files-regular/index.js @@ -12,6 +12,7 @@ module.exports = (arg) => { addFromFs: require('../files-regular/add-from-fs')(send), addFromURL: require('../files-regular/add-from-url')(send), addFromStream: require('../files-regular/add')(send), + _addAsyncIterator: require('../files-regular/add-async-iterator')(send), cat: require('../files-regular/cat')(send), catReadableStream: require('../files-regular/cat-readable-stream')(send), catPullStream: require('../files-regular/cat-pull-stream')(send), diff --git a/src/lib/configure.js b/src/lib/configure.js new file mode 100644 index 000000000..a9036d1cd --- /dev/null +++ b/src/lib/configure.js @@ -0,0 +1,59 @@ +'use strict' +/* eslint-env browser */ + +const ky = require('ky-universal').default +const { isBrowser, isWebWorker } = require('ipfs-utils/src/env') +const { toUri } = require('./multiaddr') +const errorHandler = require('./error-handler') + +// Set default configuration and call create function with them +module.exports = create => config => { + config = config || {} + + if (typeof config === 'string') { + config = { apiAddr: config } + } else if (config.constructor && config.constructor.isMultiaddr) { + config = { apiAddr: config } + } else { + config = { ...config } + } + + config.apiAddr = (config.apiAddr || getDefaultApiAddr(config)).toString() + config.apiAddr = config.apiAddr.startsWith('/') ? toUri(config.apiAddr) : config.apiAddr + config.apiPath = config.apiPath || config['api-path'] || '/api/v0' + + return create({ + // TODO configure ky to use config.fetch when this is released: + // https://github.com/sindresorhus/ky/pull/153 + ky: ky.extend({ + prefixUrl: config.apiAddr + config.apiPath, + timeout: config.timeout || 60 * 1000, + headers: config.headers, + hooks: { + afterResponse: [errorHandler] + } + }), + ...config + }) +} + +function getDefaultApiAddr ({ protocol, host, port }) { + if (isBrowser || isWebWorker) { + if (!protocol && !host && !port) { // Use current origin + return '' + } + + if (!protocol) { + protocol = location.protocol.startsWith('http') + ? location.protocol.split(':')[0] + : 'http' + } + + host = host || location.hostname + port = port || location.port + + return `${protocol}://${host}${port ? 
':' + port : ''}` + } + + return `${protocol || 'http'}://${host || 'localhost'}:${port || 5001}` +} diff --git a/src/lib/error-handler.js b/src/lib/error-handler.js new file mode 100644 index 000000000..1e788227c --- /dev/null +++ b/src/lib/error-handler.js @@ -0,0 +1,31 @@ +'use strict' + +const { HTTPError } = require('ky-universal') +const log = require('debug')('ipfs-http-client:lib:error-handler') + +function isJsonResponse (res) { + return (res.headers.get('Content-Type') || '').startsWith('application/json') +} + +module.exports = async function errorHandler (response) { + if (response.ok) return + + let msg + + try { + if (isJsonResponse(response)) { + const data = await response.json() + log(data) + msg = data.Message || data.message + } else { + msg = await response.text() + } + } catch (err) { + log('Failed to parse error response', err) + // Failed to extract/parse error message from response + throw new HTTPError(response) + } + + if (!msg) throw new HTTPError(response) + throw Object.assign(new Error(msg), { status: response.status }) +} diff --git a/src/lib/multiaddr.js b/src/lib/multiaddr.js new file mode 100644 index 000000000..09462ab34 --- /dev/null +++ b/src/lib/multiaddr.js @@ -0,0 +1,18 @@ +'use strict' + +// Convert a multiaddr to a URI +// Assumes multiaddr is in a format that can be converted to a HTTP(s) URI +exports.toUri = ma => { + const parts = `${ma}`.split('/') + const port = getPort(parts) + return `${getProtocol(parts)}://${parts[2]}${port == null ? '' : ':' + port}` +} + +function getProtocol (maParts) { + return maParts.indexOf('https') === -1 ? 'http' : 'https' +} + +function getPort (maParts) { + const tcpIndex = maParts.indexOf('tcp') + return tcpIndex === -1 ? null : maParts[tcpIndex + 1] +} diff --git a/src/lib/stream-to-iterable.js b/src/lib/stream-to-iterable.js new file mode 100644 index 000000000..5e06a99c6 --- /dev/null +++ b/src/lib/stream-to-iterable.js @@ -0,0 +1,25 @@ +'use strict' + +module.exports = function toIterable (body) { + // Node.js stream + if (body[Symbol.asyncIterator]) return body + + // Browser ReadableStream + if (body.getReader) { + return (async function * () { + const reader = body.getReader() + + try { + while (true) { + const { done, value } = await reader.read() + if (done) return + yield value + } + } finally { + reader.releaseLock() + } + })() + } + + throw new Error('unknown stream') +} diff --git a/src/pubsub.js b/src/pubsub.js deleted file mode 100644 index 6b298351d..000000000 --- a/src/pubsub.js +++ /dev/null @@ -1,212 +0,0 @@ -'use strict' - -const promisify = require('promisify-es6') -const EventEmitter = require('events') -const eos = require('end-of-stream') -const isNode = require('detect-node') -const setImmediate = require('async/setImmediate') -const PubsubMessageStream = require('./utils/pubsub-message-stream') -const stringlistToArray = require('./utils/stringlist-to-array') -const moduleConfig = require('./utils/module-config') - -const NotSupportedError = () => new Error('pubsub is currently not supported when run in the browser') - -/* Public API */ -module.exports = (arg) => { - const send = moduleConfig(arg) - - /* Internal subscriptions state and functions */ - const ps = new EventEmitter() - const subscriptions = {} - ps.id = Math.random() - return { - subscribe: (topic, handler, options, callback) => { - const defaultOptions = { - discover: false - } - - if (typeof options === 'function') { - callback = options - options = defaultOptions - } - - if (!options) { - options = defaultOptions - } - 
- // Throw an error if ran in the browsers - if (!isNode) { - if (!callback) { - return Promise.reject(NotSupportedError()) - } - - return setImmediate(() => callback(NotSupportedError())) - } - - // promisify doesn't work as we always pass a - // function as last argument (`handler`) - if (!callback) { - return new Promise((resolve, reject) => { - subscribe(topic, handler, options, (err) => { - if (err) { - return reject(err) - } - resolve() - }) - }) - } - - subscribe(topic, handler, options, callback) - }, - unsubscribe: (topic, handler, callback) => { - if (!isNode) { - if (!callback) { - return Promise.reject(NotSupportedError()) - } - - return setImmediate(() => callback(NotSupportedError())) - } - - if (ps.listenerCount(topic) === 0 || !subscriptions[topic]) { - const err = new Error(`Not subscribed to '${topic}'`) - - if (!callback) { - return Promise.reject(err) - } - - return setImmediate(() => callback(err)) - } - - if (!handler && !callback) { - ps.removeAllListeners(topic) - } else { - ps.removeListener(topic, handler) - } - - // Drop the request once we are actually done - if (ps.listenerCount(topic) === 0) { - if (!callback) { - return new Promise((resolve, reject) => { - // When the response stream has ended, resolve the promise - eos(subscriptions[topic].res, (err) => { - // FIXME: Artificial timeout needed to ensure unsubscribed - setTimeout(() => { - if (err) return reject(err) - resolve() - }) - }) - subscriptions[topic].req.abort() - subscriptions[topic] = null - }) - } - - // When the response stream has ended, call the callback - eos(subscriptions[topic].res, (err) => { - // FIXME: Artificial timeout needed to ensure unsubscribed - setTimeout(() => callback(err)) - }) - subscriptions[topic].req.abort() - subscriptions[topic] = null - return - } - - if (!callback) { - return Promise.resolve() - } - - setImmediate(() => callback()) - }, - publish: promisify((topic, data, callback) => { - if (!isNode) { - return callback(NotSupportedError()) - } - - if (!Buffer.isBuffer(data)) { - return callback(new Error('data must be a Buffer')) - } - - const request = { - path: 'pubsub/pub', - args: [topic, data] - } - - send(request, callback) - }), - ls: promisify((callback) => { - if (!isNode) { - return callback(NotSupportedError()) - } - - const request = { - path: 'pubsub/ls' - } - - send.andTransform(request, stringlistToArray, callback) - }), - peers: promisify((topic, callback) => { - if (!isNode) { - return callback(NotSupportedError()) - } - - const request = { - path: 'pubsub/peers', - args: [topic] - } - - send.andTransform(request, stringlistToArray, callback) - }), - setMaxListeners (n) { - return ps.setMaxListeners(n) - } - } - - function subscribe (topic, handler, options, callback) { - ps.on(topic, handler) - - if (subscriptions[topic]) { - // TODO: should a callback error be returned? 
- return callback() - } - - // Request params - const request = { - path: 'pubsub/sub', - args: [topic], - qs: { - discover: options.discover - } - } - - // Start the request and transform the response - // stream to Pubsub messages stream - subscriptions[topic] = {} - subscriptions[topic].req = send.andTransform(request, PubsubMessageStream.from, (err, stream) => { - if (err) { - subscriptions[topic] = null - ps.removeListener(topic, handler) - return callback(err) - } - - subscriptions[topic].res = stream - - stream.on('data', (msg) => { - ps.emit(topic, msg) - }) - - stream.on('error', (err) => { - ps.emit('error', err) - }) - - eos(stream, (err) => { - if (err) { - ps.emit('error', err) - } - - subscriptions[topic] = null - ps.removeListener(topic, handler) - }) - - callback() - }) - } -} diff --git a/src/pubsub/index.js b/src/pubsub/index.js new file mode 100644 index 000000000..2738bd5ac --- /dev/null +++ b/src/pubsub/index.js @@ -0,0 +1,50 @@ +'use strict' + +const nodeify = require('promise-nodeify') + +// This file is temporary and for compatibility with legacy usage +module.exports = (send, options) => { + if (typeof send !== 'function') { + options = send + } + + const ls = require('./ls')(options) + const peers = require('./peers')(options) + const publish = require('./publish')(options) + const subscribe = require('./subscribe')(options) + const unsubscribe = require('./unsubscribe')(options) + + return { + ls: (options, callback) => { + if (typeof options === 'function') { + callback = options + options = {} + } + return nodeify(ls(options), callback) + }, + peers: (topic, options, callback) => { + if (typeof options === 'function') { + callback = options + options = {} + } + return nodeify(peers(topic, options), callback) + }, + publish: (topic, data, options, callback) => { + if (typeof options === 'function') { + callback = options + options = {} + } + return nodeify(publish(topic, data, options), callback) + }, + subscribe: (topic, handler, options, callback) => { + if (typeof options === 'function') { + callback = options + options = {} + } + return nodeify(subscribe(topic, handler, options), callback) + }, + unsubscribe: (topic, handler, callback) => { + return nodeify(unsubscribe(topic, handler), callback) + } + } +} diff --git a/src/pubsub/ls.js b/src/pubsub/ls.js new file mode 100644 index 000000000..177dcd491 --- /dev/null +++ b/src/pubsub/ls.js @@ -0,0 +1,18 @@ +'use strict' + +const configure = require('../lib/configure') + +module.exports = configure(({ ky }) => { + return async (options) => { + options = options || {} + + const { Strings } = await ky.get('pubsub/ls', { + timeout: options.timeout, + signal: options.signal, + headers: options.headers, + searchParams: options.searchParams + }).json() + + return Strings || [] + } +}) diff --git a/src/pubsub/peers.js b/src/pubsub/peers.js new file mode 100644 index 000000000..bdeca60e4 --- /dev/null +++ b/src/pubsub/peers.js @@ -0,0 +1,26 @@ +'use strict' + +const configure = require('../lib/configure') + +module.exports = configure(({ ky }) => { + return async (topic, options) => { + if (!options && typeof topic === 'object') { + options = topic + topic = null + } + + options = options || {} + + const searchParams = new URLSearchParams(options.searchParams) + searchParams.set('arg', topic) + + const { Strings } = await ky.get('pubsub/peers', { + timeout: options.timeout, + signal: options.signal, + headers: options.headers, + searchParams + }).json() + + return Strings || [] + } +}) diff --git 
a/src/pubsub/publish.js b/src/pubsub/publish.js new file mode 100644 index 000000000..a41c8fba0 --- /dev/null +++ b/src/pubsub/publish.js @@ -0,0 +1,45 @@ +'use strict' + +const { Buffer } = require('buffer') +const configure = require('../lib/configure') + +module.exports = configure(({ ky }) => { + return async (topic, data, options) => { + options = options || {} + data = Buffer.from(data) + + const searchParams = new URLSearchParams(options.searchParams) + searchParams.set('arg', topic) + + const res = await ky.post(`pubsub/pub?${searchParams}&arg=${encodeBuffer(data)}`, { + timeout: options.timeout, + signal: options.signal, + headers: options.headers + }).text() + + return res + } +}) + +function encodeBuffer (buf) { + let uriEncoded = '' + for (const byte of buf) { + // https://tools.ietf.org/html/rfc3986#page-14 + // ALPHA (%41-%5A and %61-%7A), DIGIT (%30-%39), hyphen (%2D), period (%2E), + // underscore (%5F), or tilde (%7E) + if ( + (byte >= 0x41 && byte <= 0x5A) || + (byte >= 0x61 && byte <= 0x7A) || + (byte >= 0x30 && byte <= 0x39) || + (byte === 0x2D) || + (byte === 0x2E) || + (byte === 0x5F) || + (byte === 0x7E) + ) { + uriEncoded += String.fromCharCode(byte) + } else { + uriEncoded += `%${byte.toString(16).padStart(2, '0')}` + } + } + return uriEncoded +} diff --git a/src/pubsub/subscribe.js b/src/pubsub/subscribe.js new file mode 100644 index 000000000..ae95ec5c8 --- /dev/null +++ b/src/pubsub/subscribe.js @@ -0,0 +1,85 @@ +'use strict' + +const ndjson = require('iterable-ndjson') +const explain = require('explain-error') +const bs58 = require('bs58') +const { Buffer } = require('buffer') +const log = require('debug')('ipfs-http-client:pubsub:subscribe') +const configure = require('../lib/configure') +const toIterable = require('../lib/stream-to-iterable') +const SubscriptionTracker = require('./subscription-tracker') + +module.exports = configure(({ ky }) => { + const subsTracker = SubscriptionTracker.singleton() + const publish = require('./publish')({ ky }) + + return async (topic, handler, options) => { + options = options || {} + options.signal = subsTracker.subscribe(topic, handler, options.signal) + + const searchParams = new URLSearchParams(options.searchParams) + searchParams.set('arg', topic) + if (options.discover != null) searchParams.set('discover', options.discover) + + let res + + // In Firefox, the initial call to fetch does not resolve until some data + // is received. If this doesn't happen within 1 second send an empty message + // to kickstart the process. 
+ const ffWorkaround = setTimeout(async () => { + log(`Publishing empty message to "${topic}" to resolve subscription request`) + try { + await publish(topic, Buffer.alloc(0), options) + } catch (err) { + log('Failed to publish empty message', err) + } + }, 1000) + + try { + res = await ky.post('pubsub/sub', { + timeout: options.timeout, + signal: options.signal, + headers: options.headers, + searchParams + }) + } catch (err) { // Initial subscribe fail, ensure we clean up + subsTracker.unsubscribe(topic, handler) + throw err + } + + clearTimeout(ffWorkaround) + + readMessages(ndjson(toIterable(res.body)), { + onMessage: handler, + onEnd: () => subsTracker.unsubscribe(topic, handler), + onError: options.onError + }) + } +}) + +async function readMessages (msgStream, { onMessage, onEnd, onError }) { + onError = onError || log + + try { + for await (const msg of msgStream) { + try { + onMessage({ + from: bs58.encode(Buffer.from(msg.from, 'base64')).toString(), + data: Buffer.from(msg.data, 'base64'), + seqno: Buffer.from(msg.seqno, 'base64'), + topicIDs: msg.topicIDs + }) + } catch (err) { + onError(explain(err, 'Failed to parse pubsub message'), false, msg) // Not fatal + } + } + } catch (err) { + // FIXME: In testing with Chrome, err.type is undefined (should not be!) + // Temporarily use the name property instead. + if (err.type !== 'aborted' && err.name !== 'AbortError') { + onError(err, true) // Fatal + } + } finally { + onEnd() + } +} diff --git a/src/pubsub/subscription-tracker.js b/src/pubsub/subscription-tracker.js new file mode 100644 index 000000000..bbd7c2d7a --- /dev/null +++ b/src/pubsub/subscription-tracker.js @@ -0,0 +1,52 @@ +'use strict' + +const AbortController = require('abort-controller') + +class SubscriptionTracker { + constructor () { + this._subs = new Map() + } + + static singleton () { + if (SubscriptionTracker.instance) return SubscriptionTracker.instance + SubscriptionTracker.instance = new SubscriptionTracker() + return SubscriptionTracker.instance + } + + subscribe (topic, handler, signal) { + const topicSubs = this._subs.get(topic) || [] + + if (topicSubs.find(s => s.handler === handler)) { + throw new Error(`Already subscribed to ${topic} with this handler`) + } + + // Create controller so a call to unsubscribe can cancel the request + const controller = new AbortController() + + this._subs.set(topic, [{ handler, controller }].concat(topicSubs)) + + // If there is an external signal, forward the abort event + if (signal) { + signal.addEventListener('abort', () => this.unsubscribe(topic, handler)) + } + + return controller.signal + } + + unsubscribe (topic, handler) { + const subs = this._subs.get(topic) || [] + let unsubs + + if (handler) { + this._subs.set(topic, subs.filter(s => s.handler !== handler)) + unsubs = subs.filter(s => s.handler === handler) + } else { + this._subs.set(topic, []) + unsubs = subs + } + + unsubs.forEach(s => s.controller.abort()) + } +} + +module.exports = SubscriptionTracker diff --git a/src/pubsub/unsubscribe.js b/src/pubsub/unsubscribe.js new file mode 100644 index 000000000..6e7c727f4 --- /dev/null +++ b/src/pubsub/unsubscribe.js @@ -0,0 +1,10 @@ +'use strict' + +const configure = require('../lib/configure') +const SubscriptionTracker = require('./subscription-tracker') + +module.exports = configure(({ ky }) => { + const subsTracker = SubscriptionTracker.singleton() + // eslint-disable-next-line require-await + return async (topic, handler) => subsTracker.unsubscribe(topic, handler) +}) diff --git a/src/utils/load-commands.js 
b/src/utils/load-commands.js index e4a914dd0..467af3cea 100644 --- a/src/utils/load-commands.js +++ b/src/utils/load-commands.js @@ -9,6 +9,7 @@ function requireCommands () { addFromFs: require('../files-regular/add-from-fs'), addFromURL: require('../files-regular/add-from-url'), addFromStream: require('../files-regular/add'), + _addAsyncIterator: require('../files-regular/add-async-iterator'), cat: require('../files-regular/cat'), catReadableStream: require('../files-regular/cat-readable-stream'), catPullStream: require('../files-regular/cat-pull-stream'), diff --git a/src/utils/multipart.js b/src/utils/multipart.js index a6df61fd4..78bead247 100644 --- a/src/utils/multipart.js +++ b/src/utils/multipart.js @@ -67,7 +67,7 @@ class Multipart extends Transform { this.push(leading) - let content = file.content || Buffer.alloc(0) + const content = file.content || Buffer.alloc(0) if (Buffer.isBuffer(content)) { this.push(content) diff --git a/src/utils/prepare-file.js b/src/utils/prepare-file.js index 1ffe3f31c..9a02f8338 100644 --- a/src/utils/prepare-file.js +++ b/src/utils/prepare-file.js @@ -106,7 +106,7 @@ function contentToStream (content) { } function prepareFile (file, opts) { - let files = [].concat(file) + const files = [].concat(file) return flatmap(files, (file) => { // add from fs with file path diff --git a/src/utils/pubsub-message-stream.js b/src/utils/pubsub-message-stream.js deleted file mode 100644 index 992529213..000000000 --- a/src/utils/pubsub-message-stream.js +++ /dev/null @@ -1,34 +0,0 @@ -'use strict' - -const TransformStream = require('readable-stream').Transform -const PubsubMessage = require('./pubsub-message-utils') - -class PubsubMessageStream extends TransformStream { - constructor (options) { - const opts = Object.assign(options || {}, { objectMode: true }) - super(opts) - } - - static from (inputStream, callback) { - let outputStream = inputStream.pipe(new PubsubMessageStream()) - inputStream.on('end', () => outputStream.emit('end')) - callback(null, outputStream) - } - - _transform (obj, enc, callback) { - // go-ipfs returns '{}' as the very first object atm, we skip that - if (Object.keys(obj).length === 0) { - return callback() - } - - try { - const msg = PubsubMessage.deserialize(obj, 'base64') - this.push(msg) - callback() - } catch (err) { - return callback(err) - } - } -} - -module.exports = PubsubMessageStream diff --git a/src/utils/pubsub-message-utils.js b/src/utils/pubsub-message-utils.js deleted file mode 100644 index 53d1e397a..000000000 --- a/src/utils/pubsub-message-utils.js +++ /dev/null @@ -1,39 +0,0 @@ -'use strict' - -const bs58 = require('bs58') - -module.exports = { - deserialize (data, enc) { - enc = enc ? 
enc.toLowerCase() : 'json' - - if (enc === 'json') { - return deserializeFromJson(data) - } else if (enc === 'base64') { - return deserializeFromBase64(data) - } - - throw new Error(`Unsupported encoding: '${enc}'`) - } -} - -function deserializeFromJson (data) { - const json = JSON.parse(data) - return deserializeFromBase64(json) -} - -function deserializeFromBase64 (obj) { - if (!isPubsubMessage(obj)) { - throw new Error(`Not a pubsub message`) - } - - return { - from: bs58.encode(Buffer.from(obj.from, 'base64')).toString(), - seqno: Buffer.from(obj.seqno, 'base64'), - data: Buffer.from(obj.data, 'base64'), - topicIDs: obj.topicIDs || obj.topicCIDs - } -} - -function isPubsubMessage (obj) { - return obj && obj.from && obj.seqno && obj.data && (obj.topicIDs || obj.topicCIDs) -} diff --git a/src/utils/send-files-stream.js b/src/utils/send-files-stream.js index fa2b51b4c..91bd90fb4 100644 --- a/src/utils/send-files-stream.js +++ b/src/utils/send-files-stream.js @@ -27,7 +27,6 @@ function headers (file, i) { module.exports = (send, path) => { return (options) => { - let request let ended = false let writing = false @@ -79,10 +78,12 @@ module.exports = (send, path) => { qs['raw-leaves'] = propOrProp(options, 'raw-leaves', 'rawLeaves') qs['only-hash'] = propOrProp(options, 'only-hash', 'onlyHash') qs['wrap-with-directory'] = propOrProp(options, 'wrap-with-directory', 'wrapWithDirectory') + qs['pin'] = propOrProp(options, 'pin') + qs['preload'] = propOrProp(options, 'preload') qs.hash = propOrProp(options, 'hash', 'hashAlg') if (options.strategy === 'trickle' || options.trickle) { - qs['trickle'] = 'true' + qs.trickle = 'true' } const args = { @@ -100,7 +101,7 @@ module.exports = (send, path) => { retStream.emit('error', err) }) - request = send(args, (err, response) => { + const request = send(args, (err, response) => { if (err) { return retStream.emit('error', err) } diff --git a/src/utils/stringlist-to-array.js b/src/utils/stringlist-to-array.js deleted file mode 100644 index df28ee6df..000000000 --- a/src/utils/stringlist-to-array.js +++ /dev/null @@ -1,9 +0,0 @@ -'use strict' - -// Converts a go-ipfs "stringList" to an array -// { Strings: ['A', 'B'] } --> ['A', 'B'] -function stringlistToArray (res, cb) { - cb(null, res.Strings || []) -} - -module.exports = stringlistToArray diff --git a/src/utils/tar-stream-to-objects.js b/src/utils/tar-stream-to-objects.js index 6d7765a03..724544583 100644 --- a/src/utils/tar-stream-to-objects.js +++ b/src/utils/tar-stream-to-objects.js @@ -20,8 +20,8 @@ class ObjectsStreams extends ReadableStream { { path: 'string', content: Stream } */ const TarStreamToObjects = (inputStream, callback) => { - let outputStream = new ObjectsStreams() - let extractStream = tar.extract() + const outputStream = new ObjectsStreams() + const extractStream = tar.extract() extractStream .on('entry', (header, stream, next) => { diff --git a/test/files-mfs.spec.js b/test/files-mfs.spec.js index f9b46cdd7..beda9b24a 100644 --- a/test/files-mfs.spec.js +++ b/test/files-mfs.spec.js @@ -65,10 +65,10 @@ describe('.files (the MFS API part)', function () { }) it('.add with Buffer module', (done) => { - let Buffer = require('buffer').Buffer + const { Buffer } = require('buffer') - let expectedBufferMultihash = 'QmWfVY9y3xjsixTgbd9AorQxH7VtMpzfx2HaWtsoUYecaX' - let file = Buffer.from('hello') + const expectedBufferMultihash = 'QmWfVY9y3xjsixTgbd9AorQxH7VtMpzfx2HaWtsoUYecaX' + const file = Buffer.from('hello') ipfs.add(file, (err, res) => { expect(err).to.not.exist() diff --git 
a/test/interface.spec.js b/test/interface.spec.js index 962a076b4..86ffac21d 100644 --- a/test/interface.spec.js +++ b/test/interface.spec.js @@ -178,11 +178,6 @@ describe('interface-ipfs-core tests', () => { isNode ? null : { name: 'should readable stream ls with a base58 encoded CID', reason: 'FIXME https://github.com/ipfs/js-ipfs-http-client/issues/339' - }, - // .refs - { - name: 'dag refs test', - reason: 'FIXME unskip when 0.4.21 is released' } ] }) @@ -231,7 +226,7 @@ describe('interface-ipfs-core tests', () => { tests.namePubsub(CommonFactory.create({ spawnOptions: { args: ['--enable-namesys-pubsub'], - initOptions: { bits: 1024 } + initOptions: { bits: 1024, profile: 'test' } } }), { skip: [ @@ -272,22 +267,20 @@ describe('interface-ipfs-core tests', () => { tests.pubsub(CommonFactory.create({ spawnOptions: { args: ['--enable-pubsub-experiment'], - initOptions: { bits: 1024 } + initOptions: { bits: 1024, profile: 'test' } } }), { - skip: isNode ? [ + skip: isWindows ? [ // pubsub.subscribe - isWindows ? { + { name: 'should send/receive 100 messages', reason: 'FIXME https://github.com/ipfs/interface-ipfs-core/pull/188#issuecomment-354673246 and https://github.com/ipfs/go-ipfs/issues/4778' - } : null, - isWindows ? { + }, + { name: 'should receive multiple messages', reason: 'FIXME https://github.com/ipfs/interface-ipfs-core/pull/188#issuecomment-354673246 and https://github.com/ipfs/go-ipfs/issues/4778' - } : null - ] : { - reason: 'FIXME pubsub is not supported in the browser https://github.com/ipfs/js-ipfs-http-client/issues/518' - } + } + ] : null }) tests.repo(defaultCommonFactory) diff --git a/test/lib.configure.spec.js b/test/lib.configure.spec.js new file mode 100644 index 000000000..f58ca4de7 --- /dev/null +++ b/test/lib.configure.spec.js @@ -0,0 +1,77 @@ +/* eslint-env mocha, browser */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const Multiaddr = require('multiaddr') +const { isBrowser, isWebWorker } = require('ipfs-utils/src/env') + +const configure = require('../src/lib/configure') + +describe('lib/configure', () => { + it('should accept no config', () => { + configure(config => { + if (isBrowser || isWebWorker) { + expect(config.apiAddr).to.eql('') + } else { + expect(config.apiAddr).to.eql('http://localhost:5001') + } + })() + }) + + it('should accept string multiaddr', () => { + const input = '/ip4/127.0.0.1/tcp/5001' + configure(config => { + expect(config.apiAddr).to.eql('http://127.0.0.1:5001') + })(input) + }) + + it('should accept multiaddr instance', () => { + const input = Multiaddr('/ip4/127.0.0.1') + configure(config => { + expect(config.apiAddr).to.eql('http://127.0.0.1') + })(input) + }) + + it('should accept object with protocol, host and port', () => { + const input = { protocol: 'https', host: 'ipfs.io', port: 138 } + configure(config => { + expect(config.apiAddr).to.eql('https://ipfs.io:138') + })(input) + }) + + it('should accept object with protocol only', () => { + const input = { protocol: 'https' } + configure(config => { + if (isBrowser || isWebWorker) { + expect(config.apiAddr).to.eql(`https://${location.host}`) + } else { + expect(config.apiAddr).to.eql('https://localhost:5001') + } + })(input) + }) + + it('should accept object with host only', () => { + const input = { host: 'ipfs.io' } + configure(config => { + if (isBrowser || isWebWorker) { + expect(config.apiAddr).to.eql(`http://ipfs.io:${location.port}`) + } else { + 
expect(config.apiAddr).to.eql('http://ipfs.io:5001') + } + })(input) + }) + + it('should accept object with port only', () => { + const input = { port: 138 } + configure(config => { + if (isBrowser || isWebWorker) { + expect(config.apiAddr).to.eql(`http://${location.hostname}:138`) + } else { + expect(config.apiAddr).to.eql('http://localhost:138') + } + })(input) + }) +}) diff --git a/test/lib.error-handler.spec.js b/test/lib.error-handler.spec.js new file mode 100644 index 000000000..4e97260ec --- /dev/null +++ b/test/lib.error-handler.spec.js @@ -0,0 +1,54 @@ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const { HTTPError } = require('ky-universal') +const expect = chai.expect +chai.use(dirtyChai) +const throwsAsync = require('./utils/throws-async') +const errorHandler = require('../src/lib/error-handler') + +describe('lib/error-handler', () => { + it('should parse json error response', async () => { + const res = { + ok: false, + headers: { get: () => 'application/json' }, + json: () => Promise.resolve({ + Message: 'boom', + Code: 0, + Type: 'error' + }), + status: 500 + } + + const err = await throwsAsync(errorHandler(res)) + + expect(err.message).to.eql('boom') + expect(err.status).to.eql(500) + }) + + it('should gracefully fail on parse json', async () => { + const res = { + ok: false, + headers: { get: () => 'application/json' }, + json: () => 'boom', // not valid json! + status: 500 + } + + const err = await throwsAsync(errorHandler(res)) + expect(err instanceof HTTPError).to.be.true() + }) + + it('should gracefully fail on read text', async () => { + const res = { + ok: false, + headers: { get: () => 'text/plain' }, + text: () => Promise.reject(new Error('boom')), + status: 500 + } + + const err = await throwsAsync(errorHandler(res)) + expect(err instanceof HTTPError).to.be.true() + }) +}) diff --git a/test/lib.stream-to-iterable.spec.js b/test/lib.stream-to-iterable.spec.js new file mode 100644 index 000000000..6c14cac94 --- /dev/null +++ b/test/lib.stream-to-iterable.spec.js @@ -0,0 +1,43 @@ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const toIterable = require('../src/lib/stream-to-iterable') + +describe('lib/stream-to-iterable', () => { + it('should return input if already async iterable', () => { + const input = { [Symbol.asyncIterator] () { return this } } + expect(toIterable(input)).to.equal(input) + }) + + it('should convert reader to async iterable', async () => { + const inputData = [2, 31, 3, 4] + const input = { + getReader () { + let i = 0 + return { + read () { + return i === inputData.length + ? 
{ done: true } + : { value: inputData[i++] } + }, + releaseLock () {} + } + } + } + + const chunks = [] + for await (const chunk of toIterable(input)) { + chunks.push(chunk) + } + + expect(chunks).to.eql(inputData) + }) + + it('should throw on unknown stream', () => { + expect(() => toIterable({})).to.throw('unknown stream') + }) +}) diff --git a/test/log.spec.js b/test/log.spec.js index 16782d66a..1e800f075 100644 --- a/test/log.spec.js +++ b/test/log.spec.js @@ -31,7 +31,7 @@ describe('.log', function () { }) it('.log.tail', (done) => { - let i = setInterval(() => { + const i = setInterval(() => { ipfs.add(Buffer.from('just adding some data to generate logs')) }, 1000) diff --git a/test/pubsub-in-browser.spec.js b/test/pubsub-in-browser.spec.js deleted file mode 100644 index ff1a22347..000000000 --- a/test/pubsub-in-browser.spec.js +++ /dev/null @@ -1,162 +0,0 @@ -/* - We currently don't support pubsub when run in the browser, - and we test it with separate set of tests to make sure - if it's being used in the browser, pubsub errors. - - More info: https://github.com/ipfs/js-ipfs-http-client/issues/518 - - This means: - - You can use pubsub from js-ipfs-http-client in Node.js - - You can use pubsub from js-ipfs-http-client in Electron - (when js-ipfs-http-client is ran in the main process of Electron) - - - You can't use pubsub from js-ipfs-http-client in the browser - - You can't use pubsub from js-ipfs-http-client in Electron's - renderer process - - - You can use pubsub from js-ipfs in the browsers - - You can use pubsub from js-ipfs in Node.js - - You can use pubsub from js-ipfs in Electron - (in both the main process and the renderer process) - - See https://github.com/ipfs/js-ipfs for details on - pubsub in js-ipfs -*/ - -/* eslint-env mocha */ -/* eslint max-nested-callbacks: ['error', 8] */ -'use strict' - -const isNode = require('detect-node') -const chai = require('chai') -const dirtyChai = require('dirty-chai') -const expect = chai.expect -chai.use(dirtyChai) - -const ipfsClient = require('../src') -const f = require('./utils/factory') - -const expectedError = 'pubsub is currently not supported when run in the browser' - -describe('.pubsub is not supported in the browser, yet!', function () { - this.timeout(50 * 1000) - - if (isNode) { return } - - const topic = 'pubsub-tests' - let ipfs - let ipfsd - - before((done) => { - f.spawn({ initOptions: { bits: 1024, profile: 'test' } }, (err, _ipfsd) => { - expect(err).to.not.exist() - ipfsd = _ipfsd - ipfs = ipfsClient(_ipfsd.apiAddr) - done() - }) - }) - - after((done) => { - if (!ipfsd) return done() - ipfsd.stop(done) - }) - - describe('everything errors', () => { - describe('Callback API', () => { - describe('.publish', () => { - it('throws an error if called in the browser', (done) => { - ipfs.pubsub.publish(topic, 'hello friend', (err, topics) => { - expect(err).to.exist() - expect(err.message).to.equal(expectedError) - done() - }) - }) - }) - - describe('.subscribe', () => { - const handler = () => {} - it('throws an error if called in the browser', (done) => { - ipfs.pubsub.subscribe(topic, handler, {}, (err, topics) => { - expect(err).to.exist() - expect(err.message).to.equal(expectedError) - done() - }) - }) - }) - - describe('.peers', () => { - it('throws an error if called in the browser', (done) => { - ipfs.pubsub.peers(topic, (err, topics) => { - expect(err).to.exist() - expect(err.message).to.equal(expectedError) - done() - }) - }) - }) - - describe('.ls', () => { - it('throws an error if called in the browser', 
(done) => { - ipfs.pubsub.ls((err, topics) => { - expect(err).to.exist() - expect(err.message).to.equal(expectedError) - done() - }) - }) - }) - }) - - describe('Promise API', () => { - describe('.publish', () => { - it('throws an error if called in the browser', () => { - return ipfs.pubsub.publish(topic, 'hello friend') - .catch((err) => { - expect(err).to.exist() - expect(err.message).to.equal(expectedError) - }) - }) - }) - - describe('.subscribe', () => { - const handler = () => {} - it('throws an error if called in the browser', (done) => { - ipfs.pubsub.subscribe(topic, handler, {}) - .catch((err) => { - expect(err).to.exist() - expect(err.message).to.equal(expectedError) - done() - }) - }) - }) - - describe('.peers', () => { - it('throws an error if called in the browser', (done) => { - ipfs.pubsub.peers(topic) - .catch((err) => { - expect(err).to.exist() - expect(err.message).to.equal(expectedError) - done() - }) - }) - }) - - describe('.ls', () => { - it('throws an error if called in the browser', () => { - return ipfs.pubsub.ls() - .catch((err) => { - expect(err).to.exist() - expect(err.message).to.equal(expectedError) - }) - }) - }) - }) - - describe('.unsubscribe', () => { - it('throws an error if called in the browser', (done) => { - ipfs.pubsub.unsubscribe('test', () => {}, (err) => { - expect(err).to.exist() - expect(err.message).to.equal(expectedError) - done() - }) - }) - }) - }) -}) diff --git a/test/utils/throws-async.js b/test/utils/throws-async.js new file mode 100644 index 000000000..0d4e677fd --- /dev/null +++ b/test/utils/throws-async.js @@ -0,0 +1,10 @@ +'use strict' + +module.exports = async fnOrPromise => { + try { + await (fnOrPromise.then ? fnOrPromise : fnOrPromise()) + } catch (err) { + return err + } + throw new Error('did not throw') +}
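
---

The diff above replaces the old Node-only, stream-based pubsub client with `ky`-backed async functions (`src/pubsub/*`), while `src/pubsub/index.js` keeps the existing callback API working via `promise-nodeify`. For reference, here is a minimal usage sketch of the new promise API; the API address and topic name are placeholders, and the daemon must have pubsub enabled (and, for browser use, CORS configured) as described in the browser-pubsub example README.

```js
'use strict'

// Minimal sketch of the promise-based pubsub API introduced in this release.
// Assumptions: a daemon is listening on 127.0.0.1:5001 with pubsub enabled
// (`--enable-pubsub-experiment`); the topic name is a placeholder.
const { Buffer } = require('buffer')
const ipfsClient = require('ipfs-http-client')

async function main () {
  const ipfs = ipfsClient('/ip4/127.0.0.1/tcp/5001')
  const topic = 'example-topic'

  // Each decoded message has `from` (base58 peer ID string), `data` and
  // `seqno` (Buffers) and `topicIDs` (array of topic names).
  const handler = msg => console.log(`${msg.from}: ${msg.data.toString()}`)

  // Subscribe resolves once the long-lived request is established; `onError`
  // is called if the response stream later fails (fatal errors end the
  // subscription).
  await ipfs.pubsub.subscribe(topic, handler, {
    onError: (err, fatal) => console.error('pubsub stream error', { fatal }, err)
  })

  // Publish accepts a string or Buffer payload.
  await ipfs.pubsub.publish(topic, Buffer.from('hello from ipfs-http-client'))

  // Unsubscribe aborts the underlying HTTP request via the subscription tracker.
  await ipfs.pubsub.unsubscribe(topic, handler)
}

main().catch(console.error)
```

The `onError` hook is the same mechanism the browser example's `index.js` uses to detect a dropped subscription stream and schedule a resubscribe.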