diff --git a/.aegir.js b/.aegir.js
index 848c4a030..f7a4806f3 100644
--- a/.aegir.js
+++ b/.aegir.js
@@ -12,7 +12,7 @@ module.exports = {
served: true,
included: false
}],
- browserNoActivityTimeout: 100 * 1000,
+ browserNoActivityTimeout: 150 * 1000,
singleRun: true
},
hooks: {
diff --git a/CHANGELOG.md b/CHANGELOG.md
index d4765e51f..45aa8c5bc 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,23 @@
+
+# [18.2.0](https://github.com/ipfs/js-ipfs-api/compare/v18.1.2...v18.2.0) (2018-03-16)
+
+
+### Bug Fixes
+
+* disable Browser test on Windows ([385a6c3](https://github.com/ipfs/js-ipfs-api/commit/385a6c3))
+* don't create one webpack bundle for every test file ([3967e96](https://github.com/ipfs/js-ipfs-api/commit/3967e96))
+* last fixes for green ([#719](https://github.com/ipfs/js-ipfs-api/issues/719)) ([658bad2](https://github.com/ipfs/js-ipfs-api/commit/658bad2))
+* set the FileResultStreamConverter explicitly ([dfad55e](https://github.com/ipfs/js-ipfs-api/commit/dfad55e)), closes [#696](https://github.com/ipfs/js-ipfs-api/issues/696)
+* use a different remote server for test ([1fc15a5](https://github.com/ipfs/js-ipfs-api/commit/1fc15a5))
+
+
+### Features
+
+* --only-hash ([#717](https://github.com/ipfs/js-ipfs-api/issues/717)) ([1137401](https://github.com/ipfs/js-ipfs-api/commit/1137401)), closes [#700](https://github.com/ipfs/js-ipfs-api/issues/700)
+* add support for ipfs files stat --with-local ([#695](https://github.com/ipfs/js-ipfs-api/issues/695)) ([b08f21a](https://github.com/ipfs/js-ipfs-api/commit/b08f21a))
+
+
+
## [18.1.2](https://github.com/ipfs/js-ipfs-api/compare/v18.1.1...v18.1.2) (2018-03-09)
diff --git a/package.json b/package.json
index c30438681..769d31cc2 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "ipfs-api",
- "version": "18.1.2",
+ "version": "18.2.0",
"description": "A client library for the IPFS HTTP API",
"main": "src/index.js",
"browser": {
@@ -12,23 +12,23 @@
"ipfs": false
},
"scripts": {
- "test": "cross-env NODE_OPTIONS='--max-old-space-size=8192' aegir test",
+ "test": "aegir test",
"test:node": "aegir test -t node",
- "test:browser": "cross-env NODE_OPTIONS='--max-old-space-size=8192' aegir test -t browser",
- "test:webworker": "cross-env NODE_OPTIONS='--max-old-space-size=8192' aegir test -t webworker",
+ "test:browser": "aegir test -t browser",
+ "test:webworker": "aegir test -t webworker",
"lint": "aegir lint",
"build": "aegir build",
- "release": "cross-env NODE_OPTIONS='--max-old-space-size=8192' aegir release ",
- "release-minor": "cross-env NODE_OPTIONS='--max-old-space-size=8192' aegir release --type minor ",
- "release-major": "cross-env NODE_OPTIONS='--max-old-space-size=8192' aegir release --type major ",
- "coverage": "cross-env NODE_OPTIONS='--max-old-space-size=8192' aegir coverage --timeout 100000",
- "coverage-publish": "cross-env NODE_OPTIONS='--max-old-space-size=8192' aegir coverage --provider coveralls --timeout 100000"
+ "release": "aegir release ",
+ "release-minor": "aegir release --type minor ",
+ "release-major": "aegir release --type major ",
+ "coverage": "aegir coverage --timeout 100000",
+ "coverage-publish": "aegir coverage --provider coveralls --timeout 100000"
},
"dependencies": {
"async": "^2.6.0",
"big.js": "^5.0.3",
"bs58": "^4.0.1",
- "cids": "~0.5.2",
+ "cids": "~0.5.3",
"concat-stream": "^1.6.1",
"detect-node": "^2.0.3",
"flatmap": "0.0.3",
@@ -51,7 +51,7 @@
"pump": "^3.0.0",
"qs": "^6.5.1",
"readable-stream": "^2.3.5",
- "stream-http": "^2.8.0",
+ "stream-http": "^2.8.1",
"stream-to-pull-stream": "^1.7.2",
"streamifier": "^0.1.1",
"tar-stream": "^1.5.5"
@@ -66,20 +66,20 @@
},
"devDependencies": {
"aegir": "^13.0.6",
+ "browser-process-platform": "^0.1.1",
"chai": "^4.1.2",
- "cross-env": "^5.1.3",
"dirty-chai": "^2.0.1",
"eslint-plugin-react": "^7.7.0",
"go-ipfs-dep": "^0.4.13",
"gulp": "^3.9.1",
- "hapi": "^17.2.2",
- "interface-ipfs-core": "~0.55.1",
- "ipfs": "~0.28.0",
- "ipfsd-ctl": "~0.29.1",
+ "hapi": "^17.2.3",
+ "interface-ipfs-core": "~0.56.5",
+ "ipfs": "~0.28.2",
+ "ipfsd-ctl": "~0.30.1",
"pre-commit": "^1.2.2",
"socket.io": "^2.0.4",
"socket.io-client": "^2.0.4",
- "stream-equal": "^1.1.0"
+ "stream-equal": "^1.1.1"
},
"pre-commit": [
"lint",
diff --git a/src/dht/provide.js b/src/dht/provide.js
index 52fceb41e..08bcad6d7 100644
--- a/src/dht/provide.js
+++ b/src/dht/provide.js
@@ -1,6 +1,7 @@
'use strict'
const promisify = require('promisify-es6')
+const CID = require('cids')
module.exports = (send) => {
return promisify((cids, opts, callback) => {
@@ -20,6 +21,13 @@ module.exports = (send) => {
cids = [cids]
}
+ // Validate CID(s) and serialize
+ try {
+ cids = cids.map(cid => new CID(cid).toBaseEncodedString('base58btc'))
+ } catch (err) {
+ return callback(err)
+ }
+
send({
path: 'dht/provide',
args: cids,
diff --git a/src/files/add-pull-stream.js b/src/files/add-pull-stream.js
index daf050de8..2076ffa8d 100644
--- a/src/files/add-pull-stream.js
+++ b/src/files/add-pull-stream.js
@@ -1,6 +1,13 @@
'use strict'
const SendFilesStream = require('../utils/send-files-stream')
+const FileResultStreamConverter = require('../utils/file-result-stream-converter')
const toPull = require('stream-to-pull-stream')
-module.exports = (send) => (options) => toPull(SendFilesStream(send, 'add')(options))
+module.exports = (send) => {
+ return (options) => {
+ options = options || {}
+ options.converter = FileResultStreamConverter
+ return toPull(SendFilesStream(send, 'add')({ qs: options }))
+ }
+}
diff --git a/src/files/add-readable-stream.js b/src/files/add-readable-stream.js
index b3e03d4e8..320abe692 100644
--- a/src/files/add-readable-stream.js
+++ b/src/files/add-readable-stream.js
@@ -1,5 +1,12 @@
'use strict'
const SendFilesStream = require('../utils/send-files-stream')
+const FileResultStreamConverter = require('../utils/file-result-stream-converter')
-module.exports = (send) => SendFilesStream(send, 'add')
+module.exports = (send) => {
+ return (options) => {
+ options = options || {}
+ options.converter = FileResultStreamConverter
+ return SendFilesStream(send, 'add')(options)
+ }
+}
diff --git a/src/files/add.js b/src/files/add.js
index 2d35b414d..dd937855c 100644
--- a/src/files/add.js
+++ b/src/files/add.js
@@ -5,6 +5,7 @@ const ConcatStream = require('concat-stream')
const once = require('once')
const isStream = require('is-stream')
const OtherBuffer = require('buffer').Buffer
+const FileResultStreamConverter = require('../utils/file-result-stream-converter')
const SendFilesStream = require('../utils/send-files-stream')
module.exports = (send) => {
@@ -21,6 +22,7 @@ module.exports = (send) => {
if (!options) {
options = {}
}
+ options.converter = FileResultStreamConverter
const ok = Buffer.isBuffer(_files) ||
isStream.readable(_files) ||
@@ -34,7 +36,7 @@ module.exports = (send) => {
const files = [].concat(_files)
- const stream = createAddStream(options)
+ const stream = createAddStream({ qs: options })
const concat = ConcatStream((result) => callback(null, result))
stream.once('error', callback)
stream.pipe(concat)
diff --git a/src/files/stat.js b/src/files/stat.js
index d53da0dab..4077d1945 100644
--- a/src/files/stat.js
+++ b/src/files/stat.js
@@ -1,14 +1,18 @@
'use strict'
const promisify = require('promisify-es6')
+const _ = require('lodash')
-const transform = function (res, callback) {
+const transform = function (data, callback) {
callback(null, {
- type: res.Type,
- blocks: res.Blocks,
- size: res.Size,
- hash: res.Hash,
- cumulativeSize: res.CumulativeSize
+ type: data.Type,
+ blocks: data.Blocks,
+ size: data.Size,
+ hash: data.Hash,
+ cumulativeSize: data.CumulativeSize,
+ withLocality: data.WithLocality || false,
+ local: data.Local || undefined,
+ sizeLocal: data.SizeLocal || undefined
})
}
@@ -18,6 +22,9 @@ module.exports = (send) => {
callback = opts
opts = {}
}
+
+ opts = _.mapKeys(opts, (v, k) => _.kebabCase(k))
+
send.andTransform({
path: 'files/stat',
args: args,
diff --git a/src/files/write.js b/src/files/write.js
index 5e9efa03b..0485406bd 100644
--- a/src/files/write.js
+++ b/src/files/write.js
@@ -3,6 +3,7 @@
const promisify = require('promisify-es6')
const concatStream = require('concat-stream')
const once = require('once')
+const FileResultStreamConverter = require('../utils/file-result-stream-converter')
const SendFilesStream = require('../utils/send-files-stream')
module.exports = (send) => {
@@ -28,10 +29,11 @@ module.exports = (send) => {
const options = {
args: pathDst,
- qs: opts
+ qs: opts,
+ converter: FileResultStreamConverter
}
- const stream = sendFilesStream(options)
+ const stream = sendFilesStream({ qs: options })
const concat = concatStream((result) => callback(null, result))
stream.once('error', callback)
stream.pipe(concat)
diff --git a/src/util/fs-add.js b/src/util/fs-add.js
index 8a3ea404f..2320fc537 100644
--- a/src/util/fs-add.js
+++ b/src/util/fs-add.js
@@ -2,11 +2,11 @@
const isNode = require('detect-node')
const promisify = require('promisify-es6')
-const moduleConfig = require('../utils/module-config')
const SendOneFile = require('../utils/send-one-file-multiple-results')
+const FileResultStreamConverter = require('../utils/file-result-stream-converter')
-module.exports = (arg) => {
- const sendOneFile = SendOneFile(moduleConfig(arg), 'add')
+module.exports = (send) => {
+ const sendOneFile = SendOneFile(send, 'add')
return promisify((path, opts, callback) => {
if (typeof opts === 'function' &&
@@ -31,6 +31,10 @@ module.exports = (arg) => {
return callback(new Error('"path" must be a string'))
}
- sendOneFile(path, { qs: opts }, callback)
+ const requestOpts = {
+ qs: opts,
+ converter: FileResultStreamConverter
+ }
+ sendOneFile(path, requestOpts, callback)
})
}
diff --git a/src/util/url-add.js b/src/util/url-add.js
index 3caf11cb2..34ccefe26 100644
--- a/src/util/url-add.js
+++ b/src/util/url-add.js
@@ -3,11 +3,11 @@
const promisify = require('promisify-es6')
const parseUrl = require('url').parse
const request = require('../utils/request')
-const moduleConfig = require('../utils/module-config')
const SendOneFile = require('../utils/send-one-file-multiple-results')
+const FileResultStreamConverter = require('../utils/file-result-stream-converter')
-module.exports = (arg) => {
- const sendOneFile = SendOneFile(moduleConfig(arg), 'add')
+module.exports = (send) => {
+ const sendOneFile = SendOneFile(send, 'add')
return promisify((url, opts, callback) => {
if (typeof (opts) === 'function' &&
@@ -49,7 +49,11 @@ const requestWithRedirect = (url, opts, sendOneFile, callback) => {
}
requestWithRedirect(redirection, opts, sendOneFile, callback)
} else {
- sendOneFile(res, { qs: opts }, callback)
+ const requestOpts = {
+ qs: opts,
+ converter: FileResultStreamConverter
+ }
+ sendOneFile(res, requestOpts, callback)
}
}).end()
}
diff --git a/src/utils/converter.js b/src/utils/file-result-stream-converter.js
similarity index 60%
rename from src/utils/converter.js
rename to src/utils/file-result-stream-converter.js
index b721173e1..7f5b19aeb 100644
--- a/src/utils/converter.js
+++ b/src/utils/file-result-stream-converter.js
@@ -1,14 +1,12 @@
'use strict'
-const pump = require('pump')
const TransformStream = require('readable-stream').Transform
-const streamToValue = require('./stream-to-value')
/*
Transforms a stream of {Name, Hash} objects to include size
of the DAG object.
- Usage: inputStream.pipe(new Converter())
+ Usage: inputStream.pipe(new FileResultStreamConverter())
Input object format:
{
@@ -24,7 +22,7 @@ const streamToValue = require('./stream-to-value')
size: 20
}
*/
-class ConverterStream extends TransformStream {
+class FileResultStreamConverter extends TransformStream {
constructor (options) {
const opts = Object.assign({}, options || {}, { objectMode: true })
super(opts)
@@ -43,19 +41,4 @@ class ConverterStream extends TransformStream {
}
}
-function converter (inputStream, callback) {
- const outputStream = new ConverterStream()
- pump(
- inputStream,
- outputStream,
- (err) => {
- if (err) {
- callback(err)
- }
- })
-
- streamToValue(outputStream, callback)
-}
-
-exports = module.exports = converter
-exports.ConverterStream = ConverterStream
+module.exports = FileResultStreamConverter
diff --git a/src/utils/send-files-stream.js b/src/utils/send-files-stream.js
index 997202b4b..e6379c205 100644
--- a/src/utils/send-files-stream.js
+++ b/src/utils/send-files-stream.js
@@ -6,7 +6,6 @@ const isStream = require('is-stream')
const once = require('once')
const prepareFile = require('./prepare-file')
const Multipart = require('./multipart')
-const Converter = require('./converter').ConverterStream
function headers (file) {
const name = file.path
@@ -32,7 +31,7 @@ module.exports = (send, path) => {
let ended = false
let writing = false
- options = options || {}
+ options = options ? Object.assign({}, options, options.qs) : {}
const multipart = new Multipart()
@@ -43,7 +42,7 @@ module.exports = (send, path) => {
retStream._write = (file, enc, _next) => {
const next = once(_next)
try {
- const files = prepareFile(file, Object.assign({}, options, options.qs))
+ const files = prepareFile(file, options)
.map((file) => Object.assign({headers: headers(file)}, file))
writing = true
@@ -76,23 +75,10 @@ module.exports = (send, path) => {
const qs = options.qs || {}
- if (options['cid-version'] != null) {
- qs['cid-version'] = options['cid-version']
- } else if (options.cidVersion != null) {
- qs['cid-version'] = options.cidVersion
- }
-
- if (options['raw-leaves'] != null) {
- qs['raw-leaves'] = options['raw-leaves']
- } else if (options.rawLeaves != null) {
- qs['raw-leaves'] = options.rawLeaves
- }
-
- if (options.hash != null) {
- qs.hash = options.hash
- } else if (options.hashAlg != null) {
- qs.hash = options.hashAlg
- }
+ qs['cid-version'] = propOrProp(options, 'cid-version', 'cidVersion')
+ qs['raw-leaves'] = propOrProp(options, 'raw-leaves', 'rawLeaves')
+ qs['only-hash'] = propOrProp(options, 'only-hash', 'onlyHash')
+ qs.hash = propOrProp(options, 'hash', 'hashAlg')
const args = {
path: path,
@@ -127,15 +113,27 @@ module.exports = (send, path) => {
response.on('error', (err) => retStream.emit('error', err))
- response.on('data', (d) => {
- if (d.Bytes && options.progress) {
- options.progress(d.Bytes)
- }
- })
- const convertedResponse = new Converter()
- convertedResponse.once('end', () => retStream.push(null))
- convertedResponse.on('data', (d) => retStream.push(d))
- response.pipe(convertedResponse)
+ if (options.converter) {
+ response.on('data', (d) => {
+ if (d.Bytes && options.progress) {
+ options.progress(d.Bytes)
+ }
+ })
+
+ const Converter = options.converter
+ const convertedResponse = new Converter()
+ convertedResponse.once('end', () => retStream.push(null))
+ convertedResponse.on('data', (d) => retStream.push(d))
+ response.pipe(convertedResponse)
+ } else {
+ response.on('data', (d) => {
+ if (d.Bytes && options.progress) {
+ options.progress(d.Bytes)
+ }
+ retStream.push(d)
+ })
+ response.once('end', () => retStream.push(null))
+ }
})
// signal the multipart that the underlying stream has drained and that
@@ -147,3 +145,11 @@ module.exports = (send, path) => {
return retStream
}
}
+
+function propOrProp (source, prop1, prop2) {
+ if (prop1 in source) {
+ return source[prop1]
+ } else if (prop2 in source) {
+ return source[prop2]
+ }
+}
diff --git a/test/diag.spec.js b/test/diag.spec.js
index 216f0ac9a..3a2bb07a5 100644
--- a/test/diag.spec.js
+++ b/test/diag.spec.js
@@ -5,7 +5,7 @@ const chai = require('chai')
const dirtyChai = require('dirty-chai')
const expect = chai.expect
chai.use(dirtyChai)
-const os = require('os')
+const platform = require('browser-process-platform')
const IPFSApi = require('../src')
const f = require('./utils/factory')
@@ -14,7 +14,7 @@ describe('.diag', function () {
this.timeout(50 * 1000)
// go-ipfs does not support these on Windows
- if (os.platform() === 'win32') { return }
+ if (platform === 'win32') { return }
let ipfsd
let ipfs
diff --git a/test/files.spec.js b/test/files.spec.js
index 41c5bc1a8..f2cee3040 100644
--- a/test/files.spec.js
+++ b/test/files.spec.js
@@ -13,6 +13,7 @@ const CID = require('cids')
const IPFSApi = require('../src')
const f = require('./utils/factory')
+const expectTimeout = require('./utils/expect-timeout')
const testfile = loadFixture('test/fixtures/testfile.txt')
@@ -102,6 +103,19 @@ describe('.files (the MFS API part)', function () {
})
})
+ it('files.add with only-hash=true', function () {
+ this.slow(10 * 1000)
+ const content = String(Math.random() + Date.now())
+
+ return ipfs.files.add(Buffer.from(content), { onlyHash: true })
+ .then(files => {
+ expect(files).to.have.length(1)
+
+ // 'ipfs.object.get()' should timeout because content wasn't actually added
+ return expectTimeout(ipfs.object.get(files[0].hash), 4000)
+ })
+ })
+
it('files.add with options', (done) => {
ipfs.files.add(testfile, { pin: false }, (err, res) => {
expect(err).to.not.exist()
@@ -113,6 +127,42 @@ describe('.files (the MFS API part)', function () {
})
})
+ it('files.add pins by default', (done) => {
+ const newContent = Buffer.from(String(Math.random()))
+
+ ipfs.pin.ls((err, pins) => {
+ expect(err).to.not.exist()
+ const initialPinCount = pins.length
+ ipfs.files.add(newContent, (err, res) => {
+ expect(err).to.not.exist()
+
+ ipfs.pin.ls((err, pins) => {
+ expect(err).to.not.exist()
+ expect(pins.length).to.eql(initialPinCount + 1)
+ done()
+ })
+ })
+ })
+ })
+
+ it('files.add with pin=false', (done) => {
+ const newContent = Buffer.from(String(Math.random()))
+
+ ipfs.pin.ls((err, pins) => {
+ expect(err).to.not.exist()
+ const initialPinCount = pins.length
+ ipfs.files.add(newContent, { pin: false }, (err, res) => {
+ expect(err).to.not.exist()
+
+ ipfs.pin.ls((err, pins) => {
+ expect(err).to.not.exist()
+ expect(pins.length).to.eql(initialPinCount)
+ done()
+ })
+ })
+ })
+ })
+
HASH_ALGS.forEach((name) => {
it(`files.add with hash=${name} and raw-leaves=false`, (done) => {
const content = String(Math.random() + Date.now())
@@ -282,7 +332,10 @@ describe('.files (the MFS API part)', function () {
size: 12,
cumulativeSize: 20,
blocks: 0,
- type: 'file'
+ type: 'file',
+ withLocality: false,
+ local: undefined,
+ sizeLocal: undefined
})
done()
diff --git a/test/util.spec.js b/test/util.spec.js
index 8f5362b09..766fd3a65 100644
--- a/test/util.spec.js
+++ b/test/util.spec.js
@@ -9,9 +9,11 @@ chai.use(dirtyChai)
const isNode = require('detect-node')
const path = require('path')
const fs = require('fs')
+const os = require('os')
const IPFSApi = require('../src')
const f = require('./utils/factory')
+const expectTimeout = require('./utils/expect-timeout')
describe('.util', () => {
if (!isNode) { return }
@@ -92,31 +94,66 @@ describe('.util', () => {
done()
})
})
+
+ it('with only-hash=true', function () {
+ this.slow(10 * 1000)
+ const content = String(Math.random() + Date.now())
+ const filepath = path.join(os.tmpdir(), `${content}.txt`)
+ fs.writeFileSync(filepath, content)
+
+ return ipfs.util.addFromFs(filepath, { onlyHash: true })
+ .then(out => {
+ fs.unlinkSync(filepath)
+ return expectTimeout(ipfs.object.get(out[0].hash), 4000)
+ })
+ })
})
- it('.urlAdd http', function (done) {
- this.timeout(20 * 1000)
+ describe('.urlAdd', () => {
+ it('http', function (done) {
+ this.timeout(20 * 1000)
- ipfs.util.addFromURL('http://example.com/', (err, result) => {
- expect(err).to.not.exist()
- expect(result.length).to.equal(1)
- done()
+ ipfs.util.addFromURL('http://example.com/', (err, result) => {
+ expect(err).to.not.exist()
+ expect(result.length).to.equal(1)
+ done()
+ })
})
- })
- it('.urlAdd https', (done) => {
- ipfs.util.addFromURL('https://example.com/', (err, result) => {
- expect(err).to.not.exist()
- expect(result.length).to.equal(1)
- done()
+ it('https', function (done) {
+ this.timeout(20 * 1000)
+
+ ipfs.util.addFromURL('https://example.com/', (err, result) => {
+ expect(err).to.not.exist()
+ expect(result.length).to.equal(1)
+ done()
+ })
})
- })
- it('.urlAdd http with redirection', (done) => {
- ipfs.util.addFromURL('http://covers.openlibrary.org/book/id/969165.jpg', (err, result) => {
- expect(err).to.not.exist()
- expect(result[0].hash).to.equal('QmaL9zy7YUfvWmtD5ZXp42buP7P4xmZJWFkm78p8FJqgjg')
- done()
+ it('http with redirection', function (done) {
+ this.timeout(20 * 1000)
+
+ ipfs.util.addFromURL('http://covers.openlibrary.org/book/id/969165.jpg', (err, result) => {
+ expect(err).to.not.exist()
+ expect(result[0].hash).to.equal('QmaL9zy7YUfvWmtD5ZXp42buP7P4xmZJWFkm78p8FJqgjg')
+ done()
+ })
+ })
+
+ it('.urlAdd http with redirection', (done) => {
+ ipfs.util.addFromURL('https://coverartarchive.org/release/6e2a1694-d8b9-466a-aa33-b1077b2333c1', (err, result) => {
+ expect(err).to.not.exist()
+ expect(result[0].hash).to.equal('QmSUdDvmXuq5YGrL4M3SEz7UZh5eT9WMuAsd9K34sambSj')
+ done()
+ })
+ })
+
+ it('with only-hash=true', function () {
+ this.timeout(10 * 1000)
+ this.slow(10 * 1000)
+
+ return ipfs.util.addFromURL('http://www.randomtext.me/#/gibberish', { onlyHash: true })
+ .then(out => expectTimeout(ipfs.object.get(out[0].hash), 4000))
})
})
})
diff --git a/test/utils/expect-timeout.js b/test/utils/expect-timeout.js
new file mode 100644
index 000000000..51c733075
--- /dev/null
+++ b/test/utils/expect-timeout.js
@@ -0,0 +1,16 @@
+'use strict'
+
+/**
+ * Resolve if `promise` hangs for at least `ms` milliseconds, reject otherwise
+ * @param {Promise} promise promise that you expect to hang
+ * @param {Number} ms millis to wait
+ * @return {Promise}
+ */
+module.exports = (promise, ms) => {
+ return Promise.race([
+ promise.then((out) => {
+ throw new Error('Expected Promise to timeout but it was successful.')
+ }),
+ new Promise((resolve, reject) => setTimeout(resolve, ms))
+ ])
+}