From 69b56d0de18c22a7a60367b405d69ac14535e50b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Sun, 13 Jan 2019 12:24:14 +0100 Subject: [PATCH 01/77] feat(http1): Added support for transparent HTTP/1(.1) Both unencrypted (http) and encrypted (https) is supported for both HTTP/1(.1) and HTTP/2. For https, fetch-h2 will use ALPN to negotiate version. BREAKING CHANGE: "http://" will default to HTTP/1(.1) rather than HTTP/2 --- README.md | 2 +- certs/cert.pem | 33 +- certs/key.pem | 52 +- index.ts | 5 +- lib/context-http1.ts | 345 +++++++++++++ lib/context-http2.ts | 337 ++++++++++++ lib/context-https.ts | 66 +++ lib/context.ts | 508 ++++++++----------- lib/core.ts | 60 ++- lib/fetch-common.ts | 304 +++++++++++ lib/fetch-http1.ts | 252 +++++++++ lib/fetch-http2.ts | 317 ++++++++++++ lib/fetch.ts | 496 ------------------ lib/request.ts | 14 +- lib/response.ts | 36 +- lib/types.ts | 12 + lib/utils-http2.ts | 11 + lib/utils.ts | 36 +- package.json | 8 +- test/fetch-h2/context.ts | 70 ++- test/fetch-h2/{nghttp2.org.ts => httpbin.ts} | 56 +- test/fetch-h2/index.ts | 147 ++++-- test/lib/server-common.ts | 85 ++++ test/lib/server-helpers.ts | 49 ++ test/lib/server-http1.ts | 282 ++++++++++ test/lib/{server.ts => server-http2.ts} | 69 +-- test/lib/utils.ts | 3 + 27 files changed, 2647 insertions(+), 1008 deletions(-) create mode 100644 lib/context-http1.ts create mode 100644 lib/context-http2.ts create mode 100644 lib/context-https.ts create mode 100644 lib/fetch-common.ts create mode 100644 lib/fetch-http1.ts create mode 100644 lib/fetch-http2.ts delete mode 100644 lib/fetch.ts create mode 100644 lib/types.ts create mode 100644 lib/utils-http2.ts rename test/fetch-h2/{nghttp2.org.ts => httpbin.ts} (71%) create mode 100644 test/lib/server-common.ts create mode 100644 test/lib/server-helpers.ts create mode 100644 test/lib/server-http1.ts rename test/lib/{server.ts => server-http2.ts} (84%) diff --git a/README.md b/README.md index 6fda15c..4f618cc 100644 --- a/README.md +++ b/README.md @@ -17,7 +17,7 @@ Regardless of whether you're actually interested in the Fetch API per se or not, By default, `fetch-h2` will accept `gzip` and `deflate` encodings, and decode transparently. If you also want to allow Brotli (`br`), use the [`fetch-h2-br`](https://www.npmjs.com/package/fetch-h2-br) package. -**NOTE;** HTTP/2 support was introduced in Node.js (version 8.4), and required `node` to be started with a flag `--expose-http2` up to version 8.7 (this module won't work without it). From Node.js 8.8, the `http2` module is available without any flag. The API has changed and not settled until 10.x, **and `fetch-h2` requires 10.x**. +**NOTE;** HTTP/2 support was introduced in Node.js (version 8.4), and required `node` to be started with a flag `--expose-http2` up to version 8.7 (this module won't work without it). From Node.js 8.8, the `http2` module is available without any flag. The API has changed and not settled until 10.x, **and `fetch-h2` requires 10.4+**. 
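For a quick picture of what this change means in practice, here is a minimal sketch of the new protocol selection, assuming the `context( )` factory `fetch-h2` already exports and that it forwards these new `ContextOptions` fields unchanged:

```ts
import { context } from "fetch-h2";

const { fetch } = context( {
	// BREAKING CHANGE: plain "http://" URLs now default to HTTP/1(.1);
	// set this to "http2" to keep the old prior-knowledge HTTP/2 behaviour.
	httpProtocol: "http1",
	// For "https://" URLs, ALPN negotiates among these, in preference order.
	httpsProtocols: [ "http2", "http1" ],
} );
```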
## Releases diff --git a/certs/cert.pem b/certs/cert.pem index 050c514..5d31ca0 100644 --- a/certs/cert.pem +++ b/certs/cert.pem @@ -1,17 +1,20 @@ -----BEGIN CERTIFICATE----- -MIICpDCCAYwCCQCDxBRhc+faETANBgkqhkiG9w0BAQ0FADAUMRIwEAYDVQQDDAls -b2NhbGhvc3QwHhcNMTgwMTE2MTAwODExWhcNMTgwMjE1MTAwODExWjAUMRIwEAYD -VQQDDAlsb2NhbGhvc3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQD3 -2vxYIFETHAQ8NhXWMYg3OTzqT0Q4dbsdBU6UOblPz7T96KyLl4MKsvOGfgtlqBW6 -o3GEHDTUysLPwxG2oHpLKBT38QQtDYWMtPdPcHQcntiZD5AtZHDOEWeYCzQ4ef5e -vYishBHJV9xQjZTAg9faaK5cbfZJn7Csm3wqjXvh0CB0VQY6hCmrbZAff4paEkqL -ZnG/M7k8xUAM5Hi54rCyxO59J2zCVUs1R0pOmwHd5QFeBM5ih+pFAB84ECxfm4Yl -F2n++qrK7gxIsxlKZi0m87dXuAVTCOnuWysYlOsgHfuPbqfLjU6DZJP5Qt30z7rm -qrW74BXm0N5wl9FROOEpAgMBAAEwDQYJKoZIhvcNAQENBQADggEBAIq7lM7tGxhb -WEbEmiStplxce36P/L0jQk5x924aUYixY5S6G4zIJV+i/EOHE3lrFjufzsrAGByi -sGL/VWa64NahKTjnMjkhZ6iDb1hhIyv4+QE3cBd+srLnkcsvI5ToQLa7m1gF/nSv -qmEzNSfvPkNVFnqw+e15N2pG9r8OpVPOVaOSjRP4kE3H02aV7HQnQBiCYHATIeqy -7xy2NV2ZhtbVHIQKVX7m6Vx4Bi6D7kzvogD7IvkyC5OHrxWCLVp8MXLTS37bJaLE -ThTzV4Nh31Upz3A2K9RXPiPyD+KY4DKtcH/cn/c1d1AR0YtuoGFZ42gIKT/HU8G5 -6J89zv/AYgY= +MIIDUDCCAjgCCQDsXVZ67TzRPjANBgkqhkiG9w0BAQsFADBqMQswCQYDVQQGEwJz +ZTEMMAoGA1UECAwDZm9vMQwwCgYDVQQHDANiYXIxDzANBgNVBAoMBmZvb2JhcjES +MBAGA1UEAwwJZm9vYmFyLnNlMRowGAYJKoZIhvcNAQkBFgtmb29AYmFyLnRsZDAe +Fw0xOTAxMTIyMzIzNTBaFw0zOTAxMDcyMzIzNTBaMGoxCzAJBgNVBAYTAnNlMQww +CgYDVQQIDANmb28xDDAKBgNVBAcMA2JhcjEPMA0GA1UECgwGZm9vYmFyMRIwEAYD +VQQDDAlmb29iYXIuc2UxGjAYBgkqhkiG9w0BCQEWC2Zvb0BiYXIudGxkMIIBIjAN +BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAwd8VxBHL0Ovi0T1vbIhc52CIOdqP +lFnRtg/i8jNTrGCXjS2oERrHyvPHYwXSRis2zGbl+WQqZEHVDlVk/SY/z2DH1BTo +h+DIjd9fIlTXpaBTrU5QOKvJdIFjC7oSbxf1E8BtBrnuhwURHqPhEYKne8QdBGCT +HKRRprDa0GQQEJKVBDLmwMfVoLIh0k8ckjTOPx7126PfmsCTfae7psaplXLcJu9m +g/IcIPc8aRKvWLe8tM93p2rA0/1sO3Cj+ZCxWWaPoKmDa53TkFNLBaWMvO+sppXH +u57o5Wq2bF4fUpIvk6jNpqFvvGJhHiyMOpgzk1vtn+N/zraUTyeREkCHOQIDAQAB +MA0GCSqGSIb3DQEBCwUAA4IBAQCJQQA0YbqZEkQRWs0SWxF5NcZcXxyWrgagZ1Pb +LeuYpC3dczP2ugtUvzC5Gq1T6yOXyi2SI/wVu6AVOKx4WWtB61vGJUoVywcUR1ER +kshgQNcOMDPdVXEwZGCJZ162XhpWqGcYSbxZMPVvMmFB+qPkhmtimSSGOKUea29J +Zh6eyRIwgdrf7hfLqSB++Rr5kDGmT/jI7t/B9TySGfrO02+XDFoX19+ga5BV64pY +65fq9tkgpsbX1l6K+dGpTXSG+X/y4X4MJRjue3vOVcmMfXROO3G/MD99JSI+P+xU +jrgBhvpqcfC61nx62eNrXB/QpPUHdb2w+yXX0N2m5vnsX1nM -----END CERTIFICATE----- diff --git a/certs/key.pem b/certs/key.pem index 5501319..d67339a 100644 --- a/certs/key.pem +++ b/certs/key.pem @@ -1,28 +1,28 @@ -----BEGIN PRIVATE KEY----- -MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQD32vxYIFETHAQ8 -NhXWMYg3OTzqT0Q4dbsdBU6UOblPz7T96KyLl4MKsvOGfgtlqBW6o3GEHDTUysLP -wxG2oHpLKBT38QQtDYWMtPdPcHQcntiZD5AtZHDOEWeYCzQ4ef5evYishBHJV9xQ -jZTAg9faaK5cbfZJn7Csm3wqjXvh0CB0VQY6hCmrbZAff4paEkqLZnG/M7k8xUAM -5Hi54rCyxO59J2zCVUs1R0pOmwHd5QFeBM5ih+pFAB84ECxfm4YlF2n++qrK7gxI -sxlKZi0m87dXuAVTCOnuWysYlOsgHfuPbqfLjU6DZJP5Qt30z7rmqrW74BXm0N5w -l9FROOEpAgMBAAECggEAb2tE5wTYDWQZz0ts85XeqxyS8q3heBQMolYhZeaxFFzF -+yJedn4MzYF2ke4Vh4RRCE6zF/VqFoJzotwJGXT4pNKG4pK5EtuyPneXeWGPANKz -gdMKOC2fvDL8w8+9kOneXI6NYygXqtBRXPDYftaF8Uv/ndNc1OnxjRZ0cdiaaP65 -LhVGuEOa9LryKzG/Ix0Oq91Yz5UbXE3CvtFK6WUA979EZaNfiOhPslH/JwPSu0VQ -mjjXwYRDkUXRP5ywwnkAaGa2v6jycVdEzb+DnK0KA/Z5ZlWLm+GctqzUD2EscAQc -hLxpfUfBZkc9vzp3C6KqMn/R9OyNsnYxZPNfOIF7FQKBgQD9Xj3lrHFjScbbzRlZ -DYw05Zui5tE6OqqI4Z3GIHcnSkLy4YCpkDnPpNPbZJFj3IgR2vaBHzQBRztsiCPV -GBcZsapJr3BU5MTXFSseEMLogM8XPkp/+3Vh9E+lBzJQgro5qc9rilzxMq73rsgE -jRx9pc0D5jg65b/UUzBobpsn8wKBgQD6bhWgJ6gXtACCwYm8/eZf0BMvbdyIA/9j -AbSarhEjIHhaiobIpngbZICX+v/lWoZtyExad8S5fXTfRODb40Byc6jqw1azsMaF 
-OyPvwN/wy7ZGXXDOVG3FMzv6eFO85th1+EdkR+vOuJYVv3+4vZSefKhxFT/WWuzX -KtcBwoUVcwKBgQCI/DAJAh/X56aNZiljPXDllJJ+E79hdSCImzr7SMhDROJHgOZY -RvMKsfodLxVwYWZsCO+nxiAO5N1bA4wkBT9QE/+WkTTxoTJPe1FxkuxeWm4dCf+r -jF/dkwKQngB1CQj4bjgH06oGejmhDi10UHrr7/2VMx6JsXfyqvuMKujWQwKBgQDF -SOYWdkdQ7Qgd+jP1RBwxzOzgR28dY/DUYWqTFKABiTnnMgw+lA44njNEB4OCfo86 -eznTZ1j+O9xPa6as81k5EO64i0yJYLD0EoQcA1koDIO66S/OC+syGEue5R4qyb0r -Kn2rfZFCGF58IZGPyyICvPfBpljVGGpOk7wv8bsACwKBgCNKich65/gd6ftgHLVJ -4CpOrBzDO0dATAeYNXgG7ro6sFQDeWh2W5X4KtF/qSrf3AOc+R7uM1O1hUQdbCMJ -Jkgm56CsAxzFsQlm3JPpYg5n2VEL8NOzVHNt6X2JHKrgeQZS3dirjA6Dwyw+McNX -gxaXs3kc/V0IE93UVrlH3+Cf +MIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQDB3xXEEcvQ6+LR +PW9siFznYIg52o+UWdG2D+LyM1OsYJeNLagRGsfK88djBdJGKzbMZuX5ZCpkQdUO +VWT9Jj/PYMfUFOiH4MiN318iVNeloFOtTlA4q8l0gWMLuhJvF/UTwG0Gue6HBREe +o+ERgqd7xB0EYJMcpFGmsNrQZBAQkpUEMubAx9WgsiHSTxySNM4/HvXbo9+awJN9 +p7umxqmVctwm72aD8hwg9zxpEq9Yt7y0z3enasDT/Ww7cKP5kLFZZo+gqYNrndOQ +U0sFpYy876ymlce7nujlarZsXh9Ski+TqM2moW+8YmEeLIw6mDOTW+2f43/OtpRP +J5ESQIc5AgMBAAECggEAC5fahloGFR01+AszcYsJ+zATlVoTgeyJFNkIWjFljIZO +KbwUM8mlLua7ApnjhByrbzesAujRfCNPqUbD/jteT3lbGbySVyXC+HDmEHiAWMAo +oNFxDKKBLn1aPeZHmesV1bOJEYDm2Z4c8vcby19DwqvsjEl2Ip1U4KHsw89oAoWW +u98dsEv5XX30HobngVCU4EPy5mblCYTcWQxE55FHknK3oZ4q1xmAURhGHwN0VwYT +InwzLA79fvBlnppKjuBv8mc2nKj3zgjmDprFsmx2iJ4N5VRmjt2yegRrSyzG+I4T +pclPrB0qQ43SUsyS7gMI2z7z0oH2m996RKLlQK+3XQKBgQDvAZv5F3aHcM3acjfZ +FndMTsFLCXIXWUzjyvpaceHEOFSg00e9rR7c+nP8vK1CXfsIhukRgSvUDMkMpNXN +yptliRvWVQuy/0TZ8om8TiePCE86GZeRjSJTfYKo1z1mWj0pz+75p5kaSR4U669p +rkqFc+tcMoxX+Fisi5ku6Iy4+wKBgQDPp+5snzbv5VZsURCp9eNh4BypCNFWTkMH +kCWC7IEjkZ5jzhU9wfcEMLrTBdnOzT7RA/DfgJOcwylc2EKTN3NQuSg7ffFELM7P +tin0R+kO0JygDj9YhiIW1DfsKe2yJ59pdMDaXr214pI8WTtfZABpc0LnKnbDpJXP ++pzTuKFyWwKBgD8608KwXGE0jKEv+mpqMSF07Fono5FdxKO2/UiUPEAnDuyFOMOL +W1DmyWyhlcyrBFCbMGm7HJc60q2PpiiNY1MXVM/9K90s/1ARhDLXEkwazKr4Pkr5 +ZY1k9P4qA0pisS+wnO5bUnvLwDOUrpFs1LY9lpSLoulbAEqVm+73AtOlAoGAMfR+ +QRdUSgXr8obV8W071FHr0yZR5edR7MHapFJtBreDWRM8vOyqlhF7AEUKDtwFXpcK +HVp7KF0y2CkWawAN979zVEyJ/BKjdgimsyORh4TcCQ0kZBFwpflLsr6rdg5eJSp3 +MpFUJitpbqcwx1PxXWzjDWWDyLERcUUi8TQbcr0CgYAXjId/k5Sm2EfeDsnGlR6L +HcPwJZ5iI+DrNg8sZn5u3EjhjcVT+mSe0CzMMqvKmwZ0LhmiB0ee0XiF0+iTsedd +Sru2OQgPkHgqxj71gBPk1NKozOb3pEDPqHMhuMjP/WBH2OPjB/bc84ayImlHGWaF +1lcTncGab2YneX4hdU15Qg== -----END PRIVATE KEY----- diff --git a/index.ts b/index.ts index 6cf9699..69c942f 100644 --- a/index.ts +++ b/index.ts @@ -1,11 +1,13 @@ import { Body, DataBody, JsonBody, StreamBody } from "./lib/body"; -import { Context, ContextOptions, PushHandler } from "./lib/context"; +import { Context, ContextOptions } from "./lib/context"; +import { PushHandler } from "./lib/context-http2"; import { CookieJar } from "./lib/cookie-jar"; import { AbortError, DecodeFunction, Decoder, FetchInit, + HttpProtocols, OnTrailers, TimeoutError, } from "./lib/core"; @@ -53,6 +55,7 @@ export { onPush, // Re-export + HttpProtocols, Body, JsonBody, StreamBody, diff --git a/lib/context-http1.ts b/lib/context-http1.ts new file mode 100644 index 0000000..65c79ff --- /dev/null +++ b/lib/context-http1.ts @@ -0,0 +1,345 @@ +import { request as requestHttp } from "http"; +import { request as requestHttps, RequestOptions } from "https"; +import { createConnection, Socket } from "net"; +import { URL } from "url"; + +import { defer, Deferred } from "already"; + +import { + Http1Options, +} from "./core"; +import { + Request +} from "./request"; +import { parseInput } from "./utils"; + + +export interface FreeSocketInfo +{ + socket?: Socket; + shouldCreateNew: boolean; +} 
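+// What getFreeSocket( ) hands back from an origin pool: either an idle
+// `socket` to re-use, or `shouldCreateNew` when the pool hasn't exceeded
+// maxSockets. When neither applies, callers queue through waitForSocket( )
+// and are resolved once a connection is released back to the pool.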
+ +export interface ConnectOptions +{ + rejectUnauthorized: boolean | undefined; + createConnection: ( ) => Socket; +} + +class OriginPool +{ + private usedSockets = new Set< Socket >( ); + private unusedSockets = new Set< Socket >( ); + private waiting: Array< Deferred< Socket > > = [ ]; + + private keepAlive: boolean; + private keepAliveMsecs: number; + private maxSockets: number; + private maxFreeSockets: number; + private connOpts: { timeout?: number; }; + + constructor( + keepAlive: boolean, + keepAliveMsecs: number, + maxSockets: number, + maxFreeSockets: number, + timeout: number | void + ) + { + this.keepAlive = keepAlive; + this.keepAliveMsecs = keepAliveMsecs; + this.maxSockets = maxSockets; + this.maxFreeSockets = maxFreeSockets; + this.connOpts = timeout == null ? { } : { timeout }; + } + + public connect( options: RequestOptions ) + { + const request = + options.protocol === "https:" + ? requestHttps + : requestHttp; + + const opts = { ...options }; + if ( opts.rejectUnauthorized == null || options.protocol === "https" ) + delete opts.rejectUnauthorized; + + const req = request( { ...this.connOpts, ...opts } ); + + return req; + } + + public addUsed( socket: Socket ) + { + if ( this.keepAlive ) + socket.setKeepAlive( true, this.keepAliveMsecs ); + + socket.once( "close", ( ) => + { + this.usedSockets.delete( socket ); + this.unusedSockets.delete( socket ); + } ); + + this.usedSockets.add( socket ); + } + + public getFreeSocket( ): FreeSocketInfo + { + const socket = this.getFirstUnused( ); + + if ( socket ) + return { socket, shouldCreateNew: false }; + + const shouldCreateNew = this.maxSockets >= this.usedSockets.size; + + return { shouldCreateNew }; + } + + public waitForSocket( ): Promise< Socket > + { + const deferred = defer< Socket >( ); + + this.waiting.push( deferred ); + + // Trigger due to potential race-condition + this.pumpWaiting( ); + + return deferred.promise; + } + + public async disconnectAll( ) + { + await Promise.all( + [ ...this.usedSockets, ...this.unusedSockets ] + .map( socket => + socket.destroyed ? 
void 0 : this.disconnectSocket( socket ) + ) + ); + + const waiting = this.waiting; + this.waiting.length = 0; + waiting.forEach( waiter => + // TODO: Better error class + message + waiter.reject( new Error( "Disconnected" ) ) + ); + } + + private getFirstUnused( ) + { + for ( const socket of this.unusedSockets.values( ) ) + // We obviously have a socket + return this.moveToUsed( socket ); + + return null; + } + + private tryReuse( socket: Socket ): boolean + { + if ( this.waiting.length === 0 ) + return false; + + const waiting = < Deferred< Socket > >this.waiting.shift( ); + waiting.resolve( socket ); + return true; + } + + private pumpWaiting( ) + { + while ( this.waiting.length > 0 && this.unusedSockets.size > 0 ) + { + const socket = < Socket >this.getFirstUnused( ); + const waiting = < Deferred< Socket > >this.waiting.shift( ); + waiting.resolve( socket ); + } + } + + private async disconnectSocket( socket: Socket ) + { + await new Promise< void >( ( resolve ) => + socket.end( Buffer.from( [ ] ), ( ) => resolve ) + ); + } + + // @ts-ignore + private async moveToUnused( socket: Socket ) + { + if ( this.tryReuse( socket ) ) + return; + + this.usedSockets.delete( socket ); + + if ( this.maxFreeSockets >= this.unusedSockets.size + 1 ) + { + await this.disconnectSocket( socket ); + return; + } + + this.unusedSockets.add( socket ); + socket.unref( ); + } + + private moveToUsed( socket: Socket ) + { + this.unusedSockets.delete( socket ); + this.usedSockets.add( socket ); + socket.ref( ); + return socket; + } +} + +class ContextPool +{ + private options: Http1Options; + private pools = new Map< string, OriginPool >( ); + + constructor( options: Http1Options ) + { + this.options = options; + } + + public hasOrigin( origin: string ) + { + return this.pools.has( origin ); + } + + public getOriginPool( origin: string ): OriginPool + { + const pool = this.pools.get( origin ); + + if ( !pool ) + { + const runIfFunction = + < T extends number | boolean | void > + ( value: T | ( ( origin: string ) => T ) ) => + typeof value === "function" ? value( origin ) : value; + + const keepAlive = runIfFunction( this.options.keepAlive ); + const keepAliveMsecs = runIfFunction( this.options.keepAliveMsecs ); + const maxSockets = runIfFunction( this.options.maxSockets ); + const maxFreeSockets = runIfFunction( this.options.maxFreeSockets ); + const timeout = runIfFunction( this.options.timeout ); + + const newPool = new OriginPool( + keepAlive, + keepAliveMsecs, + maxSockets, + maxFreeSockets, + timeout + ); + this.pools.set( origin, newPool ); + return newPool; + } + + return pool; + } + + public async disconnect( origin: string ) + { + const pool = this.pools.get( origin ); + if ( pool ) + await pool.disconnectAll( ); + } + + public async disconnectAll( ) + { + const pools = [ ...this.pools.values( ) ]; + await Promise.all( pools.map( pool => pool.disconnectAll( ) ) ); + } +} + +export class H1Context +{ + private contextPool: ContextPool; + + constructor( options: Http1Options ) + { + this.contextPool = new ContextPool( options ); + } + + public getFreeSocketForOrigin( origin: string ): FreeSocketInfo + { + return this.contextPool.hasOrigin( origin ) + ? 
this.contextPool.getOriginPool( origin ).getFreeSocket( ) + : { shouldCreateNew: true }; + } + + public addUsedSocket( origin: string, socket: Socket ) + { + return this.contextPool.getOriginPool( origin ).addUsed( socket ); + } + + public waitForSocket( origin: string ): Promise< Socket > + { + return this.contextPool.getOriginPool( origin ).waitForSocket( ); + } + + public connect( url: URL, extraOptions: ConnectOptions, request: Request ) + { + const { + origin, + protocol, + hostname, + password, + pathname, + search, + username, + } = url; + + const path = pathname + search; + + const port = parseInt( parseInput( url.href ).port, 10 ); + + const method = request.method; + + const auth = + ( username || password ) + ? { auth: `${username}:${password}` } + : { }; + + const options: RequestOptions = { + ...extraOptions, + agent: false, + hostname, + method, + path, + port, + protocol, + ...auth, + }; + + return this.contextPool.getOriginPool( origin ).connect( options ); + } + + public async makeNewConnection( url: string ) + { + return new Promise< Socket >( ( resolve, reject ) => + { + const { hostname, port } = parseInput( url ); + + const socket = createConnection( + parseInt( port, 10 ), + hostname, + ( ) => + { + resolve( socket ); + } + ); + + socket.once( "error", reject ); + + return socket; + } ); + } + + public disconnect( url: string ) + { + const { origin } = new URL( url ); + + this.contextPool.disconnect( origin ); + } + + public disconnectAll( ) + { + this.contextPool.disconnectAll( ); + } +} diff --git a/lib/context-http2.ts b/lib/context-http2.ts new file mode 100644 index 0000000..82bca40 --- /dev/null +++ b/lib/context-http2.ts @@ -0,0 +1,337 @@ +import { + ClientHttp2Session, + ClientHttp2Stream, + connect as http2Connect, + constants as h2constants, + IncomingHttpHeaders as IncomingHttp2Headers, + SecureClientSessionOptions, +} from "http2"; +import { URL } from "url"; + +import { asyncGuard, syncGuard } from "callguard"; + +import { + AbortError, + BaseContext, + TimeoutError, +} from "./core"; + +import { Request } from "./request"; +import { Response, StreamResponse } from "./response"; +import { makeOkError } from "./utils"; +import { setGotGoaway } from "./utils-http2"; + + +const { + HTTP2_HEADER_PATH, +} = h2constants; + +interface H2SessionItem +{ + session: ClientHttp2Session; + promise: Promise< ClientHttp2Session >; +} + +export type PushHandler = + ( + origin: string, + request: Request, + getResponse: ( ) => Promise< Response > + ) => void; + +export class H2Context +{ + public _pushHandler?: PushHandler; + + private _h2sessions: Map< string, H2SessionItem > = new Map( ); + private _h2staleSessions: Map< string, Set< ClientHttp2Session > > = + new Map( ); + private _context: BaseContext; + + constructor( context: BaseContext ) + { + this._context = context; + } + + public hasOrigin( origin: string ) + { + return this._h2sessions.has( origin ); + } + + public getOrCreateHttp2( + origin: string, + extraOptions?: SecureClientSessionOptions + ) + : { didCreate: boolean; session: Promise< ClientHttp2Session > } + { + const willCreate = !this._h2sessions.has( origin ); + + if ( willCreate ) + { + const sessionItem = this.connectHttp2( origin, extraOptions ); + + const { promise } = sessionItem; + + // Handle session closure (delete from store) + promise + .then( session => + { + session.once( + "close", + ( ) => this.disconnect( origin, session ) + ); + + session.once( + "goaway", + ( + _errorCode: number, + _lastStreamID: number, + _opaqueData: Buffer + ) => 
+ { + setGotGoaway( session ); + this.releaseSession( origin ); + } + ); + } ) + .catch( ( ) => + { + if ( sessionItem.session ) + this.disconnect( origin, sessionItem.session ); + } ); + + this._h2sessions.set( origin, sessionItem ); + } + + const session = + ( < H2SessionItem >this._h2sessions.get( origin ) ).promise; + + return { didCreate: willCreate, session }; + } + + public disconnectSession( session: ClientHttp2Session ): Promise< void > + { + return new Promise< void >( resolve => + { + if ( session.destroyed ) + return resolve( ); + + session.once( "close", ( ) => resolve( ) ); + session.destroy( ); + } ); + } + + public releaseSession( origin: string ): void + { + const sessionItem = this.deleteActiveSession( origin ); + + if ( !sessionItem ) + return; + + if ( !this._h2staleSessions.has( origin ) ) + this._h2staleSessions.set( origin, new Set( ) ); + + ( < Set< ClientHttp2Session > >this._h2staleSessions.get( origin ) ) + .add( sessionItem.session ); + } + + public deleteActiveSession( origin: string ): H2SessionItem | void + { + if ( !this._h2sessions.has( origin ) ) + return; + + const sessionItem = this._h2sessions.get( origin ); + this._h2sessions.delete( origin ); + + return sessionItem; + } + + public async disconnectStaleSessions( origin: string ): Promise< void > + { + const promises: Array< Promise< void > > = [ ]; + + if ( !this._h2staleSessions.has( origin ) ) + return; + + const sessionSet = + < Set< ClientHttp2Session > >this._h2staleSessions.get( origin ); + this._h2staleSessions.delete( origin ); + + for ( const session of sessionSet ) + promises.push( this.disconnectSession( session ) ); + + return Promise.all( promises ).then( ( ) => { } ); + } + + public disconnectAll( ): Promise< void > + { + const promises: Array< Promise< void > > = [ ]; + + for ( const eventualH2session of this._h2sessions.values( ) ) + { + promises.push( this.handleDisconnect( eventualH2session ) ); + } + this._h2sessions.clear( ); + + for ( const origin of this._h2staleSessions.keys( ) ) + { + promises.push( this.disconnectStaleSessions( origin ) ); + } + + return Promise.all( promises ).then( ( ) => { } ); + } + + public disconnect( url: string, session?: ClientHttp2Session ): Promise< void > + { + const { origin } = new URL( url ); + const promises: Array< Promise< void > > = [ ]; + + const sessionItem = this.deleteActiveSession( origin ); + + if ( sessionItem && ( !session || sessionItem.session === session ) ) + promises.push( this.handleDisconnect( sessionItem ) ); + + if ( !session ) + { + promises.push( this.disconnectStaleSessions( origin ) ); + } + else if ( this._h2staleSessions.has( origin ) ) + { + const sessionSet = + < Set< ClientHttp2Session > > + this._h2staleSessions.get( origin ); + if ( sessionSet.has( session ) ) + { + sessionSet.delete( session ); + promises.push( this.disconnectSession( session ) ); + } + } + + return Promise.all( promises ).then( ( ) => { } ); + } + + private handleDisconnect( sessionItem: H2SessionItem ): Promise< void > + { + const { promise, session } = sessionItem; + + if ( session ) + session.destroy( ); + + return promise + .then( _h2session => { } ) + .catch( err => + { + const debugMode = false; + if ( debugMode ) + // tslint:disable-next-line + console.warn( "Disconnect error", err ); + } ); + } + + private handlePush( + origin: string, + pushedStream: ClientHttp2Stream, + requestHeaders: IncomingHttp2Headers + ) + { + if ( !this._pushHandler ) + return; // Drop push. 
TODO: Signal through error log: #8 + + const path = requestHeaders[ HTTP2_HEADER_PATH ] as string; + + // Remove pseudo-headers + Object.keys( requestHeaders ) + .filter( name => name.charAt( 0 ) === ":" ) + .forEach( name => { delete requestHeaders[ name ]; } ); + + const pushedRequest = new Request( path, { headers: requestHeaders } ); + + const futureResponse = new Promise< Response >( ( resolve, reject ) => + { + const guard = syncGuard( reject, { catchAsync: true } ); + + pushedStream.once( "aborted", ( ) => + reject( new AbortError( "Response aborted" ) ) + ); + pushedStream.once( "frameError", ( ) => + reject( new Error( "Push request failed" ) ) + ); + pushedStream.once( "error", reject ); + + pushedStream.once( "push", guard( + ( responseHeaders: IncomingHttp2Headers ) => + { + const response = new StreamResponse( + this._context._decoders, + path, + pushedStream, + responseHeaders, + false, + { }, + 2 + ); + + resolve( response ); + } + ) ); + } ); + + futureResponse + .catch( _err => { } ); // TODO: #8 + + const getResponse = ( ) => futureResponse; + + return this._pushHandler( origin, pushedRequest, getResponse ); + } + + private connectHttp2( + origin: string, + extraOptions: SecureClientSessionOptions = { } + ) + : H2SessionItem + { + const makeConnectionTimeout = ( ) => + new TimeoutError( `Connection timeout to ${origin}` ); + + const makeError = ( event?: string ) => + event + ? new Error( `Unknown connection error (${event}): ${origin}` ) + : new Error( `Connection closed` ); + + let session: ClientHttp2Session = < ClientHttp2Session >< any >void 0; + + // TODO: #8 + // tslint:disable-next-line + const aGuard = asyncGuard( console.error.bind( console ) ); + + const pushHandler = aGuard( + ( stream: ClientHttp2Stream, headers: IncomingHttp2Headers ) => + this.handlePush( origin, stream, headers ) + ); + + const options = { + ...this._context._sessionOptions, + ...extraOptions, + }; + + const promise = new Promise< ClientHttp2Session >( + ( resolve, reject ) => + { + session = + http2Connect( origin, options, ( ) => resolve( session ) ); + + session.on( "stream", pushHandler ); + + session.once( "close", ( ) => + reject( makeOkError( makeError( ) ) ) ); + + session.once( "timeout", ( ) => + reject( makeConnectionTimeout( ) ) ); + + session.once( "error", reject ); + } + ); + + return { promise, session }; + } +} diff --git a/lib/context-https.ts b/lib/context-https.ts new file mode 100644 index 0000000..77d5061 --- /dev/null +++ b/lib/context-https.ts @@ -0,0 +1,66 @@ +import { SecureClientSessionOptions } from "http2"; +import { connect, ConnectionOptions, TLSSocket } from "tls"; + +import { FetchError, HttpProtocols } from "./core"; + +const alpnProtocols = +{ + http1: Buffer.from( "\x08http/1.1" ), + http2: Buffer.from( "\x02h2" ), +}; + +export interface HttpsSocketResult +{ + socket: TLSSocket; + protocol: "http1" | "http2"; +} + +export function connectTLS( + host: string, + port: string, + protocols: ReadonlyArray< HttpProtocols >, + connOpts: SecureClientSessionOptions +): Promise< HttpsSocketResult > +{ + const usedProtocol = new Set< string >( ); + const _protocols = protocols.filter( protocol => + { + if ( protocol !== "http1" && protocol !== "http2" ) + return false; + if ( usedProtocol.has( protocol ) ) + return false; + usedProtocol.add( protocol ); + return true; + } ); + + const orderedProtocols = Buffer.concat( + _protocols.map( protocol => alpnProtocols[ protocol ] ) + ); + + const opts: ConnectionOptions = { + ...connOpts, + ALPNProtocols: 
orderedProtocols, + servername: host, + }; + + return new Promise< HttpsSocketResult >( ( resolve, reject ) => + { + const socket: TLSSocket = connect( parseInt( port, 10 ), host, opts, ( ) => + { + const { authorized, authorizationError, alpnProtocol = "" } = + socket; + + if ( !authorized && opts.rejectUnauthorized !== false ) + return reject( authorizationError ); + + if ( ![ "h2", "http/1.1", "http/1.0" ].includes( alpnProtocol ) ) + return reject( new FetchError( "Invalid ALPN response" ) ); + + const protocol = alpnProtocol === "h2" ? "http2" : "http1"; + + resolve( { socket, protocol } ); + } ); + + socket.once( "error", reject ); + } ); +} diff --git a/lib/context.ts b/lib/context.ts index f5915a8..93fa017 100644 --- a/lib/context.ts +++ b/lib/context.ts @@ -1,32 +1,33 @@ +import { ClientRequest } from "http"; import { ClientHttp2Session, - ClientHttp2Stream, - connect as http2Connect, - constants as h2constants, - IncomingHttpHeaders as IncomingHttp2Headers, SecureClientSessionOptions, } from "http2"; - -import { asyncGuard, syncGuard } from "callguard"; +import { Socket } from "net"; import { URL } from "url"; +import { H1Context } from "./context-http1"; +import { H2Context, PushHandler } from "./context-http2"; +import { connectTLS } from "./context-https"; import { CookieJar } from "./cookie-jar"; import { - AbortError, + BaseContext, Decoder, + FetchError, FetchInit, + Http1Options, + HttpProtocols, SimpleSession, - TimeoutError, + SimpleSessionHttp1, + SimpleSessionHttp2, } from "./core"; -import { fetch } from "./fetch"; +import { fetch as fetchHttp1 } from "./fetch-http1"; +import { fetch as fetchHttp2 } from "./fetch-http2"; import { version } from "./generated/version"; import { Request } from "./request"; -import { H2StreamResponse, Response } from "./response"; -import { setGotGoaway } from "./utils"; +import { Response } from "./response"; +import { parseInput } from "./utils"; -const { - HTTP2_HEADER_PATH, -} = h2constants; function makeDefaultUserAgent( ): string { @@ -49,51 +50,45 @@ export interface ContextOptions cookieJar: CookieJar; decoders: ReadonlyArray< Decoder >; session: SecureClientSessionOptions; + httpProtocol: HttpProtocols; + httpsProtocols: ReadonlyArray< HttpProtocols >; + http1: Partial< Http1Options >; } -interface SessionItem -{ - session: ClientHttp2Session; - promise: Promise< ClientHttp2Session >; -} - -function makeOkError( err: Error ): Error +export class Context implements BaseContext { - ( < any >err ).metaData = ( < any >err ).metaData || { }; - ( < any >err ).metaData.ok = true; - return err; -} - -export type PushHandler = - ( - origin: string, - request: Request, - getResponse: ( ) => Promise< Response > - ) => void; + public _decoders: ReadonlyArray< Decoder >; + public _sessionOptions: SecureClientSessionOptions; -export class Context -{ - private _h2sessions: Map< string, SessionItem >; - private _h2staleSessions: Map< string, Set< ClientHttp2Session > >; + private h1Context: H1Context; + private h2Context = new H2Context( this ); private _userAgent: string; private _accept: string; private _cookieJar: CookieJar; - private _decoders: ReadonlyArray< Decoder >; - private _sessionOptions: SecureClientSessionOptions; - private _pushHandler?: PushHandler; + private _httpProtocol: HttpProtocols; + private _httpsProtocols: Array< HttpProtocols >; + private _http1Options: Http1Options; constructor( opts?: Partial< ContextOptions > ) { - this._h2sessions = new Map( ); - this._h2staleSessions = new Map( ); - this._userAgent = ""; 
this._accept = ""; this._cookieJar = < CookieJar >< any >void 0; this._decoders = [ ]; this._sessionOptions = { }; + this._httpProtocol = "http1"; + this._httpsProtocols = [ "http2", "http1" ]; + this._http1Options = { + keepAlive: false, + keepAliveMsecs: 1000, + maxFreeSockets: 256, + maxSockets: Infinity, + timeout: void 0, + }; this.setup( opts ); + + this.h1Context = new H1Context( this._http1Options ); } public setup( opts?: Partial< ContextOptions > ) @@ -126,303 +121,242 @@ export class Context this._sessionOptions = "session" in opts ? opts.session || { } : { }; - } - public onPush( pushHandler?: PushHandler ) - { - this._pushHandler = pushHandler; - } + this._httpProtocol = "httpProtocol" in opts + ? opts.httpProtocol || "http1" + : "http1"; - public fetch( input: string | Request, init?: Partial< FetchInit > ) - : Promise< Response > - { - const sessionGetter: SimpleSession = { - accept: ( ) => this._accept, - contentDecoders: ( ) => this._decoders, - cookieJar: this._cookieJar, - get: ( url: string ) => this.get( url ), - userAgent: ( ) => this._userAgent, - }; - return fetch( sessionGetter, input, init ); + this._httpsProtocols = "httpsProtocols" in opts + ? [ ...( opts.httpsProtocols || [ ] ) ] + : [ "http2", "http1" ]; + + Object.assign( this._http1Options, opts.http1 || { } ); } - public releaseSession( origin: string ): void + public onPush( pushHandler?: PushHandler ) { - const sessionItem = this.deleteActiveSession( origin ); - - if ( !sessionItem ) - return; - - if ( !this._h2staleSessions.has( origin ) ) - this._h2staleSessions.set( origin, new Set( ) ); - - ( < Set< ClientHttp2Session > >this._h2staleSessions.get( origin ) ) - .add( sessionItem.session ); + this.h2Context._pushHandler = pushHandler; } - public deleteActiveSession( origin: string ): SessionItem | void + public async fetch( input: string | Request, init?: Partial< FetchInit > ) + : Promise< Response > { - if ( !this._h2sessions.has( origin ) ) - return; + const { hostname, origin, port, protocol, url } = + this.parseInput( input ); + + // Rewrite url to get rid of "http1://" and "http2://" + const request = + input instanceof Request + ? input.url !== url + ? 
input.clone( url ) + : input + : new Request( input, { ...( init || { } ), url } ); + + const { rejectUnauthorized } = this._sessionOptions; + + const makeSimpleSession = ( protocol: HttpProtocols ): SimpleSession => + ( { + accept: ( ) => this._accept, + contentDecoders: ( ) => this._decoders, + cookieJar: this._cookieJar, + protocol, + userAgent: ( ) => this._userAgent, + } ); - const sessionItem = this._h2sessions.get( origin ); - this._h2sessions.delete( origin ); + const doFetchHttp1 = ( socket: Socket ) => + { + const sessionGetterHttp1: SimpleSessionHttp1 = { + get: ( url: string ) => + this.getHttp1( url, socket, request, rejectUnauthorized ), + ...makeSimpleSession( "http1" ), + }; + return fetchHttp1( sessionGetterHttp1, request, init ); + }; - return sessionItem; - } + const doFetchHttp2 = ( ) => + { + const sessionGetterHttp2: SimpleSessionHttp2 = { + get: ( url: string ) => this.getHttp2( url ), + ...makeSimpleSession( "http2" ), + }; + return fetchHttp2( sessionGetterHttp2, request, init ); + }; - public disconnectSession( session: ClientHttp2Session ): Promise< void > - { - return new Promise< void >( resolve => + const tryWaitForHttp1 = async ( ) => { - if ( session.destroyed ) - return resolve( ); + const { socket: freeHttp1Socket, shouldCreateNew } = + this.h1Context.getFreeSocketForOrigin( origin ); - session.once( "close", ( ) => resolve( ) ); - session.destroy( ); - } ); - } + if ( freeHttp1Socket ) + return doFetchHttp1( freeHttp1Socket ); - public disconnectStaleSessions( origin: string ): Promise< void > - { - const promises: Array< Promise< void > > = [ ]; + if ( !shouldCreateNew ) + { + // We've maxed out HTTP/1 connections, wait for one to be + // freed. + const socket = await this.h1Context.waitForSocket( origin ); + return doFetchHttp1( socket ); + } + }; - if ( this._h2staleSessions.has( origin ) ) + if ( protocol === "http1" ) { - const sessionSet = - < Set< ClientHttp2Session > > - this._h2staleSessions.get( origin ); - this._h2staleSessions.delete( origin ); - - for ( const session of sessionSet ) - promises.push( this.disconnectSession( session ) ); + // Plain text HTTP/1(.1) + const resp = await tryWaitForHttp1( ); + if ( resp ) + return resp; + + const socket = await this.h1Context.makeNewConnection( url ); + this.h1Context.addUsedSocket( origin, socket ); + return doFetchHttp1( socket ); } - - return Promise.all( promises ).then( ( ) => { } ); - } - - public disconnect( url: string, session?: ClientHttp2Session ): Promise< void > - { - const { origin } = new URL( url ); - const promises: Array< Promise< void > > = [ ]; - - const sessionItem = this.deleteActiveSession( origin ); - - if ( sessionItem && ( !session || sessionItem.session === session ) ) - promises.push( this.handleDisconnect( sessionItem ) ); - - if ( !session ) + else if ( protocol === "http2" ) { - promises.push( this.disconnectStaleSessions( origin ) ); + // Plain text HTTP/2 + return doFetchHttp2( ); } - else if ( this._h2staleSessions.has( origin ) ) + else // protocol === "https" { - const sessionSet = - < Set< ClientHttp2Session > > - this._h2staleSessions.get( origin ); - if ( sessionSet.has( session ) ) + // If we already have a session/socket open to this origin, + // re-use it + + if ( this.h2Context.hasOrigin( origin ) ) + return doFetchHttp2( ); + + const resp = await tryWaitForHttp1( ); + if ( resp ) + return resp; + + // TODO: Make queue for subsequent fetch requests to the same + // origin, so they can re-use the http2 session, or http1 + // pool once we know what protocol 
will be used. + // This must apply to plain-text http1 too. + + // Use ALPN to figure out protocol lazily + const { protocol, socket } = await connectTLS( + hostname, + port, + this._httpsProtocols, + this._sessionOptions + ); + + if ( protocol === "http2" ) { - sessionSet.delete( session ); - promises.push( this.disconnectSession( session ) ); + // Convert socket into http2 session + await this.h2Context.getOrCreateHttp2( + origin, + { + createConnection: ( ) => socket, + } + ); + // Session now lingering, it will be re-used by the next get() + return doFetchHttp2( ); + } + else // protocol === "http1" + { + this.h1Context.addUsedSocket( origin, socket ); + return doFetchHttp1( socket ); } } - - return Promise.all( promises ).then( ( ) => { } ); } - public disconnectAll( ): Promise< void > + public async disconnect( url: string ) { - const promises: Array< Promise< void > > = [ ]; - - for ( const eventualH2session of this._h2sessions.values( ) ) - { - promises.push( this.handleDisconnect( eventualH2session ) ); - } - this._h2sessions.clear( ); - - for ( const origin of this._h2staleSessions.keys( ) ) - { - promises.push( this.disconnectStaleSessions( origin ) ); - } - - return Promise.all( promises ).then( ( ) => { } ); + await Promise.all( [ + this.h1Context.disconnect( url ), + this.h2Context.disconnect( url ), + ] ); } - private handlePush( - origin: string, - pushedStream: ClientHttp2Stream, - requestHeaders: IncomingHttp2Headers - ) + public async disconnectAll( ) { - if ( !this._pushHandler ) - return; // Drop push. TODO: Signal through error log: #8 - - const path = requestHeaders[ HTTP2_HEADER_PATH ] as string; - - // Remove pseudo-headers - Object.keys( requestHeaders ) - .filter( name => name.charAt( 0 ) === ":" ) - .forEach( name => { delete requestHeaders[ name ]; } ); - - const pushedRequest = new Request( path, { headers: requestHeaders } ); - - const futureResponse = new Promise< Response >( ( resolve, reject ) => - { - const guard = syncGuard( reject, { catchAsync: true } ); - - pushedStream.once( "aborted", ( ) => - reject( new AbortError( "Response aborted" ) ) - ); - pushedStream.once( "frameError", ( ) => - reject( new Error( "Push request failed" ) ) - ); - pushedStream.once( "error", reject ); - - pushedStream.once( "push", guard( - ( responseHeaders: IncomingHttp2Headers ) => - { - const response = new H2StreamResponse( - this._decoders, - path, - pushedStream, - responseHeaders, - false - ); - - resolve( response ); - } - ) ); - } ); - - futureResponse - .catch( _err => { } ); // TODO: #8 - - const getResponse = ( ) => futureResponse; - - return this._pushHandler( origin, pushedRequest, getResponse ); + await Promise.all([ + this.h1Context.disconnectAll( ), + this.h2Context.disconnectAll( ), + ]); } - private connect( origin: string ) - : SessionItem + private getHttp1( + url: string, + socket: Socket, + request: Request, + rejectUnauthorized?: boolean + ) + : ClientRequest { - const makeConnectionTimeout = ( ) => - new TimeoutError( `Connection timeout to ${origin}` ); - - const makeError = ( event?: string ) => - event - ? 
new Error( `Unknown connection error (${event}): ${origin}` ) - : new Error( `Connection closed` ); - - let session: ClientHttp2Session = < ClientHttp2Session >< any >void 0; - - // TODO: #8 - // tslint:disable-next-line - const aGuard = asyncGuard( console.error.bind( console ) ); - - const pushHandler = aGuard( - ( stream: ClientHttp2Stream, headers: IncomingHttp2Headers ) => - this.handlePush( origin, stream, headers ) - ); - - const options = this._sessionOptions; - - const promise = new Promise< ClientHttp2Session >( - ( resolve, reject ) => + return this.h1Context.connect( + new URL( url ), { - session = - http2Connect( origin, options, ( ) => resolve( session ) ); - - session.on( "stream", pushHandler ); - - session.once( "close", ( ) => - reject( makeOkError( makeError( ) ) ) ); - - session.once( "timeout", ( ) => - reject( makeConnectionTimeout( ) ) ); - - session.once( "error", reject ); - } + createConnection: ( ) => socket, + rejectUnauthorized, + }, + request ); - - return { promise, session }; } - private getOrCreate( origin: string, created = false ) + private getOrCreateHttp2( origin: string, created = false ) : Promise< ClientHttp2Session > { - const willCreate = !this._h2sessions.has( origin ); - - if ( willCreate ) - { - const sessionItem = this.connect( origin ); - - const { promise } = sessionItem; - - // Handle session closure (delete from store) - promise - .then( session => - { - session.once( - "close", - ( ) => this.disconnect( origin, session ) - ); + const { didCreate, session } = + this.h2Context.getOrCreateHttp2( origin ); - session.once( - "goaway", - ( - _errorCode: number, - _lastStreamID: number, - _opaqueData: Buffer - ) => - { - setGotGoaway( session ); - this.releaseSession( origin ); - } - ); - } ) - .catch( ( ) => - { - if ( sessionItem.session ) - this.disconnect( origin, sessionItem.session ); - } ); - - this._h2sessions.set( origin, sessionItem ); - } - - return ( < SessionItem >this._h2sessions.get( origin ) ).promise + return session .catch( err => { - if ( willCreate || created ) + if ( didCreate || created ) // Created in this request, forward error throw err; // Not created in this request, try again - return this.getOrCreate( origin, true ); + return this.getOrCreateHttp2( origin, true ); } ); } - private get( url: string ) + private getHttp2( url: string ) : Promise< ClientHttp2Session > { - const { origin } = new URL( url ); + const { origin } = typeof url === "string" ? new URL( url ) : url; - return this.getOrCreate( origin ); + return this.getOrCreateHttp2( origin ); } - private handleDisconnect( sessionItem: SessionItem ): Promise< void > + private parseInput( input: string | Request ) { - const { promise, session } = sessionItem; - - if ( session ) - session.destroy( ); - - return promise - .then( _h2session => { } ) - .catch( err => - { - const debugMode = false; - if ( debugMode ) - // tslint:disable-next-line - console.warn( "Disconnect error", err ); - } ); + const { hostname, origin, port, protocol, url } = + parseInput( typeof input !== "string" ? 
input.url : input ); + + const defaultHttp = this._httpProtocol; + + if ( + ( protocol === "http" && defaultHttp === "http1" ) + || protocol === "http1" + ) + return { + hostname, + origin, + port, + protocol: "http1", + url, + }; + else if ( + ( protocol === "http" && defaultHttp === "http2" ) + || protocol === "http2" + ) + return { + hostname, + origin, + port, + protocol: "http2", + url, + }; + else if ( protocol === "https" ) + return { + hostname, + origin, + port, + protocol: "https", + url, + }; + else + throw new FetchError( `Invalid protocol "${protocol}"` ); } } diff --git a/lib/core.ts b/lib/core.ts index e8cb057..a220320 100644 --- a/lib/core.ts +++ b/lib/core.ts @@ -1,14 +1,10 @@ -import { - ClientHttp2Session, - SecureClientSessionOptions, - SessionOptions, -} from "http2"; - -import { URL } from "url"; +import { ClientRequest } from "http"; +import { ClientHttp2Session, SecureClientSessionOptions } from "http2"; import { CookieJar } from "./cookie-jar"; import { Headers, RawHeaders } from "./headers"; + export type Method = "ACL" | "BIND" | @@ -93,6 +89,8 @@ export type ResponseTypes = "cors" | "error"; +export type HttpProtocols = "http1" | "http2"; + export interface IBody { readonly bodyUsed: boolean; @@ -128,6 +126,11 @@ export interface RequestInit extends RequestInitWithoutBody json: any; } +export interface RequestInitWithUrl extends RequestInit +{ + url: string; +} + export type OnTrailers = ( headers: Headers ) => void; export interface FetchInit extends RequestInit @@ -150,6 +153,15 @@ export interface ResponseInit headers: RawHeaders | Headers; } +export class FetchError extends Error +{ + constructor( message: string ) + { + super( message ); + Object.setPrototypeOf( this, FetchError.prototype ); + } +} + export class AbortError extends Error { constructor( message: string ) @@ -177,17 +189,41 @@ export interface Decoder decode: DecodeFunction; } +export type PerOriginOption< T > = ( origin: string ) => T; + +export interface Http1Options +{ + keepAlive: boolean | PerOriginOption< boolean >; + keepAliveMsecs: number | PerOriginOption< number >; + maxSockets: number | PerOriginOption< number >; + maxFreeSockets: number | PerOriginOption< number >; + timeout: void | number | PerOriginOption< void | number >; +} + +export interface BaseContext +{ + _decoders: ReadonlyArray< Decoder >; + _sessionOptions: SecureClientSessionOptions; +} + export interface SimpleSession { - cookieJar: CookieJar; + protocol: HttpProtocols; - get( - url: string | URL, - options?: SessionOptions | SecureClientSessionOptions - ): Promise< ClientHttp2Session >; + cookieJar: CookieJar; userAgent( ): string; accept( ): string; contentDecoders( ): ReadonlyArray< Decoder >; } + +export interface SimpleSessionHttp1 extends SimpleSession +{ + get( url: string ): ClientRequest; +} + +export interface SimpleSessionHttp2 extends SimpleSession +{ + get( url: string ): Promise< ClientHttp2Session >; +} diff --git a/lib/fetch-common.ts b/lib/fetch-common.ts new file mode 100644 index 0000000..6eb4b77 --- /dev/null +++ b/lib/fetch-common.ts @@ -0,0 +1,304 @@ +import { constants as h2constants } from "http2"; +import { URL } from "url"; + +import { Finally } from "already"; + +import { BodyInspector } from "./body"; +import { + AbortError, + FetchInit, + SimpleSession, + TimeoutError, +} from "./core"; +import { Headers, RawHeaders } from "./headers"; +import { Request } from "./request"; +import { Response } from "./response"; +import { arrayify } from "./utils"; + +const { + // Required for a request + 
HTTP2_HEADER_METHOD, + HTTP2_HEADER_SCHEME, + HTTP2_HEADER_PATH, + + // Methods + HTTP2_METHOD_GET, + HTTP2_METHOD_HEAD, + + // Requests + HTTP2_HEADER_USER_AGENT, + HTTP2_HEADER_ACCEPT, + HTTP2_HEADER_COOKIE, + HTTP2_HEADER_CONTENT_TYPE, + HTTP2_HEADER_CONTENT_LENGTH, + HTTP2_HEADER_ACCEPT_ENCODING, +} = h2constants; + + +function ensureNotCircularRedirection( redirections: ReadonlyArray< string > ) +: void +{ + const urls = [ ...redirections ]; + const last = urls.pop( ); + + for ( let i = 0; i < urls.length; ++i ) + if ( urls[ i ] === last ) + { + const err = new Error( "Redirection loop detected" ); + ( < any >err ).urls = urls.slice( i ); + throw err; + } +} + +export interface FetchExtra +{ + redirected: Array< string >; + timeoutAt?: number; +} + +export interface TimeoutInfo +{ + promise: Promise< Response >; + clear: ( ) => void; +} + +export async function setupFetch( + session: SimpleSession, + request: Request, + init: Partial< FetchInit > = { }, + extra: FetchExtra +) +{ + const { redirected } = extra; + + ensureNotCircularRedirection( redirected ); + + const { url, method, redirect, integrity } = request; + + const { signal, onTrailers } = init; + + const { + origin, + protocol, + pathname, search, hash, + } = new URL( url ); + const path = pathname + search + hash; + + const endStream = + method === HTTP2_METHOD_GET || method === HTTP2_METHOD_HEAD; + + const headers = new Headers( request.headers ); + + const cookies = ( await session.cookieJar.getCookies( url ) ) + .map( cookie => cookie.cookieString( ) ); + + const contentDecoders = session.contentDecoders( ); + + const acceptEncoding = + contentDecoders.length === 0 + ? "gzip;q=1.0, deflate;q=0.5" + : contentDecoders + .map( decoder => `${decoder.name};q=1.0` ) + .join( ", " ) + ", gzip;q=0.8, deflate;q=0.5"; + + if ( headers.has( HTTP2_HEADER_COOKIE ) ) + cookies.push( ...arrayify( headers.get( HTTP2_HEADER_COOKIE ) ) ); + + const headersToSend: RawHeaders = { + // Set required headers + ...( session.protocol === "http1" ? { } : { + [ HTTP2_HEADER_METHOD ]: method, + [ HTTP2_HEADER_SCHEME ]: protocol.replace( /:.*/, "" ), + [ HTTP2_HEADER_PATH ]: path, + } ), + + // Set default headers + [ HTTP2_HEADER_ACCEPT ]: session.accept( ), + [ HTTP2_HEADER_USER_AGENT ]: session.userAgent( ), + [ HTTP2_HEADER_ACCEPT_ENCODING ]: acceptEncoding, + }; + + if ( cookies.length > 0 ) + headersToSend[ HTTP2_HEADER_COOKIE ] = cookies.join( "; " ); + + for ( const [ key, val ] of headers.entries( ) ) + { + if ( key === "host" && session.protocol === "http2" ) + // Convert to :authority like curl does: + // https://github.com/grantila/fetch-h2/issues/9 + headersToSend[ ":authority" ] = val; + else if ( key !== HTTP2_HEADER_COOKIE ) + headersToSend[ key ] = val; + } + + const inspector = new BodyInspector( request ); + + if ( + !endStream && + inspector.length != null && + !request.headers.has( HTTP2_HEADER_CONTENT_LENGTH ) + ) + headersToSend[ HTTP2_HEADER_CONTENT_LENGTH ] = "" + inspector.length; + + if ( + !endStream && + !request.headers.has( "content-type" ) && + inspector.mime + ) + headersToSend[ HTTP2_HEADER_CONTENT_TYPE ] = inspector.mime; + + function timeoutError( ) + { + return new TimeoutError( + `${method} ${url} timed out after ${init.timeout} ms` ); + } + + const timeoutAt = extra.timeoutAt || ( + ( "timeout" in init && typeof init.timeout === "number" ) + // Setting the timeoutAt here at first time allows async cookie + // jar to not take part of timeout for at least the first request + // (in a potential redirect chain) + ? 
Date.now( ) + init.timeout + : void 0 + ); + + function setupTimeout( ): TimeoutInfo | null + { + if ( !timeoutAt ) + return null; + + const now = Date.now( ); + if ( now >= timeoutAt ) + throw timeoutError( ); + + let timerId: NodeJS.Timeout | null; + + return { + clear: ( ) => + { + if ( timerId ) + clearTimeout( timerId ); + }, + promise: new Promise( ( _resolve, reject ) => + { + timerId = setTimeout( ( ) => + { + timerId = null; + reject( timeoutError( ) ); + }, + timeoutAt - now + ); + } ), + }; + + } + + const timeoutInfo = setupTimeout( ); + + function abortError( ) + { + return new AbortError( `${method} ${url} aborted` ); + } + + if ( signal && signal.aborted ) + throw abortError( ); + + const signalPromise: Promise< Response > | null = + signal + ? + new Promise< Response >( ( _resolve, reject ) => + { + signal.onabort = ( ) => + { + reject( abortError( ) ); + }; + } ) + : null; + + function cleanup( ) + { + if ( timeoutInfo && timeoutInfo.clear ) + timeoutInfo.clear( ); + + if ( signal ) + delete signal.onabort; + } + + return { + cleanup, + contentDecoders, + endStream, + headersToSend, + integrity, + method, + onTrailers, + origin, + redirect, + redirected, + request, + signal, + signalPromise, + timeoutAt, + timeoutInfo, + url, + }; +} + +export function handleSignalAndTimeout( + signalPromise: Promise< Response > | null, + timeoutInfo: TimeoutInfo | null, + cleanup: ( ) => void, + fetcher: ( ) => Promise< Response > +) +{ + return Promise.race( + [ + < Promise< any > >signalPromise, + < Promise< any > >( timeoutInfo && timeoutInfo.promise ), + fetcher( ), + ] + .filter( promise => promise ) + ) + .then( ...Finally( cleanup ) ); +} + +export function make100Error( ) +{ + return new Error( + "Request failed with 100 continue. " + + "This can't happen unless a server failure" + ); +} + +export function makeAbortedError( ) +{ + return new AbortError( "Request aborted" ); +} + +export function makeTimeoutError( ) +{ + return new TimeoutError( "Request timed out" ); +} + +export function makeIllegalRedirectError( ) +{ + return new Error( + "Server responded illegally with a " + + "redirect code but missing 'location' header" + ); +} + +export function makeRedirectionError( location: string | null ) +{ + return new Error( `URL got redirected to ${location}` ); +} + +export function makeRedirectionMethodError( + location: string | null, method: string +) +{ + return new Error( + `URL got redirected to ${location}, which ` + + `'fetch-h2' doesn't support for ${method}` + ); +} diff --git a/lib/fetch-http1.ts b/lib/fetch-http1.ts new file mode 100644 index 0000000..82f97d0 --- /dev/null +++ b/lib/fetch-http1.ts @@ -0,0 +1,252 @@ +import { IncomingMessage } from "http"; +import { constants as h2constants } from "http2"; +import { Socket } from "net"; + +import { syncGuard } from "callguard"; + +import { + FetchInit, + SimpleSessionHttp1, +} from "./core"; +import { + FetchExtra, + handleSignalAndTimeout, + make100Error, + makeAbortedError, + makeIllegalRedirectError, + makeRedirectionError, + makeRedirectionMethodError, + makeTimeoutError, + setupFetch, +} from "./fetch-common"; +import { GuardedHeaders } from "./headers"; +import { Request } from "./request"; +import { Response, StreamResponse } from "./response"; +import { arrayify, isRedirectStatus, parseLocation } from "./utils"; + +const { + // Responses, these are the same in HTTP/1.1 and HTTP/2 + HTTP2_HEADER_LOCATION: HTTP1_HEADER_LOCATION, + HTTP2_HEADER_SET_COOKIE: HTTP1_HEADER_SET_COOKIE, +} = h2constants; + + +export async 
function fetchImpl( + session: SimpleSessionHttp1, + input: Request, + init: Partial< FetchInit > = { }, + extra: FetchExtra +) +: Promise< Response > +{ + const { + cleanup, + contentDecoders, + endStream, + headersToSend, + integrity, + method, + onTrailers, + redirect, + redirected, + request, + signal, + signalPromise, + timeoutAt, + timeoutInfo, + url, + } = await setupFetch( session, input, init, extra ); + + const doFetch = async ( ): Promise< Response > => + { + const req = session.get( url ); + + for ( const [ key, value ] of Object.entries( headersToSend ) ) + { + if ( value != null ) + req.setHeader( key, value ); + } + + const response = new Promise< Response >( ( resolve, reject ) => + { + const guard = syncGuard( reject, { catchAsync: true } ); + + req.once( "error", reject ); + + req.once( "aborted", guard( ( ) => + { + reject( makeAbortedError( ) ); + } ) ); + + req.once( "continue", guard( ( ) => + { + reject( make100Error( ) ); + } ) ); + + req.once( "information", guard( ( res: any ) => + { + resolve( new Response( + null, // No body + { status: res.statusCode } + ) ); + } ) ); + + req.once( "timeout", guard( ( ) => + { + reject( makeTimeoutError( ) ); + req.abort( ); + } ) ); + + req.once( "upgrade", guard( + ( + _res: IncomingMessage, + _socket: Socket, + _upgradeHead: Buffer + ) => + { + reject( new Error( "Upgrade not implemented!" ) ); + req.abort( ); + } ) + ); + + req.once( "response", guard( ( res: IncomingMessage ) => + { + if ( signal && signal.aborted ) + { + // No reason to continue, the request is aborted + req.abort( ); + return; + } + + const { headers, statusCode } = res; + + res.once( "end", guard( ( ) => + { + if ( !onTrailers ) + return; + + try + { + const { trailers } = res; + const headers = new GuardedHeaders( "response" ); + + Object.keys( trailers ).forEach( key => + { + if ( trailers[ key ] != null ) + headers.set( key, "" + trailers[ key ] ); + } ); + + onTrailers( headers ); + } + catch ( err ) + { + // TODO: Implement #8 + // tslint:disable-next-line + console.warn( "Trailer handling failed", err ); + } + } ) ); + + const location = parseLocation( + headers[ HTTP1_HEADER_LOCATION ], + url + ); + + const isRedirected = isRedirectStatus[ "" + statusCode ]; + + if ( headers[ HTTP1_HEADER_SET_COOKIE ] ) + { + const setCookies = + arrayify( headers[ HTTP1_HEADER_SET_COOKIE ] ); + + session.cookieJar.setCookies( setCookies, url ); + } + + delete headers[ "set-cookie" ]; + delete headers[ "set-cookie2" ]; + + if ( isRedirected && !location ) + return reject( makeIllegalRedirectError( ) ); + + if ( !isRedirected || redirect === "manual" ) + return resolve( + new StreamResponse( + contentDecoders, + url, + res, + headers, + redirect === "manual" + ? false + : extra.redirected.length > 0, + { + status: res.statusCode, + statusText: res.statusMessage, + }, + 1, + integrity + ) + ); + + if ( redirect === "error" ) + return reject( makeRedirectionError( location ) ); + + // redirect is 'follow' + + // We don't support re-sending a non-GET/HEAD request (as + // we don't want to [can't, if its' streamed] re-send the + // body). The concept is fundementally broken anyway... 
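+				// Below: non-GET/HEAD redirects are rejected, a redirect
+				// without a 'location' header is an error, and otherwise the
+				// request is re-issued against the new location with this URL
+				// appended to the redirect chain (which
+				// ensureNotCircularRedirection uses to detect loops).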
+ if ( !endStream ) + return reject( + makeRedirectionMethodError( location, method ) + ); + + if ( !location ) + return reject( makeIllegalRedirectError( ) ); + + res.destroy( ); + resolve( + fetchImpl( + session, + request.clone( location ), + { signal, onTrailers }, + { + redirected: redirected.concat( url ), + timeoutAt, + } + ) + ); + } ) ); + } ); + + if ( endStream ) + req.end( ); + else + await request.readable( ) + .then( readable => + { + readable.pipe( req ); + } ); + + return response; + }; + + return handleSignalAndTimeout( + signalPromise, + timeoutInfo, + cleanup, + doFetch + ); +} + +export function fetch( + session: SimpleSessionHttp1, + input: Request, + init?: Partial< FetchInit > +) +: Promise< Response > +{ + const timeoutAt = void 0; + + const extra = { timeoutAt, redirected: [ ] }; + + return fetchImpl( session, input, init, extra ); +} diff --git a/lib/fetch-http2.ts b/lib/fetch-http2.ts new file mode 100644 index 0000000..60f7478 --- /dev/null +++ b/lib/fetch-http2.ts @@ -0,0 +1,317 @@ +import { + constants as h2constants, + IncomingHttpHeaders as IncomingHttp2Headers, +} from "http2"; + +import { syncGuard } from "callguard"; + +import { + AbortError, + FetchInit, + SimpleSessionHttp2, +} from "./core"; +import { + FetchExtra, + handleSignalAndTimeout, + make100Error, + makeAbortedError, + makeIllegalRedirectError, + makeRedirectionError, + makeRedirectionMethodError, + makeTimeoutError, + setupFetch, +} from "./fetch-common"; +import { GuardedHeaders } from "./headers"; +import { Request } from "./request"; +import { Response, StreamResponse } from "./response"; +import { arrayify, isRedirectStatus, parseLocation } from "./utils"; +import { hasGotGoaway } from "./utils-http2"; + +const { + // Responses + HTTP2_HEADER_STATUS, + HTTP2_HEADER_LOCATION, + HTTP2_HEADER_SET_COOKIE, + + // Error codes + NGHTTP2_NO_ERROR, +} = h2constants; + +// This is from nghttp2.h, but undocumented in Node.js +const NGHTTP2_ERR_START_STREAM_NOT_ALLOWED = -516; + +interface FetchExtraHttp2 extends FetchExtra +{ + raceConditionedGoaway: Set< string >; // per origin +} + +async function fetchImpl( + session: SimpleSessionHttp2, + input: Request, + init: Partial< FetchInit > = { }, + extra: FetchExtraHttp2 +) +: Promise< Response > +{ + const { + cleanup, + contentDecoders, + endStream, + headersToSend, + integrity, + method, + onTrailers, + origin, + redirect, + redirected, + request, + signal, + signalPromise, + timeoutAt, + timeoutInfo, + url, + } = await setupFetch( session, input, init, extra ); + + const { raceConditionedGoaway } = extra; + + function doFetch( ): Promise< Response > + { + return session.get( url ) + .then( async h2session => + { + const stream = h2session.request( headersToSend, { endStream } ); + + const response = new Promise< Response >( ( resolve, reject ) => + { + const guard = syncGuard( reject, { catchAsync: true } ); + + stream.on( "aborted", guard( ( ..._whatever ) => + { + reject( makeAbortedError( ) ); + } ) ); + + stream.on( "error", guard( ( err: Error ) => + { + reject( err ); + } ) ); + + stream.on( "frameError", guard( + ( _type: number, code: number, _streamId: number ) => + { + if ( + code === NGHTTP2_ERR_START_STREAM_NOT_ALLOWED && + endStream + ) + { + // This could be due to a race-condition in GOAWAY. + // As of current Node.js, the 'goaway' event is + // emitted on the session before this event + // is emitted, so we will know if we got it. 
+						if (
+							!raceConditionedGoaway.has( origin ) &&
+							hasGotGoaway( h2session )
+						)
+						{
+							// Don't retry again due to potential GOAWAY
+							raceConditionedGoaway.add( origin );
+
+							// Since we've got the 'goaway' event, the
+							// context has already released the session,
+							// so a retry will create a new session.
+							resolve(
+								fetchImpl(
+									session,
+									request,
+									{ signal, onTrailers },
+									{
+										raceConditionedGoaway,
+										redirected,
+										timeoutAt,
+									}
+								)
+							);
+
+							return;
+						}
+					}
+
+					reject( new Error( "Request failed" ) );
+				} )
+				);
+
+				stream.on( "close", guard( ( ) =>
+				{
+					// We'll get an 'error' event if there actually is an
+					// error, but not if we got NGHTTP2_NO_ERROR.
+					// In case of an error, the 'error' event will be awaited
+					// instead, to get (and propagate) the error object.
+					if ( stream.rstCode === NGHTTP2_NO_ERROR )
+						reject(
+							new AbortError( "Stream prematurely closed" ) );
+				} ) );
+
+				stream.on( "timeout", guard( ( ..._whatever ) =>
+				{
+					reject( makeTimeoutError( ) );
+				} ) );
+
+				stream.on( "trailers", guard(
+					( _headers: IncomingHttp2Headers, _flags: any ) =>
+					{
+						if ( !onTrailers )
+							return;
+						try
+						{
+							const headers = new GuardedHeaders( "response" );
+
+							Object.keys( _headers ).forEach( key =>
+							{
+								if ( Array.isArray( _headers[ key ] ) )
+									( < Array< string > >_headers[ key ] )
+										.forEach( value =>
+											headers.append( key, value ) );
+								else
+									headers.set( key, "" + _headers[ key ] );
+							} );
+
+							onTrailers( headers );
+						}
+						catch ( err )
+						{
+							// TODO: Implement #8
+							// tslint:disable-next-line
+							console.warn( "Trailer handling failed", err );
+						}
+					} ) );
+
+				// ClientHttp2Stream events
+
+				stream.on( "continue", guard( ( ..._whatever ) =>
+				{
+					reject( make100Error( ) );
+				} ) );
+
+				stream.on( "headers", guard(
+					( headers: IncomingHttp2Headers, _flags: any ) =>
+					{
+						const code = headers[ HTTP2_HEADER_STATUS ];
+						reject( new Error(
+							`Request failed with a ${code} status. ` +
+							"Any 1xx error is unexpected to fetch() and " +
+							"shouldn't happen." ) );
+					}
+				) );
+
+				stream.on( "response", guard(
+					( headers: IncomingHttp2Headers ) =>
+					{
+						if ( signal && signal.aborted )
+						{
+							// No reason to continue, the request is aborted
+							stream.destroy( );
+							return;
+						}
+
+						const status = "" + headers[ HTTP2_HEADER_STATUS ];
+						const location = parseLocation(
+							headers[ HTTP2_HEADER_LOCATION ],
+							url
+						);
+
+						const isRedirected = isRedirectStatus[ status ];
+
+						if ( headers[ HTTP2_HEADER_SET_COOKIE ] )
+						{
+							const setCookies =
+								arrayify( headers[ HTTP2_HEADER_SET_COOKIE ] );
+
+							session.cookieJar.setCookies( setCookies, url );
+						}
+
+						delete headers[ "set-cookie" ];
+						delete headers[ "set-cookie2" ];
+
+						if ( isRedirected && !location )
+							return reject( makeIllegalRedirectError( ) );
+
+						if ( !isRedirected || redirect === "manual" )
+							return resolve(
+								new StreamResponse(
+									contentDecoders,
+									url,
+									stream,
+									headers,
+									redirect === "manual"
+										? false
+										: extra.redirected.length > 0,
+									{ },
+									2,
+									integrity
+								)
+							);
+
+						if ( redirect === "error" )
+							return reject( makeRedirectionError( location ) );
+
+						// redirect is 'follow'
+
+						// We don't support re-sending a non-GET/HEAD request (as
+						// we don't want to [can't, if it's streamed] re-send the
+						// body). The concept is fundamentally broken anyway...
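+						// Non-replayable (streamed body) requests are rejected
+						// below; GET/HEAD requests are re-issued against the
+						// new location with the current URL appended to the
+						// redirect chain.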
+ if ( !endStream ) + return reject( + makeRedirectionMethodError( location, method ) + ); + + if ( !location ) + return reject( makeIllegalRedirectError( ) ); + + stream.destroy( ); + resolve( + fetchImpl( + session, + request.clone( location ), + { signal, onTrailers }, + { + raceConditionedGoaway, + redirected: redirected.concat( url ), + timeoutAt, + } + ) + ); + } ) ); + } ); + + if ( !endStream ) + await request.readable( ) + .then( readable => + { + readable.pipe( stream ); + } ); + + return response; + } ); + } + + return handleSignalAndTimeout( + signalPromise, + timeoutInfo, + cleanup, + doFetch + ); +} + +export function fetch( + session: SimpleSessionHttp2, + input: Request, + init?: Partial< FetchInit > +) +: Promise< Response > +{ + const timeoutAt = void 0; + + const raceConditionedGoaway = new Set( ); + const extra = { timeoutAt, redirected: [ ], raceConditionedGoaway }; + + return fetchImpl( session, input, init, extra ); +} diff --git a/lib/fetch.ts b/lib/fetch.ts deleted file mode 100644 index 3f7d472..0000000 --- a/lib/fetch.ts +++ /dev/null @@ -1,496 +0,0 @@ -import { - constants as h2constants, - IncomingHttpHeaders as IncomingHttp2Headers, -} from "http2"; -import { URL } from "url"; - -import { Finally } from "already"; -import { syncGuard } from "callguard"; - -import { BodyInspector } from "./body"; -import { - AbortError, - FetchInit, - SimpleSession, - TimeoutError, -} from "./core"; -import { GuardedHeaders, Headers, RawHeaders } from "./headers"; -import { Request } from "./request"; -import { H2StreamResponse, Response } from "./response"; -import { arrayify, hasGotGoaway, parseLocation } from "./utils"; - - -const { - // Required for a request - HTTP2_HEADER_METHOD, - HTTP2_HEADER_SCHEME, - HTTP2_HEADER_PATH, - - // Methods - HTTP2_METHOD_GET, - HTTP2_METHOD_HEAD, - - // Requests - HTTP2_HEADER_USER_AGENT, - HTTP2_HEADER_ACCEPT, - HTTP2_HEADER_COOKIE, - HTTP2_HEADER_CONTENT_TYPE, - HTTP2_HEADER_CONTENT_LENGTH, - HTTP2_HEADER_ACCEPT_ENCODING, - - // Responses - HTTP2_HEADER_STATUS, - HTTP2_HEADER_LOCATION, - HTTP2_HEADER_SET_COOKIE, - - // Error codes - NGHTTP2_NO_ERROR, -} = h2constants; - -// This is from nghttp2.h, but undocumented in Node.js -const NGHTTP2_ERR_START_STREAM_NOT_ALLOWED = -516; - -const isRedirectStatus: { [ status: string ]: boolean; } = { - 300: true, - 301: true, - 302: true, - 303: true, - 305: true, - 307: true, - 308: true, -}; - -function ensureNotCircularRedirection( redirections: ReadonlyArray< string > ) -: void -{ - const urls = [ ...redirections ]; - const last = urls.pop( ); - - for ( let i = 0; i < urls.length; ++i ) - if ( urls[ i ] === last ) - { - const err = new Error( "Redirection loop detected" ); - ( < any >err ).urls = urls.slice( i ); - throw err; - } -} - -interface FetchExtra -{ - redirected: Array< string >; - timeoutAt?: number; - raceConditionedGoaway: Set< string >; // per origin -} - -async function fetchImpl( - session: SimpleSession, - input: string | Request, - init: Partial< FetchInit > = { }, - extra: FetchExtra -) -: Promise< Response > -{ - const { redirected, raceConditionedGoaway } = extra; - ensureNotCircularRedirection( redirected ); - - const req = new Request( input, init ); - - const { url, method, redirect, integrity } = req; - - const { signal, onTrailers } = init; - - const { - origin, - protocol, - pathname, search, hash, - } = new URL( url ); - const path = pathname + search + hash; - - const endStream = - method === HTTP2_METHOD_GET || method === HTTP2_METHOD_HEAD; - - const headers = new 
Headers( req.headers ); - - const cookies = ( await session.cookieJar.getCookies( url ) ) - .map( cookie => cookie.cookieString( ) ); - - const contentDecoders = session.contentDecoders( ); - - const acceptEncoding = - contentDecoders.length === 0 - ? "gzip;q=1.0, deflate;q=0.5" - : contentDecoders - .map( decoder => `${decoder.name};q=1.0` ) - .join( ", " ) + ", gzip;q=0.8, deflate;q=0.5"; - - if ( headers.has( HTTP2_HEADER_COOKIE ) ) - cookies.push( ...arrayify( headers.get( HTTP2_HEADER_COOKIE ) ) ); - - const headersToSend: RawHeaders = { - // Set required headers - [ HTTP2_HEADER_METHOD ]: method, - [ HTTP2_HEADER_SCHEME ]: protocol.replace( /:.*/, "" ), - [ HTTP2_HEADER_PATH ]: path, - - // Set default headers - [ HTTP2_HEADER_ACCEPT ]: session.accept( ), - [ HTTP2_HEADER_USER_AGENT ]: session.userAgent( ), - [ HTTP2_HEADER_ACCEPT_ENCODING ]: acceptEncoding, - }; - - if ( cookies.length > 0 ) - headersToSend[ HTTP2_HEADER_COOKIE ] = cookies.join( "; " ); - - for ( const [ key, val ] of headers.entries( ) ) - { - if ( key === "host" ) - // Convert to :authority like curl does: - // https://github.com/grantila/fetch-h2/issues/9 - headersToSend[ ":authority" ] = val; - else if ( key !== HTTP2_HEADER_COOKIE ) - headersToSend[ key ] = val; - } - - const inspector = new BodyInspector( req ); - - if ( - !endStream && - inspector.length != null && - !req.headers.has( HTTP2_HEADER_CONTENT_LENGTH ) - ) - headersToSend[ HTTP2_HEADER_CONTENT_LENGTH ] = "" + inspector.length; - - if ( !endStream && !req.headers.has( "content-type" ) && inspector.mime ) - headersToSend[ HTTP2_HEADER_CONTENT_TYPE ] = inspector.mime; - - function timeoutError( ) - { - return new TimeoutError( - `${method} ${url} timed out after ${init.timeout} ms` ); - } - - const timeoutAt = extra.timeoutAt || ( - ( "timeout" in init && typeof init.timeout === "number" ) - // Setting the timeoutAt here at first time allows async cookie - // jar to not take part of timeout for at least the first request - // (in a potential redirect chain) - ? Date.now( ) + init.timeout - : void 0 - ); - - function setupTimeout( ) - : { promise: Promise< Response >; clear: ( ) => void; } | null - { - if ( !timeoutAt ) - return null; - - const now = Date.now( ); - if ( now >= timeoutAt ) - throw timeoutError( ); - - let timerId: NodeJS.Timeout | null; - - return { - clear: ( ) => - { - if ( timerId ) - clearTimeout( timerId ); - }, - promise: new Promise( ( _resolve, reject ) => - { - timerId = setTimeout( ( ) => - { - timerId = null; - reject( timeoutError( ) ); - }, - timeoutAt - now - ); - } ), - }; - - } - - const timeoutInfo = setupTimeout( ); - - function abortError( ) - { - return new AbortError( `${method} ${url} aborted` ); - } - - if ( signal && signal.aborted ) - throw abortError( ); - - const signalPromise: Promise< Response > | null = - signal - ? 
- new Promise< Response >( ( _resolve, reject ) => - { - signal.onabort = ( ) => - { - reject( abortError( ) ); - }; - } ) - : null; - - function cleanup( ) - { - if ( timeoutInfo && timeoutInfo.clear ) - timeoutInfo.clear( ); - - if ( signal ) - delete signal.onabort; - } - - function doFetch( ): Promise< Response > - { - return session.get( url ) - .then( async h2session => - { - const stream = h2session.request( headersToSend, { endStream } ); - - const response = new Promise< Response >( ( resolve, reject ) => - { - const guard = syncGuard( reject, { catchAsync: true } ); - - stream.on( "aborted", guard( ( ..._whatever ) => - { - reject( new AbortError( "Request aborted" ) ); - } ) ); - - stream.on( "error", guard( ( err: Error ) => - { - reject( err ); - } ) ); - - stream.on( "frameError", guard( - ( _type: number, code: number, _streamId: number ) => - { - if ( - code === NGHTTP2_ERR_START_STREAM_NOT_ALLOWED && - endStream - ) - { - // This could be due to a race-condition in GOAWAY. - // As of current Node.js, the 'goaway' event is - // emitted on the session before this event - // is emitted, so we will know if we got it. - if ( - !raceConditionedGoaway.has( origin ) && - hasGotGoaway( h2session ) - ) - { - // Don't retry again due to potential GOAWAY - raceConditionedGoaway.add( origin ); - - // Since we've got the 'goaway' event, the - // context has already released the session, - // so a retry will create a new session. - resolve( - fetchImpl( - session, - req, - { signal, onTrailers }, - { - raceConditionedGoaway, - redirected, - timeoutAt, - } - ) - ); - - return; - } - } - - reject( new Error( "Request failed" ) ); - } ) - ); - - stream.on( "close", guard( ( ) => - { - // We'll get an 'error' event if there actually is an - // error, but not if we got NGHTTP2_NO_ERROR. - // In case of an error, the 'error' event will be awaited - // instead, to get (and propagate) the error object. - if ( stream.rstCode === NGHTTP2_NO_ERROR ) - reject( - new AbortError( "Stream prematurely closed" ) ); - } ) ); - - stream.on( "timeout", guard( ( ..._whatever ) => - { - reject( new TimeoutError( "Request timed out" ) ); - } ) ); - - stream.on( "trailers", guard( - ( _headers: IncomingHttp2Headers, _flags: any ) => - { - if ( !onTrailers ) - return; - try - { - const headers = new GuardedHeaders( "response" ); - - Object.keys( _headers ).forEach( key => - { - if ( Array.isArray( _headers[ key ] ) ) - ( < Array< string > >_headers[ key ] ) - .forEach( value => - headers.append( key, value ) ); - else - headers.set( key, "" + _headers[ key ] ); - } ); - - onTrailers( headers ); - } - catch ( err ) - { - // TODO: Implement #8 - // tslint:disable-next-line - console.warn( "Trailer handling failed", err ); - } - } ) ); - - // ClientHttp2Stream events - - stream.on( "continue", guard( ( ..._whatever ) => - { - reject( new Error( - "Request failed with 100 continue. " + - "This can't happen unless a server failure" ) ); - } ) ); - - stream.on( "headers", guard( - ( headers: IncomingHttp2Headers, _flags: any ) => - { - const code = headers[ HTTP2_HEADER_STATUS ]; - reject( new Error( - `Request failed with a ${code} status. ` + - "Any 1xx error is unexpected to fetch() and " + - "shouldn't happen." 
) ); - } - ) ); - - stream.on( "response", guard( - ( headers: IncomingHttp2Headers ) => - { - if ( signal && signal.aborted ) - { - // No reason to continue, the request is aborted - stream.destroy( ); - return; - } - - const status = "" + headers[ HTTP2_HEADER_STATUS ]; - const location = parseLocation( - headers[ HTTP2_HEADER_LOCATION ], - url - ); - - const isRedirected = isRedirectStatus[ status ]; - - if ( headers[ HTTP2_HEADER_SET_COOKIE ] ) - { - const setCookies = - arrayify( headers[ HTTP2_HEADER_SET_COOKIE ] ); - - session.cookieJar.setCookies( setCookies, url ); - } - - delete headers[ "set-cookie" ]; - delete headers[ "set-cookie2" ]; - - if ( isRedirected && !location ) - return reject( - new Error( "Server responded illegally with a " + - "redirect code but missing 'location' header" - ) - ); - - if ( !isRedirected || redirect === "manual" ) - return resolve( - new H2StreamResponse( - contentDecoders, - url, - stream, - headers, - redirect === "manual" - ? false - : extra.redirected.length > 0, - integrity - ) - ); - - if ( redirect === "error" ) - return reject( - new Error( `URL got redirected to ${location}` ) ); - - // redirect is 'follow' - - // We don't support re-sending a non-GET/HEAD request (as - // we don't want to [can't, if its' streamed] re-send the - // body). The concept is fundementally broken anyway... - if ( !endStream ) - return reject( new Error( - `URL got redirected to ${location}, which ` + - `'fetch-h2' doesn't support for ${method}` ) ); - - if ( !location ) - return reject( - new Error( - `URL got redirected without 'location' header` - ) - ); - - stream.destroy( ); - resolve( - fetchImpl( - session, - req.clone( location ), - { signal, onTrailers }, - { - raceConditionedGoaway, - redirected: redirected.concat( url ), - timeoutAt, - } - ) - ); - } ) ); - } ); - - if ( !endStream ) - await req.readable( ) - .then( readable => - { - readable.pipe( stream ); - } ); - - return response; - } ); - } - - return Promise.race( - [ - < Promise< any > >signalPromise, - < Promise< any > >( timeoutInfo && timeoutInfo.promise ), - doFetch( ), - ] - .filter( promise => promise ) - ) - .then( ...Finally( cleanup ) ); -} - -export function fetch( - session: SimpleSession, - input: string | Request, - init?: Partial< FetchInit > -) -: Promise< Response > -{ - const timeoutAt = void 0; - - const raceConditionedGoaway = new Set( ); - const extra = { timeoutAt, redirected: [ ], raceConditionedGoaway }; - - return fetchImpl( session, input, init, extra ); -} diff --git a/lib/request.ts b/lib/request.ts index f04d3ee..badbcd6 100644 --- a/lib/request.ts +++ b/lib/request.ts @@ -8,6 +8,7 @@ import { ReferrerTypes, RequestInit, RequestInitWithoutBody, + RequestInitWithUrl, } from "./core"; import { Body, JsonBody } from "./body"; @@ -49,15 +50,17 @@ export class Request extends Body implements RequestInitWithoutBody private _url: string; private _init: Partial< RequestInit >; - constructor( input: string | Request, init?: Partial< RequestInit > ) + constructor( input: string | Request, init?: Partial< RequestInitWithUrl > ) { super( ); + const { url: overwriteUrl } = init || ( { } as RequestInitWithUrl ); + // TODO: Consider throwing a TypeError if the URL has credentials this._url = input instanceof Request - ? input._url - : input; + ? 
( overwriteUrl || input._url ) + : ( overwriteUrl || input ); if ( input instanceof Request ) { @@ -149,9 +152,6 @@ export class Request extends Body implements RequestInitWithoutBody public clone( newUrl?: string ): Request { - const ret = new Request( this ); - if ( newUrl ) - ret._url = newUrl; - return ret; + return new Request( this, { url: newUrl } ); } } diff --git a/lib/response.ts b/lib/response.ts index db3c494..bea24da 100644 --- a/lib/response.ts +++ b/lib/response.ts @@ -1,7 +1,6 @@ import { - ClientHttp2Stream, + // These are same as http1 for the usage here constants as h2constants, - IncomingHttpHeaders, } from "http2"; import { @@ -17,7 +16,6 @@ const { HTTP2_HEADER_CONTENT_LENGTH, } = h2constants; - import { BodyTypes, DecodeFunction, @@ -36,6 +34,11 @@ import { Body, } from "./body"; +import { + IncomingHttpHeaders, +} from "./types"; + + interface Extra { redirected: boolean; @@ -184,7 +187,17 @@ function makeHeadersFromH2Headers( headers: IncomingHttpHeaders ): Headers return out; } -function makeInit( inHeaders: IncomingHttpHeaders ): Partial< ResponseInit > +function makeInitHttp1( inHeaders: IncomingHttpHeaders ) +: Partial< ResponseInit > +{ + // Headers in HTTP/2 are compatible with HTTP/1 (colon illegal in HTTP/1) + const headers = makeHeadersFromH2Headers( inHeaders ); + + return { headers }; +} + +function makeInitHttp2( inHeaders: IncomingHttpHeaders ) +: Partial< ResponseInit > { const status = parseInt( "" + inHeaders[ HTTP2_HEADER_STATUS ], 10 ); const statusText = ""; // Not supported in H2 @@ -239,14 +252,16 @@ function handleEncoding( return decoder( stream ); } -export class H2StreamResponse extends Response +export class StreamResponse extends Response { constructor( contentDecoders: ReadonlyArray< Decoder >, url: string, - stream: ClientHttp2Stream, + stream: NodeJS.ReadableStream, headers: IncomingHttpHeaders, redirected: boolean, + init: Partial< ResponseInit >, + httpVersion: 1 | 2, integrity?: string ) { @@ -256,7 +271,14 @@ export class H2StreamResponse extends Response < NodeJS.ReadableStream >stream, headers ), - makeInit( headers ), + { + ...init, + ...( + httpVersion === 1 + ? 
makeInitHttp1( headers ) + : makeInitHttp2( headers ) + ), + }, makeExtra( url, redirected, integrity ) ); } diff --git a/lib/types.ts b/lib/types.ts new file mode 100644 index 0000000..aea720d --- /dev/null +++ b/lib/types.ts @@ -0,0 +1,12 @@ +import { + IncomingHttpHeaders as IncomingHttpHeadersH1, +} from "http"; + +import { + // ClientHttp2Stream, + // constants as h2constants, + IncomingHttpHeaders as IncomingHttpHeadersH2, +} from "http2"; + +export type IncomingHttpHeaders = + IncomingHttpHeadersH1 | IncomingHttpHeadersH2; diff --git a/lib/utils-http2.ts b/lib/utils-http2.ts new file mode 100644 index 0000000..68ea94f --- /dev/null +++ b/lib/utils-http2.ts @@ -0,0 +1,11 @@ +import { ClientHttp2Session } from "http2"; + +export function hasGotGoaway( session: ClientHttp2Session ) +{ + return !!( < any >session ).__fetch_h2_goaway; +} + +export function setGotGoaway( session: ClientHttp2Session ) +{ + ( < any >session ).__fetch_h2_goaway = true; +} diff --git a/lib/utils.ts b/lib/utils.ts index 2d79e60..f399902 100644 --- a/lib/utils.ts +++ b/lib/utils.ts @@ -1,4 +1,3 @@ -import { ClientHttp2Session } from "http2"; import { URL } from "url"; export function arrayify< T >( @@ -28,12 +27,39 @@ export function parseLocation( return url.href; } -export function hasGotGoaway( session: ClientHttp2Session ) +export const isRedirectStatus: { [ status: string ]: boolean; } = { + 300: true, + 301: true, + 302: true, + 303: true, + 305: true, + 307: true, + 308: true, +}; + +export function makeOkError( err: Error ): Error { - return !!( < any >session ).__fetch_h2_goaway; + ( < any >err ).metaData = ( < any >err ).metaData || { }; + ( < any >err ).metaData.ok = true; + return err; } -export function setGotGoaway( session: ClientHttp2Session ) +export function parseInput( url: string ) { - ( < any >session ).__fetch_h2_goaway = true; + const explicitProtocol = + ( url.startsWith( "http2://" ) || url.startsWith( "http1://" ) ) + ? url.substr( 0, 5 ) + : null; + + url = url.replace( /^http[12]:\/\//, "http://" ); + + const { origin, hostname, port, protocol } = new URL( url ); + + return { + hostname, + origin, + port: port || ( protocol === "https:" ? 
"443" : "80" ), + protocol: explicitProtocol || protocol.replace( ":", "" ), + url, + }; } diff --git a/package.json b/package.json index 774b707..188ec34 100644 --- a/package.json +++ b/package.json @@ -12,7 +12,7 @@ "types": "./dist/index.d.ts", "directories": {}, "engines": { - "node": ">=10.0" + "node": ">=10.4" }, "files": [ "dist" @@ -21,6 +21,7 @@ "build": "./node_modules/.bin/rimraf dist && ./node_modules/.bin/tsc -p .", "lint": "node_modules/.bin/tslint --project .", "mocha": "node_modules/.bin/mocha --bail --check-leaks dist/test", + "mocha:debug": "node_modules/.bin/mocha --inspect-brk dist/test", "test": "npm run lint && node_modules/.bin/nyc npm run mocha", "testfast": "node_modules/.bin/nyc node_modules/.bin/_mocha -- --bail --check-leaks -i --grep nghttp2.org dist/test", "test-nocov": "node_modules/.bin/mocha --bail --check-leaks dist/test", @@ -31,6 +32,7 @@ "coverage": "node_modules/.bin/nyc report --reporter=html", "version": "./node_modules/.bin/ts-node scripts/version-update.ts && npm run buildtest && scripts/version-git-add.sh", "prepack": "npm run build && npm run test", + "makecerts": "openssl req -x509 -nodes -days 7300 -newkey rsa:2048 -keyout certs/key.pem -out certs/cert.pem", "travis-deploy-once": "travis-deploy-once", "semantic-release": "semantic-release", "cz": "git-cz" @@ -44,6 +46,7 @@ "h2", "http2", "client", + "request", "api", "typesafe", "typescript" @@ -79,6 +82,9 @@ "to-arraybuffer": "1.x", "tough-cookie": "3.x" }, + "publishConfig": { + "tag": "beta" + }, "config": { "commitizen": { "path": "./node_modules/cz-conventional-changelog" diff --git a/test/fetch-h2/context.ts b/test/fetch-h2/context.ts index e0d6fde..e217c6b 100644 --- a/test/fetch-h2/context.ts +++ b/test/fetch-h2/context.ts @@ -1,18 +1,15 @@ import { expect } from "chai"; -import { readFileSync } from "fs"; import "mocha"; -import { makeServer } from "../lib/server"; +import { TestData } from "../lib/server-common"; +import { makeMakeServer } from "../lib/server-helpers"; import { context, CookieJar, - disconnectAll, Response, } from "../../"; -afterEach( disconnectAll ); - function ensureStatusSuccess( response: Response ): Response { if ( response.status < 200 || response.status >= 300 ) @@ -20,12 +17,19 @@ function ensureStatusSuccess( response: Response ): Response return response; } -const key = readFileSync( __dirname + "/../../../certs/key.pem" ); -const cert = readFileSync( __dirname + "/../../../certs/cert.pem" ); - -describe( "context", function( ) +( [ + { proto: "http:", version: "http1" }, + { proto: "http:", version: "http2" }, + { proto: "https:", version: "http1" }, + { proto: "https:", version: "http2" }, +] as Array< TestData > ) +.forEach( ( { proto, version } ) => +{ +describe( `context (${version} over ${proto.replace( ":", "" )})`, function( ) { + const { cycleOpts, makeServer } = makeMakeServer( { proto, version } ); + this.timeout( 500 ); describe( "options", ( ) => @@ -35,12 +39,13 @@ describe( "context", function( ) const { server, port } = await makeServer( ); const { disconnectAll, fetch } = context( { + ...cycleOpts, overwriteUserAgent: true, userAgent: "foobar", } ); const response = ensureStatusSuccess( - await fetch( `http://localhost:${port}/headers` ) + await fetch( `${proto}//localhost:${port}/headers` ) ); const res = await response.json( ); @@ -56,11 +61,12 @@ describe( "context", function( ) const { server, port } = await makeServer( ); const { disconnectAll, fetch } = context( { + ...cycleOpts, userAgent: "foobar", } ); const response = 
ensureStatusSuccess( - await fetch( `http://localhost:${port}/headers` ) + await fetch( `${proto}//localhost:${port}/headers` ) ); const res = await response.json( ); @@ -78,10 +84,13 @@ describe( "context", function( ) const accept = "application/foobar, text/*;0.9"; - const { disconnectAll, fetch } = context( { accept } ); + const { disconnectAll, fetch } = context( { + ...cycleOpts, + accept, + } ); const response = ensureStatusSuccess( - await fetch( `http://localhost:${port}/headers` ) + await fetch( `${proto}//localhost:${port}/headers` ) ); const res = await response.json( ); @@ -93,16 +102,17 @@ describe( "context", function( ) } ); } ); + if ( proto === "https:" ) describe( "network settings", ( ) => { it( "should not be able to connect over unauthorized ssl", async ( ) => { - const { server, port } = await makeServer( { - serverOptions: { key, cert }, - } ); + const { server, port } = await makeServer( ); const { disconnectAll, fetch } = context( { + ...cycleOpts, overwriteUserAgent: true, + session: { rejectUnauthorized: true }, userAgent: "foobar", } ); @@ -129,11 +139,10 @@ describe( "context", function( ) it( "should be able to connect over unauthorized ssl", async ( ) => { - const { server, port } = await makeServer( { - serverOptions: { key, cert }, - } ); + const { server, port } = await makeServer( ); const { disconnectAll, fetch } = context( { + ...cycleOpts, overwriteUserAgent: true, session: { rejectUnauthorized: false }, userAgent: "foobar", @@ -161,22 +170,23 @@ describe( "context", function( ) const cookieJar = new CookieJar( ); expect( - await cookieJar.getCookies( `http://localhost:${port}/` ) + await cookieJar.getCookies( `${proto}//localhost:${port}/` ) ).to.be.empty; const { disconnectAll, fetch } = context( { + ...cycleOpts, cookieJar, overwriteUserAgent: true, userAgent: "foobar", } ); - await fetch( `http://localhost:${port}/set-cookie`, { + await fetch( `${proto}//localhost:${port}/set-cookie`, { json: [ "a=b" , "c=d" ], method: "POST", } ); const cookies = - await cookieJar.getCookies( `http://localhost:${port}/` ); + await cookieJar.getCookies( `${proto}//localhost:${port}/` ); expect( cookies ).to.not.be.empty; expect( cookies[ 0 ].key ).to.equal( "a" ); @@ -186,10 +196,10 @@ describe( "context", function( ) // Next request should maintain cookies - await fetch( `http://localhost:${port}/echo` ); + await fetch( `${proto}//localhost:${port}/echo` ); const cookies2 = - await cookieJar.getCookies( `http://localhost:${port}/` ); + await cookieJar.getCookies( `${proto}//localhost:${port}/` ); expect( cookies2 ).to.not.be.empty; @@ -198,10 +208,10 @@ describe( "context", function( ) cookieJar.reset( ); - await fetch( `http://localhost:${port}/echo` ); + await fetch( `${proto}//localhost:${port}/echo` ); const cookies3 = - await cookieJar.getCookies( `http://localhost:${port}/` ); + await cookieJar.getCookies( `${proto}//localhost:${port}/` ); expect( cookies3 ).to.be.empty; @@ -220,7 +230,7 @@ describe( "context", function( ) const { disconnectAll, fetch } = context( ); - const awaitFetch = fetch( "http://localhost:0" ); + const awaitFetch = fetch( "${proto}//localhost:0" ); disconnectAll( ); @@ -237,7 +247,10 @@ describe( "context", function( ) const { server } = await makeServer( ); const { disconnectAll, fetch } = - context( { session: { port: -1, host: < any >{ } } } ); + context( { + ...cycleOpts, + session: { port: -1, host: < any >{ } }, + } ); const awaitFetch = fetch( "ftp://localhost" ); @@ -251,3 +264,4 @@ describe( "context", function( ) } ); } ); 
} ); +} ); diff --git a/test/fetch-h2/nghttp2.org.ts b/test/fetch-h2/httpbin.ts similarity index 71% rename from test/fetch-h2/nghttp2.org.ts rename to test/fetch-h2/httpbin.ts index 45cf852..9db008e 100644 --- a/test/fetch-h2/nghttp2.org.ts +++ b/test/fetch-h2/httpbin.ts @@ -1,3 +1,5 @@ +import { URL } from "url"; + import { delay } from "already"; import { expect } from "chai"; import "mocha"; @@ -6,21 +8,44 @@ import * as through2 from "through2"; import { context, DataBody, - disconnectAll, - fetch, + HttpProtocols, JsonBody, StreamBody, } from "../../"; -afterEach( disconnectAll ); -describe( "nghttp2.org/httpbin", function( ) +interface TestData +{ + protocol: string; + site: string; + protos: Array< HttpProtocols >; +} + +( [ + { protocol: "https:", site: "nghttp2.org/httpbin", protos: [ "http2" ] }, + { protocol: "http:", site: "httpbin.org", protos: [ "http1" ] }, + { protocol: "https:", site: "httpbin.org", protos: [ "http1" ] }, +] as Array< TestData > ) +.forEach( ( { site, protocol, protos } ) => +{ +const host = `${protocol}//${site}`; +const baseHost = new URL( host ).origin; + +const name = `${site} (${protos[ 0 ]} over ${protocol.replace( ":", "" )})`; + +describe( name, function( ) { this.timeout( 5000 ); - it( "should be possible to GET HTTPS/2", async ( ) => + const { fetch, disconnectAll } = context( { + httpsProtocols: protos, + } ); + + afterEach( disconnectAll ); + + it( "should be possible to GET", async ( ) => { - const response = await fetch( "https://nghttp2.org/httpbin/user-agent" ); + const response = await fetch( `${host}/user-agent` ); const data = await response.json( ); expect( data[ "user-agent" ] ).to.include( "fetch-h2/" ); } ); @@ -30,7 +55,7 @@ describe( "nghttp2.org/httpbin", function( ) const testData = { foo: "bar" }; const response = await fetch( - "https://nghttp2.org/httpbin/post", + `${host}/post`, { body: new JsonBody( testData ), method: "POST", @@ -47,7 +72,7 @@ describe( "nghttp2.org/httpbin", function( ) const testData = '{"foo": "data"}'; const response = await fetch( - "https://nghttp2.org/httpbin/post", + `${host}/post`, { body: new DataBody( testData ), method: "POST", @@ -67,7 +92,7 @@ describe( "nghttp2.org/httpbin", function( ) stream.end( ); const response = await fetch( - "https://nghttp2.org/httpbin/post", + `${host}/post`, { body: new StreamBody( stream ), headers: { "content-length": "6" }, @@ -84,7 +109,7 @@ describe( "nghttp2.org/httpbin", function( ) const stream = through2( ); const eventualResponse = fetch( - "https://nghttp2.org/httpbin/post", + `${host}/post`, { body: new StreamBody( stream ), headers: { "content-length": "6" }, @@ -109,13 +134,13 @@ describe( "nghttp2.org/httpbin", function( ) const { fetch, disconnectAll } = context( ); const responseSet = await fetch( - "https://nghttp2.org/httpbin/cookies/set?foo=bar", + `${host}/cookies/set?foo=bar`, { redirect: "manual" } ); expect( responseSet.headers.has( "location" ) ).to.be.true; const redirectedTo = responseSet.headers.get( "location" ); - const response = await fetch( "https://nghttp2.org" + redirectedTo ); + const response = await fetch( baseHost + redirectedTo ); const data = await response.json( ); expect( data.cookies ).to.deep.equal( { foo: "bar" } ); @@ -128,10 +153,10 @@ describe( "nghttp2.org/httpbin", function( ) const { fetch, disconnectAll } = context( ); const response = await fetch( - "https://nghttp2.org/httpbin/relative-redirect/2", + `${host}/relative-redirect/2`, { redirect: "follow" } ); - expect( response.url ).to.equal( 
"https://nghttp2.org/httpbin/get" ); + expect( response.url ).to.equal( `${host}/get` ); await response.text( ); await disconnectAll( ); @@ -139,8 +164,9 @@ describe( "nghttp2.org/httpbin", function( ) it( "should be possible to GET gzip data", async ( ) => { - const response = await fetch( "https://nghttp2.org/httpbin/gzip" ); + const response = await fetch( `${host}/gzip` ); const data = await response.json( ); expect( data ).to.deep.include( { gzipped: true, method: "GET" } ); } ); } ); +} ); diff --git a/test/fetch-h2/index.ts b/test/fetch-h2/index.ts index d950a04..9f81868 100644 --- a/test/fetch-h2/index.ts +++ b/test/fetch-h2/index.ts @@ -6,20 +6,21 @@ import { buffer as getStreamAsBuffer } from "get-stream"; import "mocha"; import * as through2 from "through2"; -import { makeServer } from "../lib/server"; -import { createIntegrity } from "../lib/utils"; +import { TestData } from "../lib/server-common"; +import { makeMakeServer } from "../lib/server-helpers"; +import { cleanUrl, createIntegrity } from "../lib/utils"; import { + context, DataBody, - disconnectAll, - fetch, + disconnectAll as _disconnectAll, + fetch as _fetch, Headers, - onPush, + onPush as _onPush, Response, StreamBody, } from "../../"; -afterEach( disconnectAll ); async function getRejection< T >( promise: Promise< T > ): Promise< Error > { @@ -41,18 +42,44 @@ function ensureStatusSuccess( response: Response ): Response return response; } + +( [ + { proto: "http:", version: "http1" }, + { proto: "http:", version: "http2" }, + { proto: "https:", version: "http1" }, + { proto: "https:", version: "http2" }, +] as Array< TestData > ) +.forEach( ( { proto, version } ) => +{ +const { cycleOpts, makeServer } = makeMakeServer( { proto, version } ); + +const { disconnectAll, fetch, onPush } = + ( proto === "httpss:" && version === "http1" ) + ? { disconnectAll: _disconnectAll, fetch: _fetch, onPush: _onPush } + : context( { ...cycleOpts } ); + describe( "basic", ( ) => +{ +afterEach( disconnectAll ); + +describe( `(${version} over ${proto.replace( ":", "" )})`, ( ) => { it( "should be able to perform simple GET", async ( ) => { const { server, port } = await makeServer( ); + const headers = + version === "http1" ? 
{ "http1-path": "/headers" } : { }; + const response = ensureStatusSuccess( - await fetch( `http://localhost:${port}/headers` ) + await fetch( `${proto}//localhost:${port}/headers`, { headers } ) ); const res = await response.json( ); - expect( res[ ":path" ] ).to.equal( "/headers" ); + if ( version === "http1" ) + expect( res[ "http1-path" ] ).to.equal( "/headers" ); + else + expect( res[ ":path" ] ).to.equal( "/headers" ); await server.shutdown( ); } ); @@ -68,7 +95,7 @@ describe( "basic", ( ) => const response = ensureStatusSuccess( await fetch( - `http://localhost:${port}/headers`, + `${proto}//localhost:${port}/headers`, { body: new DataBody( "foobar" ), headers, @@ -92,7 +119,7 @@ describe( "basic", ( ) => const json = { foo: "bar" }; const response = await fetch( - `http://localhost:${port}/echo`, + `${proto}//localhost:${port}/echo`, { json, method: "POST", @@ -119,7 +146,7 @@ describe( "basic", ( ) => const response = ensureStatusSuccess( await fetch( - `http://localhost:${port}/headers`, + `${proto}//localhost:${port}/headers`, { body: new DataBody( "foobar" ), headers, @@ -145,7 +172,7 @@ describe( "basic", ( ) => stream.write( "foo" ); const eventualResponse = fetch( - `http://localhost:${port}/echo`, + `${proto}//localhost:${port}/echo`, { body: new StreamBody( stream ), headers: { "content-length": "6" }, @@ -175,7 +202,7 @@ describe( "basic", ( ) => stream.write( "foo" ); const eventualResponse = fetch( - `http://localhost:${port}/echo`, + `${proto}//localhost:${port}/echo`, { body: new StreamBody( stream ), method: "POST", @@ -200,7 +227,7 @@ describe( "basic", ( ) => const { server, port } = await makeServer( ); const eventualResponse = fetch( - `http://localhost:${port}/echo`, + `${proto}//localhost:${port}/echo`, { body: "foo", json: { foo: "" }, @@ -222,7 +249,7 @@ describe( "basic", ( ) => const json = { foo: "bar" }; const response = await fetch( - `http://localhost:${port}/echo`, + `${proto}//localhost:${port}/echo`, { json, method: "POST", @@ -245,7 +272,7 @@ describe( "basic", ( ) => const body = "foobar"; const response = await fetch( - `http://localhost:${port}/echo`, + `${proto}//localhost:${port}/echo`, { body, method: "POST", @@ -266,7 +293,7 @@ describe( "basic", ( ) => const body = Buffer.from( "foobar" ); const response = await fetch( - `http://localhost:${port}/echo`, + `${proto}//localhost:${port}/echo`, { body, method: "POST", @@ -291,7 +318,7 @@ describe( "basic", ( ) => stream.end( ); const response = await fetch( - `http://localhost:${port}/echo`, + `${proto}//localhost:${port}/echo`, { body: stream, method: "POST", @@ -315,7 +342,7 @@ describe( "basic", ( ) => const onTrailers = deferredTrailers.resolve; const response = await fetch( - `http://localhost:${port}/trailers`, + `${proto}//localhost:${port}/trailers`, { json: trailers, method: "POST", @@ -326,7 +353,7 @@ describe( "basic", ( ) => const data = await response.text( ); const receivedTrailers = await deferredTrailers.promise; - expect( data ).to.not.be.empty; + expect( data ).to.contain( "trailers will be sent" ); Object.keys( trailers ) .forEach( key => @@ -344,7 +371,7 @@ describe( "basic", ( ) => const { server, port } = await makeServer( ); const eventualResponse = fetch( - `http://localhost:${port}/wait/20`, + `${proto}//localhost:${port}/wait/20`, { method: "POST", timeout: 8, @@ -363,7 +390,7 @@ describe( "basic", ( ) => const { server, port } = await makeServer( ); const response = await fetch( - `http://localhost:${port}/wait/1`, + `${proto}//localhost:${port}/wait/1`, { method: 
"POST", timeout: 100, @@ -404,7 +431,7 @@ describe( "basic", ( ) => } ); const eventualResponse = fetch( - `http://localhost:${port}/sha256`, + `${proto}//localhost:${port}/sha256`, { body: new StreamBody( stream ), headers: { "content-length": "" + chunkSize * chunks }, @@ -451,7 +478,7 @@ describe( "basic", ( ) => } ); const eventualResponse = fetch( - `http://localhost:${port}/sha256`, + `${proto}//localhost:${port}/sha256`, { body: new StreamBody( stream ), method: "POST", @@ -468,6 +495,7 @@ describe( "basic", ( ) => await server.shutdown( ); } ); + if ( version === "http2" ) it( "should be able to receive pushed request", async ( ) => { const { server, port } = await makeServer( ); @@ -484,7 +512,7 @@ describe( "basic", ( ) => const response = ensureStatusSuccess( await fetch( - `http://localhost:${port}/push`, + `${proto}//localhost:${port}/push`, { json: [ { @@ -519,7 +547,7 @@ describe( "basic", ( ) => const response = ensureStatusSuccess( await fetch( - `http://localhost:${port}/headers`, + `${proto}//localhost:${port}/headers`, { headers: { host }, } @@ -528,7 +556,10 @@ describe( "basic", ( ) => const responseData = await response.json( ); - expect( responseData[ ":authority" ] ).to.equal( host ); + if ( version === "http2" ) + expect( responseData[ ":authority" ] ).to.equal( host ); + else + expect( responseData.host ).to.equal( host ); await server.shutdown( ); } ); @@ -538,7 +569,7 @@ describe( "basic", ( ) => const { server, port } = await makeServer( ); const response = ensureStatusSuccess( - await fetch( `http://localhost:${port}/headers` ) + await fetch( `${proto}//localhost:${port}/headers` ) ); const responseData = await response.json( ); @@ -556,7 +587,7 @@ describe( "basic", ( ) => const response = ensureStatusSuccess( await fetch( - `http://localhost:${port}/compressed/gzip`, + `${proto}//localhost:${port}/compressed/gzip`, { json: testData, method: "POST", @@ -581,7 +612,7 @@ describe( "basic", ( ) => const response = ensureStatusSuccess( await fetch( - `http://localhost:${port}/compressed/deflate`, + `${proto}//localhost:${port}/compressed/deflate`, { json: testData, method: "POST", @@ -599,37 +630,39 @@ describe( "basic", ( ) => } ); } ); -describe( "response", ( ) => +describe( `response (${proto})`, ( ) => { it( "should have a proper url", async ( ) => { const { server, port } = await makeServer( ); - const url = `http://localhost:${port}/headers`; + const url = `${proto}//localhost:${port}/headers`; const response = ensureStatusSuccess( await fetch( url ) ); - expect( response.url ).to.equal( url ); + expect( response.url ).to.equal( cleanUrl( url ) ); await disconnectAll( ); await server.shutdown( ); } ); } ); -describe( "goaway", ( ) => +if ( version === "http2" ) +describe( `goaway (${proto})`, ( ) => { + if ( proto === "http:" ) // This race is too fast for TLS it( "handle session failover (race conditioned)", async ( ) => { const { server, port } = await makeServer( ); - const url1 = `http://localhost:${port}/goaway`; - const url2 = `http://localhost:${port}/headers`; + const url1 = `${proto}//localhost:${port}/goaway`; + const url2 = `${proto}//localhost:${port}/headers`; const response1 = ensureStatusSuccess( await fetch( url1 ) ); - expect( response1.url ).to.equal( url1 ); + expect( response1.url ).to.equal( cleanUrl( url1 ) ); const response2 = ensureStatusSuccess( await fetch( url2 ) ); - expect( response2.url ).to.equal( url2 ); + expect( response2.url ).to.equal( cleanUrl( url2 ) ); await response1.text( ); await response2.text( ); @@ -642,16 
+675,16 @@ describe( "goaway", ( ) => { const { server, port } = await makeServer( ); - const url1 = `http://localhost:${port}/goaway`; - const url2 = `http://localhost:${port}/headers`; + const url1 = `${proto}//localhost:${port}/goaway`; + const url2 = `${proto}//localhost:${port}/headers`; const response1 = ensureStatusSuccess( await fetch( url1 ) ); - expect( response1.url ).to.equal( url1 ); + expect( response1.url ).to.equal( cleanUrl( url1 ) ); await delay(20); const response2 = ensureStatusSuccess( await fetch( url2 ) ); - expect( response2.url ).to.equal( url2 ); + expect( response2.url ).to.equal( cleanUrl( url2 ) ); await response1.text( ); await response2.text( ); @@ -664,16 +697,16 @@ describe( "goaway", ( ) => { const { server, port } = await makeServer( ); - const url1 = `http://localhost:${port}/goaway/50`; - const url2 = `http://localhost:${port}/slow/50`; + const url1 = `${proto}//localhost:${port}/goaway/50`; + const url2 = `${proto}//localhost:${port}/slow/50`; const response1 = ensureStatusSuccess( await fetch( url1 ) ); - expect( response1.url ).to.equal( url1 ); + expect( response1.url ).to.equal( cleanUrl( url1 ) ); await delay( 10 ); const response2 = ensureStatusSuccess( await fetch( url2 ) ); - expect( response2.url ).to.equal( url2 ); + expect( response2.url ).to.equal( cleanUrl( url2 ) ); await delay( 10 ); @@ -688,19 +721,19 @@ describe( "goaway", ( ) => } ); } ); -describe( "integrity", ( ) => +describe( `integrity (${proto})`, ( ) => { it( "handle and succeed on valid integrity", async ( ) => { const { server, port } = await makeServer( ); - const url = `http://localhost:${port}/slow/0`; + const url = `${proto}//localhost:${port}/slow/0`; const data = "abcdefghij"; const integrity = createIntegrity( data ); const response = ensureStatusSuccess( await fetch( url, { integrity } ) ); - expect( response.url ).to.equal( url ); + expect( response.url ).to.equal( cleanUrl( url ) ); expect( await response.text( ) ).to.equal( data ); @@ -712,13 +745,13 @@ describe( "integrity", ( ) => { const { server, port } = await makeServer( ); - const url = `http://localhost:${port}/slow/0`; + const url = `${proto}//localhost:${port}/slow/0`; const data = "abcdefghij-x"; const integrity = createIntegrity( data ); const response = ensureStatusSuccess( await fetch( url, { integrity } ) ); - expect( response.url ).to.equal( url ); + expect( response.url ).to.equal( cleanUrl( url ) ); try { @@ -735,13 +768,13 @@ describe( "integrity", ( ) => } ); } ); -describe( "premature stream close", ( ) => +describe( `premature stream close (${proto})`, ( ) => { it( "handle and reject fetch operation", async ( ) => { const { server, port } = await makeServer( ); - const url = `http://localhost:${port}/prem-close`; + const url = `${proto}//localhost:${port}/prem-close`; try { @@ -750,10 +783,16 @@ describe( "premature stream close", ( ) => } catch ( err ) { - expect( err.message ).to.contain( "Stream prematurely closed" ); + const expected = + version === "http1" + ? 
"socket hang up" + : "Stream prematurely closed"; + expect( err.message ).to.contain( expected ); } await disconnectAll( ); await server.shutdown( ); } ); } ); +} ); +} ); diff --git a/test/lib/server-common.ts b/test/lib/server-common.ts new file mode 100644 index 0000000..869dfd2 --- /dev/null +++ b/test/lib/server-common.ts @@ -0,0 +1,85 @@ +import { + Server as HttpServer, +} from "http"; +import { + Http2Server, + IncomingHttpHeaders, + SecureServerOptions, + ServerHttp2Stream, +} from "http2"; +import { + Server as HttpsServer, +} from "https"; + +import { HttpProtocols } from "../../"; + + +export interface TestData +{ + proto: string; + version: HttpProtocols; +} + +export interface MatchData +{ + path: string; + stream: ServerHttp2Stream; + headers: IncomingHttpHeaders; +} + +export type Matcher = ( matchData: MatchData ) => boolean; + +export const ignoreError = ( cb: ( ) => any ) => { try { cb( ); } catch ( err ) { } }; + +export interface ServerOptions +{ + port?: number; + matchers?: ReadonlyArray< Matcher >; + serverOptions?: SecureServerOptions; +} + +export abstract class Server +{ + public port: number | null = null; + protected _opts: ServerOptions = { }; + protected _server: HttpServer | HttpsServer | Http2Server = < any >void 0; + + + public async listen( port: number | undefined = void 0 ): Promise< number > + { + return new Promise( ( resolve, _reject ) => + { + this._server.listen( port, "0.0.0.0", resolve ); + } ) + .then( ( ) => + { + const address = this._server.address( ); + if ( typeof address === "string" ) + return 0; + return address.port; + } ) + .then( port => + { + this.port = port; + return port; + } ); + } + + public async shutdown( ): Promise< void > + { + await this._shutdown( ); + return new Promise< void >( ( resolve, _reject ) => + { + this._server.close( resolve ); + } ); + } + + protected async _shutdown( ): Promise< void > { } +} + +export abstract class TypedServer +< ServerType extends HttpServer | HttpsServer | Http2Server > +extends Server +{ + protected _server: ServerType = < any >void 0; +} diff --git a/test/lib/server-helpers.ts b/test/lib/server-helpers.ts new file mode 100644 index 0000000..3d08244 --- /dev/null +++ b/test/lib/server-helpers.ts @@ -0,0 +1,49 @@ +import { readFileSync } from "fs"; + +import { + ServerOptions, + TestData, +} from "./server-common"; +import { + makeServer as makeServer1, +} from "./server-http1"; +import { + makeServer as makeServer2, +} from "./server-http2"; + + +const key = readFileSync( __dirname + "/../../../certs/key.pem" ); +const cert = readFileSync( __dirname + "/../../../certs/cert.pem" ); + +export function makeMakeServer( { proto, version }: TestData ) +{ + const makeServer = ( opts?: ServerOptions ) => + { + const serverOptions = + ( opts && opts.serverOptions ) ? opts.serverOptions : { }; + + if ( proto === "https:" ) + { + opts = { + serverOptions: { + cert, + key, + ...serverOptions, + }, + ...( opts ? opts : { } ), + }; + } + + return version === "http1" + ? 
makeServer1( opts ) + : makeServer2( opts ); + }; + + const cycleOpts = { + httpProtocol: version, + httpsProtocols: [ version ], + session: { rejectUnauthorized: false }, + }; + + return { makeServer, cycleOpts }; +} diff --git a/test/lib/server-http1.ts b/test/lib/server-http1.ts new file mode 100644 index 0000000..b9e7108 --- /dev/null +++ b/test/lib/server-http1.ts @@ -0,0 +1,282 @@ +import { + createServer, + IncomingMessage, + Server as HttpServer, + ServerResponse, +} from "http"; +import { + constants as h2constants, +} from "http2"; +import { + createServer as createSecureServer, + Server as HttpsServer, +} from "https"; +import { Socket } from "net"; + +import { createHash } from "crypto"; +import { createDeflate, createGzip } from "zlib"; + +import { delay } from "already"; +import { buffer as getStreamAsBuffer } from "get-stream"; + +import { + ignoreError, + Server, + ServerOptions, + TypedServer, +} from "./server-common"; + +// These are the same in HTTP/1 and HTTP/2 +const { + HTTP2_HEADER_ACCEPT_ENCODING, + HTTP2_HEADER_CONTENT_LENGTH, + HTTP2_HEADER_CONTENT_TYPE, + HTTP2_HEADER_SET_COOKIE, +} = h2constants; + +interface RawHeaders +{ + [ name: string ]: number | string | Array< string >; +} + +export class ServerHttp1 extends TypedServer< HttpServer | HttpsServer > +{ + private _store = new Set< Socket >( ); + + constructor( opts: ServerOptions ) + { + super( ); + + this._opts = opts || { }; + if ( this._opts.serverOptions ) + this._server = createSecureServer( this._opts.serverOptions ); + else + this._server = createServer( ); + this.port = null; + + this._server.on( + "connection", + socket => { this._store.add( socket ); } + ); + + this._server.on( + "request", + ( request: IncomingMessage, response: ServerResponse ) => + { + this.onRequest( request, response ) + .catch( err => + { + console.error( "Unit test server failed", err ); + process.exit( 1 ); + } ); + } + ); + } + + public async _shutdown( ): Promise< void > + { + for ( const socket of this._store ) + { + socket.destroy( ); + } + this._store.clear( ); + } + + private async onRequest( + request: IncomingMessage, response: ServerResponse + ) + : Promise< void > + { + const { url: path, headers } = request; + let m; + + if ( path == null ) + throw new Error( "Internal test error" ); + + const sendHeaders = ( headers: RawHeaders ) => + { + const { ":status": status = 200, ...rest } = { ...headers }; + + response.statusCode = status; + + for ( const [ key, value ] of Object.entries( rest ) ) + response.setHeader( key, value ); + }; + + if ( path === "/headers" ) + { + sendHeaders( { + ":status": 200, + "content-type": "application/json", + } ); + + response.end( JSON.stringify( headers ) ); + } + else if ( path === "/echo" ) + { + const responseHeaders: RawHeaders = { + ":status": 200, + }; + [ HTTP2_HEADER_CONTENT_TYPE, HTTP2_HEADER_CONTENT_LENGTH ] + .forEach( name => + { + const value = headers[ name ]; + if ( value != null ) + responseHeaders[ name ] = value; + } ); + + sendHeaders( responseHeaders ); + request.pipe( response ); + } + else if ( path === "/set-cookie" ) + { + const responseHeaders: RawHeaders = { + ":status": 200, + [ HTTP2_HEADER_SET_COOKIE ]: [ ], + }; + + const data = await getStreamAsBuffer( request ); + const json = JSON.parse( data.toString( ) ); + json.forEach( ( cookie: any ) => + { + ( < any >responseHeaders[ HTTP2_HEADER_SET_COOKIE ] ) + .push( cookie ); + } ); + + sendHeaders( responseHeaders ); + response.end( ); + } + // tslint:disable-next-line + else if ( m = path.match( 
/\/wait\/(.+)/ ) ) + { + const timeout = parseInt( m[ 1 ], 10 ); + await delay( timeout ); + + const responseHeaders: RawHeaders = { + ":status": 200, + }; + [ HTTP2_HEADER_CONTENT_TYPE, HTTP2_HEADER_CONTENT_LENGTH ] + .forEach( name => + { + const value = headers[ name ]; + if ( value != null ) + responseHeaders[ name ] = value; + } ); + + try + { + sendHeaders( responseHeaders ); + request.pipe( response ); + } + catch ( err ) + // We ignore errors since this route is used to intentionally + // timeout, which causes us to try to write to a closed stream. + { } + } + else if ( path === "/trailers" ) + { + const responseHeaders = { + ":status": 200, + }; + + const data = await getStreamAsBuffer( request ); + const json = JSON.parse( data.toString( ) ); + + sendHeaders( responseHeaders ); + + response.write( "trailers will be sent" ); + + response.addTrailers( json ); + + response.end( ); + } + else if ( path === "/sha256" ) + { + const hash = createHash( "sha256" ); + + const responseHeaders = { + ":status": 200, + }; + sendHeaders( responseHeaders ); + + hash.on( "readable", ( ) => + { + const data = < Buffer >hash.read( ); + if ( data ) + { + response.write( data.toString( "hex" ) ); + response.end( ); + } + } ); + + request.pipe( hash ); + } + else if ( path.startsWith( "/compressed/" ) ) + { + const encoding = path.replace( "/compressed/", "" ); + + const accept = headers[ HTTP2_HEADER_ACCEPT_ENCODING ] as string; + + if ( !accept.includes( encoding ) ) + { + response.end( ); + return; + } + + const encoder = + encoding === "gzip" + ? createGzip( ) + : encoding === "deflate" + ? createDeflate( ) + : null; + + const responseHeaders = { + ":status": 200, + "content-encoding": encoding, + }; + + sendHeaders( responseHeaders ); + if ( encoder ) + request.pipe( encoder ).pipe( response ); + else + request.pipe( response ); + } + else if ( path.startsWith( "/slow/" ) ) + { + const waitMs = parseInt( path.replace( "/slow/", "" ), 10 ); + + const responseHeaders = { + ":status": 200, + [ HTTP2_HEADER_CONTENT_LENGTH ]: "10", + }; + + sendHeaders( responseHeaders ); + + response.write( "abcde" ); + + if ( waitMs > 0 ) + await delay( waitMs ); + + ignoreError( ( ) => response.write( "fghij" ) ); + ignoreError( ( ) => response.end( ) ); + } + else if ( path.startsWith( "/prem-close" ) ) + { + request.socket.destroy( ); + } + else + { + response.end( ); + } + } +} + +export async function makeServer( opts: ServerOptions = { } ) +: Promise< { server: Server; port: number | null; } > +{ + opts = opts || { }; + + const server = new ServerHttp1( opts ); + await server.listen( opts.port ); + return { server, port: server.port }; +} diff --git a/test/lib/server.ts b/test/lib/server-http2.ts similarity index 84% rename from test/lib/server.ts rename to test/lib/server-http2.ts index ddcaa81..420c431 100644 --- a/test/lib/server.ts +++ b/test/lib/server-http2.ts @@ -6,16 +6,21 @@ import { Http2Session, IncomingHttpHeaders, OutgoingHttpHeaders, - SecureServerOptions, ServerHttp2Stream, } from "http2"; import { createHash } from "crypto"; import { createDeflate, createGzip } from "zlib"; +import { delay } from "already"; import { buffer as getStreamAsBuffer } from "get-stream"; -import { delay } from "already"; +import { + ignoreError, + Server, + ServerOptions, + TypedServer, +} from "./server-common"; const { HTTP2_HEADER_PATH, @@ -25,33 +30,14 @@ const { HTTP2_HEADER_SET_COOKIE, } = constants; -export interface MatchData -{ - path: string; - stream: ServerHttp2Stream; - headers: IncomingHttpHeaders; -} - 
-export type Matcher = ( matchData: MatchData ) => boolean; - -export interface ServerOptions -{ - port?: number; - matchers?: ReadonlyArray< Matcher >; - serverOptions?: SecureServerOptions; -} - -const ignoreError = ( cb: ( ) => any ) => { try { cb( ); } catch ( err ) { } }; - -export class Server +export class ServerHttp2 extends TypedServer< Http2Server > { - public port: number | null; - private _opts: ServerOptions; - private _server: Http2Server; private _sessions: Set< Http2Session >; constructor( opts: ServerOptions ) { + super( ); + this._opts = opts || { }; if ( this._opts.serverOptions ) this._server = createSecureServer( this._opts.serverOptions ); @@ -71,36 +57,13 @@ export class Server } ); } - public async listen( port: number | undefined = void 0 ): Promise< number > + public async _shutdown( ): Promise< void > { - return new Promise( ( resolve, _reject ) => + for ( const session of this._sessions ) { - this._server.listen( port, "0.0.0.0", resolve ); - } ) - .then( ( ) => - { - const address = this._server.address( ); - if ( typeof address === "string" ) - return 0; - return address.port; - } ) - .then( port => - { - this.port = port; - return port; - } ); - } - - public async shutdown( ): Promise< void > - { - return new Promise< void >( ( resolve, _reject ) => - { - for ( const session of this._sessions ) - { - session.destroy( ); - } - this._server.close( resolve ); - } ); + session.destroy( ); + } + this._sessions.clear( ); } private async onStream( @@ -351,7 +314,7 @@ export async function makeServer( opts: ServerOptions = { } ) { opts = opts || { }; - const server = new Server( opts ); + const server = new ServerHttp2( opts ); await server.listen( opts.port ); return { server, port: server.port }; } diff --git a/test/lib/utils.ts b/test/lib/utils.ts index b376eaa..a34eeec 100644 --- a/test/lib/utils.ts +++ b/test/lib/utils.ts @@ -6,3 +6,6 @@ export function createIntegrity( data: string, hashType = "sha256" ) hash.update( data ); return hashType + "-" + hash.digest( "base64" ); } + +export const cleanUrl = ( url: string ) => + url.replace( /^http[12]:\/\//, "http://" ); From b8753f177911da9e944a23f7aee758778d9e7de0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Sun, 13 Jan 2019 16:25:26 +0100 Subject: [PATCH 02/77] feat(http1): Added HTTP/1 options, and made options per-origin --- lib/context-http1.ts | 35 ++++++---- lib/context-http2.ts | 21 ++++-- lib/context.ts | 140 ++++++++++++++++++++++----------------- lib/core.ts | 53 ++++++++++++--- lib/headers.ts | 17 +++-- lib/request.ts | 13 +++- lib/response.ts | 15 ++++- test/fetch-h2/httpbin.ts | 2 + test/fetch-h2/index.ts | 9 +++ 9 files changed, 206 insertions(+), 99 deletions(-) diff --git a/lib/context-http1.ts b/lib/context-http1.ts index 65c79ff..0114211 100644 --- a/lib/context-http1.ts +++ b/lib/context-http1.ts @@ -6,7 +6,10 @@ import { URL } from "url"; import { defer, Deferred } from "already"; import { + getByOrigin, Http1Options, + parsePerOrigin, + PerOrigin, } from "./core"; import { Request @@ -189,12 +192,21 @@ class OriginPool class ContextPool { - private options: Http1Options; private pools = new Map< string, OriginPool >( ); - constructor( options: Http1Options ) + private keepAlive: boolean | PerOrigin< boolean >; + private keepAliveMsecs: number | PerOrigin< number >; + private maxSockets: number | PerOrigin< number >; + private maxFreeSockets: number | PerOrigin< number >; + private timeout: void | number | PerOrigin< void | number >; + + constructor( options: 
Partial< Http1Options > ) { - this.options = options; + this.keepAlive = parsePerOrigin( options.keepAlive, false ); + this.keepAliveMsecs = parsePerOrigin( options.keepAliveMsecs, 1000 ); + this.maxSockets = parsePerOrigin( options.maxSockets, 256 ); + this.maxFreeSockets = parsePerOrigin( options.maxFreeSockets, Infinity ); + this.timeout = parsePerOrigin( options.timeout, void 0 ); } public hasOrigin( origin: string ) @@ -208,16 +220,11 @@ class ContextPool if ( !pool ) { - const runIfFunction = - < T extends number | boolean | void > - ( value: T | ( ( origin: string ) => T ) ) => - typeof value === "function" ? value( origin ) : value; - - const keepAlive = runIfFunction( this.options.keepAlive ); - const keepAliveMsecs = runIfFunction( this.options.keepAliveMsecs ); - const maxSockets = runIfFunction( this.options.maxSockets ); - const maxFreeSockets = runIfFunction( this.options.maxFreeSockets ); - const timeout = runIfFunction( this.options.timeout ); + const keepAlive = getByOrigin( this.keepAlive, origin ); + const keepAliveMsecs = getByOrigin( this.keepAliveMsecs, origin ); + const maxSockets = getByOrigin( this.maxSockets, origin ); + const maxFreeSockets = getByOrigin( this.maxFreeSockets, origin ); + const timeout = getByOrigin( this.timeout, origin ); const newPool = new OriginPool( keepAlive, @@ -251,7 +258,7 @@ export class H1Context { private contextPool: ContextPool; - constructor( options: Http1Options ) + constructor( options: Partial< Http1Options > ) { this.contextPool = new ContextPool( options ); } diff --git a/lib/context-http2.ts b/lib/context-http2.ts index 82bca40..4286983 100644 --- a/lib/context-http2.ts +++ b/lib/context-http2.ts @@ -12,7 +12,7 @@ import { asyncGuard, syncGuard } from "callguard"; import { AbortError, - BaseContext, + Decoder, TimeoutError, } from "./core"; @@ -39,6 +39,10 @@ export type PushHandler = getResponse: ( ) => Promise< Response > ) => void; +export type GetDecoders = ( origin: string ) => ReadonlyArray< Decoder >; +export type GetSessionOptions = + ( origin: string ) => SecureClientSessionOptions; + export class H2Context { public _pushHandler?: PushHandler; @@ -46,11 +50,16 @@ export class H2Context private _h2sessions: Map< string, H2SessionItem > = new Map( ); private _h2staleSessions: Map< string, Set< ClientHttp2Session > > = new Map( ); - private _context: BaseContext; + private _getDecoders: GetDecoders; + private _getSessionOptions: GetSessionOptions; - constructor( context: BaseContext ) + constructor( + getDecoders: GetDecoders, + getSessionOptions: GetSessionOptions + ) { - this._context = context; + this._getDecoders = getDecoders; + this._getSessionOptions = getSessionOptions; } public hasOrigin( origin: string ) @@ -262,7 +271,7 @@ export class H2Context ( responseHeaders: IncomingHttp2Headers ) => { const response = new StreamResponse( - this._context._decoders, + this._getDecoders( origin ), path, pushedStream, responseHeaders, @@ -310,7 +319,7 @@ export class H2Context ); const options = { - ...this._context._sessionOptions, + ...this._getSessionOptions( origin ), ...extraOptions, }; diff --git a/lib/context.ts b/lib/context.ts index 93fa017..2e0d3ba 100644 --- a/lib/context.ts +++ b/lib/context.ts @@ -11,12 +11,14 @@ import { H2Context, PushHandler } from "./context-http2"; import { connectTLS } from "./context-https"; import { CookieJar } from "./cookie-jar"; import { - BaseContext, Decoder, FetchError, FetchInit, + getByOrigin, Http1Options, HttpProtocols, + parsePerOrigin, + PerOrigin, SimpleSession, 
SimpleSessionHttp1, SimpleSessionHttp2, @@ -44,93 +46,111 @@ const defaultAccept = "application/json, text/*;0.9, */*;q=0.8"; export interface ContextOptions { - userAgent: string; - overwriteUserAgent: boolean; - accept: string; + userAgent: string | PerOrigin< string >; + overwriteUserAgent: boolean | PerOrigin< boolean >; + accept: string | PerOrigin< string >; cookieJar: CookieJar; - decoders: ReadonlyArray< Decoder >; - session: SecureClientSessionOptions; - httpProtocol: HttpProtocols; - httpsProtocols: ReadonlyArray< HttpProtocols >; - http1: Partial< Http1Options >; + decoders: + ReadonlyArray< Decoder > | PerOrigin< ReadonlyArray< Decoder > >; + session: + SecureClientSessionOptions | PerOrigin< SecureClientSessionOptions >; + httpProtocol: HttpProtocols | PerOrigin< HttpProtocols >; + httpsProtocols: + ReadonlyArray< HttpProtocols > | + PerOrigin< ReadonlyArray< HttpProtocols > >; + http1: Partial< Http1Options > | PerOrigin< Partial< Http1Options > >; } -export class Context implements BaseContext +export class Context { - public _decoders: ReadonlyArray< Decoder >; - public _sessionOptions: SecureClientSessionOptions; - private h1Context: H1Context; - private h2Context = new H2Context( this ); - private _userAgent: string; - private _accept: string; + private h2Context: H2Context; + + private _userAgent: string | PerOrigin< string >; + private _overwriteUserAgent: boolean | PerOrigin< boolean >; + private _accept: string | PerOrigin< string >; private _cookieJar: CookieJar; - private _httpProtocol: HttpProtocols; - private _httpsProtocols: Array< HttpProtocols >; - private _http1Options: Http1Options; + private _decoders: + ReadonlyArray< Decoder > | PerOrigin< ReadonlyArray< Decoder > >; + private _sessionOptions: + SecureClientSessionOptions | PerOrigin< SecureClientSessionOptions >; + private _httpProtocol: HttpProtocols | PerOrigin< HttpProtocols >; + private _httpsProtocols: + ReadonlyArray< HttpProtocols > | + PerOrigin< ReadonlyArray< HttpProtocols > >; + private _http1Options: Partial< Http1Options | PerOrigin< Http1Options > >; constructor( opts?: Partial< ContextOptions > ) { this._userAgent = ""; + this._overwriteUserAgent = false; this._accept = ""; this._cookieJar = < CookieJar >< any >void 0; this._decoders = [ ]; this._sessionOptions = { }; this._httpProtocol = "http1"; this._httpsProtocols = [ "http2", "http1" ]; - this._http1Options = { - keepAlive: false, - keepAliveMsecs: 1000, - maxFreeSockets: 256, - maxSockets: Infinity, - timeout: void 0, - }; + this._http1Options = { }; this.setup( opts ); this.h1Context = new H1Context( this._http1Options ); + this.h2Context = new H2Context( + this.decoders.bind( this ), + this.sessionOptions.bind( this ) + ); } public setup( opts?: Partial< ContextOptions > ) { opts = opts || { }; - this._userAgent = - ( - "userAgent" in opts && - "overwriteUserAgent" in opts && - opts.overwriteUserAgent - ) - ? ( opts.userAgent || "" ) - : "userAgent" in opts - ? opts.userAgent + " " + defaultUserAgent - : defaultUserAgent; - - this._accept = "accept" in opts - ? ( opts.accept || defaultAccept ) - : defaultAccept; - this._cookieJar = "cookieJar" in opts ? ( opts.cookieJar || new CookieJar( ) ) : new CookieJar( ); - this._decoders = "decoders" in opts - ? 
opts.decoders || [ ] - : [ ]; + this._userAgent = parsePerOrigin( opts.userAgent, "" ); + this._overwriteUserAgent = + parsePerOrigin( opts.overwriteUserAgent, false ); + this._accept = parsePerOrigin( opts.accept, defaultAccept ); + this._decoders = parsePerOrigin( opts.decoders, [ ] ); + this._sessionOptions = parsePerOrigin( opts.session, { } ); + this._httpProtocol = parsePerOrigin( opts.httpProtocol, "http1" ); + + this._httpsProtocols = parsePerOrigin( + opts.httpsProtocols, + [ "http2", "http1" ] + ); + + Object.assign( this._http1Options, opts.http1 || { } ); + } - this._sessionOptions = "session" in opts - ? opts.session || { } - : { }; + public userAgent( origin: string ) + { + const combine = ( userAgent: string, overwriteUserAgent: boolean ) => + { + const defaultUA = overwriteUserAgent ? "" : defaultUserAgent; - this._httpProtocol = "httpProtocol" in opts - ? opts.httpProtocol || "http1" - : "http1"; + return userAgent + ? defaultUA + ? userAgent + " " + defaultUA + : userAgent + : defaultUA; + }; - this._httpsProtocols = "httpsProtocols" in opts - ? [ ...( opts.httpsProtocols || [ ] ) ] - : [ "http2", "http1" ]; + return combine( + getByOrigin( this._userAgent, origin ), + getByOrigin( this._overwriteUserAgent, origin ) + ); + } - Object.assign( this._http1Options, opts.http1 || { } ); + public decoders( origin: string ) + { + return getByOrigin( this._decoders, origin ); + } + public sessionOptions( origin: string ) + { + return getByOrigin( this._sessionOptions, origin ); } public onPush( pushHandler?: PushHandler ) @@ -152,15 +172,15 @@ export class Context implements BaseContext : input : new Request( input, { ...( init || { } ), url } ); - const { rejectUnauthorized } = this._sessionOptions; + const { rejectUnauthorized } = this.sessionOptions( origin ); const makeSimpleSession = ( protocol: HttpProtocols ): SimpleSession => ( { - accept: ( ) => this._accept, - contentDecoders: ( ) => this._decoders, + accept: ( ) => getByOrigin( this._accept, origin ), + contentDecoders: ( ) => getByOrigin( this._decoders, origin ), cookieJar: this._cookieJar, protocol, - userAgent: ( ) => this._userAgent, + userAgent: ( ) => this.userAgent( origin ), } ); const doFetchHttp1 = ( socket: Socket ) => @@ -236,8 +256,8 @@ export class Context implements BaseContext const { protocol, socket } = await connectTLS( hostname, port, - this._httpsProtocols, - this._sessionOptions + getByOrigin( this._httpsProtocols, origin ), + getByOrigin( this._sessionOptions, origin ) ); if ( protocol === "http2" ) diff --git a/lib/core.ts b/lib/core.ts index a220320..2c0412e 100644 --- a/lib/core.ts +++ b/lib/core.ts @@ -1,5 +1,5 @@ import { ClientRequest } from "http"; -import { ClientHttp2Session, SecureClientSessionOptions } from "http2"; +import { ClientHttp2Session } from "http2"; import { CookieJar } from "./cookie-jar"; import { Headers, RawHeaders } from "./headers"; @@ -91,6 +91,8 @@ export type ResponseTypes = export type HttpProtocols = "http1" | "http2"; +export type HttpVersion = 1 | 2; + export interface IBody { readonly bodyUsed: boolean; @@ -118,6 +120,7 @@ export interface RequestInitWithoutBody referrer: ReferrerTypes; referrerPolicy: ReferrerPolicyTypes; integrity: string; + allowForbiddenHeaders: boolean; } export interface RequestInit extends RequestInitWithoutBody @@ -189,21 +192,49 @@ export interface Decoder decode: DecodeFunction; } -export type PerOriginOption< T > = ( origin: string ) => T; +export type PerOrigin< T > = ( origin: string ) => T; -export interface Http1Options +export 
function getByOrigin< T >( + val: T | PerOrigin< T >, + origin: string +) +: T +{ + return typeof val === "function" + ? ( < PerOrigin< T > >val )( origin ) + : val; +} + +export function parsePerOrigin< T >( + val: T | PerOrigin< T > | void, + _default: T +) +: T | PerOrigin< T > { - keepAlive: boolean | PerOriginOption< boolean >; - keepAliveMsecs: number | PerOriginOption< number >; - maxSockets: number | PerOriginOption< number >; - maxFreeSockets: number | PerOriginOption< number >; - timeout: void | number | PerOriginOption< void | number >; + if ( val == null ) + { + return _default; + } + + if ( typeof val === "function" ) + return ( origin: string ) => + { + const ret = ( < PerOrigin< T > >val )( origin ); + if ( ret == null ) + return _default; + return ret; + }; + + return val; } -export interface BaseContext +export interface Http1Options { - _decoders: ReadonlyArray< Decoder >; - _sessionOptions: SecureClientSessionOptions; + keepAlive: boolean | PerOrigin< boolean >; + keepAliveMsecs: number | PerOrigin< number >; + maxSockets: number | PerOrigin< number >; + maxFreeSockets: number | PerOrigin< number >; + timeout: void | number | PerOrigin< void | number >; } export interface SimpleSession diff --git a/lib/headers.ts b/lib/headers.ts index 1b71cc5..105d097 100644 --- a/lib/headers.ts +++ b/lib/headers.ts @@ -123,7 +123,7 @@ function _ensureGuard( ` (${name})` ); } -let _guard: string | null = null; +let _guard: GuardTypes | null = null; export class Headers { @@ -136,13 +136,22 @@ export class Headers _guard = null; this._data = new Map( ); + const set = ( name: string, values: ReadonlyArray< string > ) => + { + if ( values.length === 1 ) + this.set( name, values[ 0 ] ); + else + for ( const value of values ) + this.append( name, value ); + }; + if ( !init ) return; else if ( init instanceof Headers ) { - for ( const [ name, value ] of init._data.entries( ) ) - this._data.set( name, [ ...value ] ); + for ( const [ name, values ] of init._data.entries( ) ) + set( name, values ); } else @@ -152,7 +161,7 @@ export class Headers const name = filterName( _name ); const value = arrayify( init[ _name ] ) .map( val => `${val}` ); - this._data.set( name, [ ...value ] ); + set( name, [ ...value ] ); } } } diff --git a/lib/request.ts b/lib/request.ts index badbcd6..6a42782 100644 --- a/lib/request.ts +++ b/lib/request.ts @@ -16,6 +16,7 @@ import { GuardedHeaders, Headers } from "./headers"; const defaultInit: Partial< RequestInit > = { + allowForbiddenHeaders: false, cache: "default", credentials: "omit", method: "GET", @@ -46,6 +47,8 @@ export class Request extends Body implements RequestInitWithoutBody public readonly integrity: string; // @ts-ignore public readonly cache: CacheTypes; + // @ts-ignore + public readonly allowForbiddenHeaders: boolean; private _url: string; private _init: Partial< RequestInit >; @@ -83,9 +86,13 @@ export class Request extends Body implements RequestInitWithoutBody } this._init = Object.assign( { }, defaultInit, init ); + const allowForbiddenHeaders = + < boolean >this._init.allowForbiddenHeaders; const headers = new GuardedHeaders( - this._init.mode === "no-cors" + allowForbiddenHeaders + ? "none" + : this._init.mode === "no-cors" ? 
"request-no-cors" : "request", this._init.headers @@ -107,6 +114,10 @@ export class Request extends Body implements RequestInitWithoutBody } Object.defineProperties( this, { + allowForbiddenHeaders: { + enumerable: true, + value: allowForbiddenHeaders, + }, cache: { enumerable: true, value: this._init.cache, diff --git a/lib/response.ts b/lib/response.ts index bea24da..198dba5 100644 --- a/lib/response.ts +++ b/lib/response.ts @@ -20,6 +20,7 @@ import { BodyTypes, DecodeFunction, Decoder, + HttpVersion, ResponseInit, ResponseTypes, } from "./core"; @@ -41,6 +42,7 @@ import { interface Extra { + httpVersion: HttpVersion; redirected: boolean; integrity: string; type: ResponseTypes; @@ -65,6 +67,8 @@ export class Response extends Body public readonly url: string; // @ts-ignore public readonly useFinalURL: boolean; + // @ts-ignore + public readonly httpVersion: HttpVersion; constructor( body: BodyTypes | Body | null, @@ -106,6 +110,10 @@ export class Response extends Body enumerable: true, value: headers, }, + httpVersion: { + enumerable: true, + value: _extra.httpVersion, + }, ok: { enumerable: true, get: ( ) => this.status >= 200 && this.status < 300, @@ -207,6 +215,7 @@ function makeInitHttp2( inHeaders: IncomingHttpHeaders ) } function makeExtra( + httpVersion: HttpVersion, url: string, redirected: boolean, integrity?: string @@ -215,7 +224,7 @@ function makeExtra( { const type = "basic"; // TODO: Implement CORS - return { redirected, integrity, type, url }; + return { httpVersion, redirected, integrity, type, url }; } function handleEncoding( @@ -261,7 +270,7 @@ export class StreamResponse extends Response headers: IncomingHttpHeaders, redirected: boolean, init: Partial< ResponseInit >, - httpVersion: 1 | 2, + httpVersion: HttpVersion, integrity?: string ) { @@ -279,7 +288,7 @@ export class StreamResponse extends Response : makeInitHttp2( headers ) ), }, - makeExtra( url, redirected, integrity ) + makeExtra( httpVersion, url, redirected, integrity ) ); } } diff --git a/test/fetch-h2/httpbin.ts b/test/fetch-h2/httpbin.ts index 9db008e..71b61a1 100644 --- a/test/fetch-h2/httpbin.ts +++ b/test/fetch-h2/httpbin.ts @@ -94,6 +94,7 @@ describe( name, function( ) const response = await fetch( `${host}/post`, { + allowForbiddenHeaders: true, body: new StreamBody( stream ), headers: { "content-length": "6" }, method: "POST", @@ -111,6 +112,7 @@ describe( name, function( ) const eventualResponse = fetch( `${host}/post`, { + allowForbiddenHeaders: true, body: new StreamBody( stream ), headers: { "content-length": "6" }, method: "POST", diff --git a/test/fetch-h2/index.ts b/test/fetch-h2/index.ts index 9f81868..f9885f3 100644 --- a/test/fetch-h2/index.ts +++ b/test/fetch-h2/index.ts @@ -81,6 +81,10 @@ describe( `(${version} over ${proto.replace( ":", "" )})`, ( ) => else expect( res[ ":path" ] ).to.equal( "/headers" ); + const versionNumber = + parseInt( version.substr( version.length - 1 ), 10 ); + expect( response.httpVersion ).to.equal( versionNumber ); + await server.shutdown( ); } ); @@ -97,6 +101,7 @@ describe( `(${version} over ${proto.replace( ":", "" )})`, ( ) => await fetch( `${proto}//localhost:${port}/headers`, { + allowForbiddenHeaders: true, body: new DataBody( "foobar" ), headers, method: "POST", @@ -148,6 +153,7 @@ describe( `(${version} over ${proto.replace( ":", "" )})`, ( ) => await fetch( `${proto}//localhost:${port}/headers`, { + allowForbiddenHeaders: true, body: new DataBody( "foobar" ), headers, method: "POST", @@ -174,6 +180,7 @@ describe( `(${version} over ${proto.replace( 
":", "" )})`, ( ) => const eventualResponse = fetch( `${proto}//localhost:${port}/echo`, { + allowForbiddenHeaders: true, body: new StreamBody( stream ), headers: { "content-length": "6" }, method: "POST", @@ -433,6 +440,7 @@ describe( `(${version} over ${proto.replace( ":", "" )})`, ( ) => const eventualResponse = fetch( `${proto}//localhost:${port}/sha256`, { + allowForbiddenHeaders: true, body: new StreamBody( stream ), headers: { "content-length": "" + chunkSize * chunks }, method: "POST", @@ -549,6 +557,7 @@ describe( `(${version} over ${proto.replace( ":", "" )})`, ( ) => await fetch( `${proto}//localhost:${port}/headers`, { + allowForbiddenHeaders: true, headers: { host }, } ) From 35f55197b24380d97ed78027aee7c1b60729e329 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Sun, 13 Jan 2019 16:26:24 +0100 Subject: [PATCH 03/77] chore(coverage): Use source-map-support for TS coverage reports --- package.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/package.json b/package.json index 188ec34..46c3a98 100644 --- a/package.json +++ b/package.json @@ -22,8 +22,8 @@ "lint": "node_modules/.bin/tslint --project .", "mocha": "node_modules/.bin/mocha --bail --check-leaks dist/test", "mocha:debug": "node_modules/.bin/mocha --inspect-brk dist/test", - "test": "npm run lint && node_modules/.bin/nyc npm run mocha", - "testfast": "node_modules/.bin/nyc node_modules/.bin/_mocha -- --bail --check-leaks -i --grep nghttp2.org dist/test", + "test": "npm run lint && node_modules/.bin/nyc --require source-map-support/register npm run mocha", + "testfast": "node_modules/.bin/nyc --require source-map-support/register node_modules/.bin/_mocha -- --bail --check-leaks -i --grep nghttp2.org dist/test", "test-nocov": "node_modules/.bin/mocha --bail --check-leaks dist/test", "buildtest": "npm run build && npm run test-nocov", "buildtestfast": "npm run build && node_modules/.bin/mocha -i --grep nghttp2.org dist/test", @@ -83,7 +83,7 @@ "tough-cookie": "3.x" }, "publishConfig": { - "tag": "beta" + "tag": "next" }, "config": { "commitizen": { From 258c6ca3af20ea0868372c96c42fd5dbfb9b6f10 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Sun, 13 Jan 2019 16:27:02 +0100 Subject: [PATCH 04/77] docs(readme): Updated README for HTTP/1 support --- README.md | 118 ++++++++++++++++++++++++++++++++++++++++++++++++------ 1 file changed, 106 insertions(+), 12 deletions(-) diff --git a/README.md b/README.md index 4f618cc..6b9b1c1 100644 --- a/README.md +++ b/README.md @@ -7,9 +7,13 @@ # fetch-h2 -HTTP/2 [Fetch API](https://developer.mozilla.org/docs/Web/API/Fetch_API) implementation for Node.js (using Node.js' built-in `http2` module). This module is intended to be solely for HTTP/2, handling HTTP/2 sessions transparently. For an HTTP/1(.1)-only alternative, you can use [node-fetch](https://github.com/bitinn/node-fetch). +[Fetch API](https://developer.mozilla.org/docs/Web/API/Fetch_API) implementation for Node.js using the built-in `http`, `https` and `http2` packages without any compatibility layer. -The module tries to adhere to the [Fetch API](https://developer.mozilla.org/docs/Web/API/Fetch_API) very closely, but extends it slightly to fit better into Node.js (e.g. using streams). +`fetch-h2` handles HTTP/1(.1) and HTTP/2 connections transparently since 2.0. By default (although configurable) a url to `http://` uses HTTP/1(.1) and for the very uncommon plain-text HTTP/2 (called _h2c_), `http2://` can be provided. 
The library supports ALPN negotation, so `https://` will use either HTTP/1(.1) or HTTP/2 depending on what the server supports. By default, HTTP/2 is preferred. + +The library handles sessions transparently and re-uses sockets when possible. + +`fetch-h2` tries to adhere to the [Fetch API](https://developer.mozilla.org/docs/Web/API/Fetch_API) very closely, but extends it slightly to fit better into Node.js (e.g. using streams). Regardless of whether you're actually interested in the Fetch API per se or not, as long as you want to handle HTTP/2 client requests in Node.js, this module is a lot easier and more natural to use than the native built-in [`http2`](https://nodejs.org/dist/latest-v10.x/docs/api/http2.html) module which is low-level in comparison. @@ -17,13 +21,16 @@ Regardless of whether you're actually interested in the Fetch API per se or not, By default, `fetch-h2` will accept `gzip` and `deflate` encodings, and decode transparently. If you also want to allow Brotli (`br`), use the [`fetch-h2-br`](https://www.npmjs.com/package/fetch-h2-br) package. -**NOTE;** HTTP/2 support was introduced in Node.js (version 8.4), and required `node` to be started with a flag `--expose-http2` up to version 8.7 (this module won't work without it). From Node.js 8.8, the `http2` module is available without any flag. The API has changed and not settled until 10.x, **and `fetch-h2` requires 10.4+**. - ## Releases Since 1.0.0, `fetch-h2` requires Node.js 10. +Since 2.0.0, `fetch-h2` requires Node.js 10.4. + + +# API + ## Imports This README uses the ES6/TypeScript `import` syntax, mainly because `fetch-h2` is written in TypeScript (and also because ES6 modules will eventually arrive in Node.js). If you use pure JavaScript in Node.js today, you don't have *modules* support, just `require` instead, e.g: @@ -83,7 +90,7 @@ const responseText = await response.text( ); With HTTP/2, all requests to the same *origin* (domain name and port) share a single session (socket). In browsers, it is eventually disconnected, maybe. It's up to the implementation to handle disconnections. In `fetch-h2`, you can disconnect it manually, which is great e.g. when using `fetch-h2` in unit tests. -### Disconnect +## Disconnect Disconnect the session for a certain url (the session for the *origin* will be disconnected) using `disconnect`, and disconnect **all** sessions with `disconnectAll`. Read more on *contexts* below to understand what "all" really means... @@ -96,7 +103,7 @@ await disconnectAll( ); ``` -### Pushed requests +## Pushed requests When the server pushes a request, this can be handled using the `onPush` handler. Registering an `onPush` handler is, just like the disconnection functions, *per-context*. @@ -150,6 +157,8 @@ These are features in `fetch-h2`, that don't exist in the Fetch API. Some things * `fetch()` has an extra option, `onTrailers` (of the type `OnTrailers`) which is a callback that will receive trailing headers. * The `Request.clone()` member function has an optional `url` argument. * The response `text()` and `arrayBuffer()` has an optional argument `allowIncomplete` which defaults to `false`. If set to `true` these function will return incomplete bodies, i.e. "as much as was read" before the stream was prematurely closed (disconnected). If integrity checks are enabled, the functions will throw anyway if the body is incomplete. + * The `Request` class (options to `fetch`) has an extra property `allowForbiddenHeaders`, which defaults to `false`. 
+ * The response object has an extra property `httpVersion` which is either `1` or `2`, depending on what was negotiated with the server. ## Contexts @@ -180,6 +189,7 @@ const { fetch, disconnect, disconnectAll, onPush } = context( ); Contexts can be configured with options when constructed. The default context can be configured using the `setup( )` function, but if this function is used, call it only once, and before any usage of `fetch-h2`, or the result is undefined. + ### Context configuration The options to `setup( )` are the same as those to `context( )` and is available as a TypeScript type `ContextOptions`. @@ -188,23 +198,69 @@ The options to `setup( )` are the same as those to `context( )` and is available // The options object interface ContextOptions { - userAgent: string; - overwriteUserAgent: boolean; - accept: string; - cookieJar: CookieJar; - decoders: ReadonlyArray< Decoder >; - session: SecureClientSessionOptions; + userAgent: + string | + PerOrigin< string >; + overwriteUserAgent: + boolean | + PerOrigin< boolean >; + accept: + string | + PerOrigin< string >; + cookieJar: + CookieJar; + decoders: + ReadonlyArray< Decoder > | + PerOrigin< ReadonlyArray< Decoder > >; + session: + SecureClientSessionOptions | + PerOrigin< SecureClientSessionOptions >; + httpProtocol: + HttpProtocols | + PerOrigin< HttpProtocols >; + httpsProtocols: + ReadonlyArray< HttpProtocols > | + PerOrigin< ReadonlyArray< HttpProtocols > >; + http1: + Partial< Http1Options > | + PerOrigin< Partial< Http1Options > >; +} +``` + +where `Http1Options` is +```ts +interface Http1Options +{ + keepAlive: boolean | PerOrigin< boolean >; + keepAliveMsecs: number | PerOrigin< number >; + maxSockets: number | PerOrigin< number >; + maxFreeSockets: number | PerOrigin< number >; + timeout: void | number | PerOrigin< void | number >; } ``` + +#### Per-origin configuration + +Any of these options, except for the cookie jar, can be provided either as a value or as a callback function (`PerOrigin`) which takes the _origin_ as argument and returns the value. A `void` return from that function, will use the built-in default. + + +### User agent + By specifying a `userAgent` string, this will be added to the built-in `user-agent` header. If defined, and `overwriteUserAgent` is true, the built-in user agent string will not be sent. + +### Accept + `accept` can be specified, which is the `accept` header. The default is: ``` application/json, text/*;0.9, */*;q=0.8 ``` + +### Cookies + `cookieJar` can be set to a custom cookie jar, constructed as `new CookieJar( )`. `CookieJar` is a class exported by `fetch-h2` and has three functions: ```ts @@ -218,10 +274,48 @@ application/json, text/*;0.9, */*;q=0.8 where `Cookie` is a [`tough-cookie` Cookie](https://www.npmjs.com/package/tough-cookie#cookie). + +### Content encodings (compression) + +By default, `gzip` and `deflate` are supported. + `decoders` can be an array of custom decoders, such as [`fetch-h2-br`](https://www.npmjs.com/package/fetch-h2-br) which adds Brotli content decoding support. + +### Low-level session configuration + `session` can be used for lower-level Node.js settings. This is the options to [`http2::connect`](https://nodejs.org/dist/latest-v10.x/docs/api/http2.html#http2_http2_connect_authority_options_listener) (including the [`net::connect`](https://nodejs.org/dist/latest-v10.x/docs/api/net.html#net_net_connect) and [`tls::connect`](https://nodejs.org/dist/latest-v10.x/docs/api/tls.html#tls_tls_connect_options_callback) options). 
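Like most options, it can be provided per origin; as a sketch (the origin matching below is only illustrative), certificate checks could be relaxed for a local development host while staying strict everywhere else:

```ts
import { setup } from "fetch-h2";

// Illustrative only: looser TLS options for local origins,
// defaults (strict verification) for everything else.
setup( {
	session: ( origin: string ) =>
		origin.includes( "localhost" )
			? { rejectUnauthorized: false }
			: { },
} );
```
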
Use this option to specify `{rejectUnauthorized: false}` if you want to allow unauthorized (e.g. self-signed) certificates. +Some of these fields are compatible with HTTP/1.1 too, such as `rejectUnauthorized`. + + +### HTTP Protocols + +The type `HttpProtocols` is `"http1" | "http2"`. + +The option `httpProtocol` can be set to either `"http2"` or `"http1"` (the default). This controls what links to `http://` will use. Note that no web server will likely support HTTP/2 unencrypted. + +`httpsProtocol` is an array of supported protocols to negotiate over https. It defaults to `[ "http2", "http1" ]`, but can be swapped to prefer HTTP/1(.1) rather than HTTP/2, or to require one of them by only containing that protocol. + + +### HTTP/1 + +HTTP/2 allows for multiple concurrent streams (requests) over the same session (socket). HTTP/1 has no such feature, so commonly, clients open a set of connections and re-use them to allow for concurrency. + +The `http1` options object can be used to configure this. + + +#### Keep-alive + +`http1.keepAlive` defaults to false, but can be set to true, to allow connections to linger so that they can be reused. The `http1.keepAliveMsecs` time (defaults to 1000ms, i.e. 1s) specifies the delay before keep-alive probing. + + +#### Sockets + +`http1.maxSockets` defines the maximum sockets to allow per origin, and `http1.maxFreeSockets` the maximum number of lingering sockets, waiting to be re-used for new requests. + +`http1.timeout` defines the HTTP/1 timeout. + ## Errors From 57580889a66e4a369f8aeaa8ca40095d1d021c0e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Sun, 13 Jan 2019 22:24:06 +0100 Subject: [PATCH 05/77] feat(core): Keep track of socket ref/deref in sync with request pool When a request has been made (over HTTP/1 or HTTP/2 with or without https), the socket will be unref'd for Nodejs to be able to exit the program (without requiring explicit disconnect). When sockets are re-used, they are ref'd again. 
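The counting itself can be pictured with a simplified sketch (an illustration of
the approach, not the exact code in this commit; `makeRefCounter` is a
hypothetical name):

```ts
// A session/socket starts ref'd by the request that created it. Further
// requests ref it again, and every finished request unrefs it, so an idle
// connection no longer keeps the Node.js event loop alive.
function makeRefCounter( target: { ref( ): void; unref( ): void } )
{
	let counter = 1; // begins ref'd

	return {
		ref: ( ) =>
		{
			if ( ++counter === 1 )
				target.ref( ); // went from unref'd back to ref'd
		},
		unref: ( ) =>
		{
			if ( --counter === 0 )
				target.unref( ); // last user done; allow the process to exit
		},
	};
}
```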
--- .npmignore | 1 + lib/context-http1.ts | 79 ++++-- lib/context-http2.ts | 94 ++++++- lib/context.ts | 51 ++-- lib/core.ts | 16 +- lib/fetch-common.ts | 8 +- lib/fetch-http1.ts | 9 +- lib/fetch-http2.ts | 381 +++++++++++++------------- package.json | 2 + scripts/test-client | 3 + test-client/index.ts | 39 +++ test/fetch-h2/event-loop-reference.ts | 47 ++++ test/fetch-h2/index.ts | 2 +- test/lib/server-common.ts | 2 +- tsconfig.json | 3 +- 15 files changed, 487 insertions(+), 250 deletions(-) create mode 100755 scripts/test-client create mode 100755 test-client/index.ts create mode 100644 test/fetch-h2/event-loop-reference.ts diff --git a/.npmignore b/.npmignore index c54941e..616c784 100644 --- a/.npmignore +++ b/.npmignore @@ -4,3 +4,4 @@ scripts tsconfig.json test/ dist/test/ +dist/test-client/ diff --git a/lib/context-http1.ts b/lib/context-http1.ts index 0114211..d1237fc 100644 --- a/lib/context-http1.ts +++ b/lib/context-http1.ts @@ -17,23 +17,36 @@ import { import { parseInput } from "./utils"; -export interface FreeSocketInfo -{ - socket?: Socket; - shouldCreateNew: boolean; -} - export interface ConnectOptions { rejectUnauthorized: boolean | undefined; createConnection: ( ) => Socket; } +export interface SocketAndCleanup +{ + socket: Socket; + cleanup: ( ) => void; +} + +export interface FreeSocketInfoWithSocket extends SocketAndCleanup +{ + shouldCreateNew: boolean; +} +export interface FreeSocketInfoWithoutSocket +{ + socket: never; + cleanup: never; + shouldCreateNew: boolean; +} +export type FreeSocketInfo = + FreeSocketInfoWithSocket | FreeSocketInfoWithoutSocket; + class OriginPool { private usedSockets = new Set< Socket >( ); private unusedSockets = new Set< Socket >( ); - private waiting: Array< Deferred< Socket > > = [ ]; + private waiting: Array< Deferred< SocketAndCleanup > > = [ ]; private keepAlive: boolean; private keepAliveMsecs: number; @@ -84,23 +97,25 @@ class OriginPool } ); this.usedSockets.add( socket ); + + return this.makeCleaner( socket ); } public getFreeSocket( ): FreeSocketInfo { - const socket = this.getFirstUnused( ); + const socketAndCleanup = this.getFirstUnused( ); - if ( socket ) - return { socket, shouldCreateNew: false }; + if ( socketAndCleanup ) + return { ...socketAndCleanup, shouldCreateNew: false }; const shouldCreateNew = this.maxSockets >= this.usedSockets.size; - return { shouldCreateNew }; + return { shouldCreateNew } as FreeSocketInfoWithoutSocket; } - public waitForSocket( ): Promise< Socket > + public waitForSocket( ): Promise< SocketAndCleanup > { - const deferred = defer< Socket >( ); + const deferred = defer< SocketAndCleanup >( ); this.waiting.push( deferred ); @@ -127,11 +142,14 @@ class OriginPool ); } - private getFirstUnused( ) + private getFirstUnused( ): SocketAndCleanup | null { for ( const socket of this.unusedSockets.values( ) ) + { // We obviously have a socket - return this.moveToUsed( socket ); + this.moveToUsed( socket ); + return { socket, cleanup: this.makeCleaner( socket ) }; + } return null; } @@ -141,8 +159,8 @@ class OriginPool if ( this.waiting.length === 0 ) return false; - const waiting = < Deferred< Socket > >this.waiting.shift( ); - waiting.resolve( socket ); + const waiting = < Deferred< SocketAndCleanup > >this.waiting.shift( ); + waiting.resolve( { socket, cleanup: this.makeCleaner( socket ) } ); return true; } @@ -150,9 +168,11 @@ class OriginPool { while ( this.waiting.length > 0 && this.unusedSockets.size > 0 ) { - const socket = < Socket >this.getFirstUnused( ); - const waiting = < Deferred< Socket 
> >this.waiting.shift( ); - waiting.resolve( socket ); + const socketAndCleanup = + < SocketAndCleanup >this.getFirstUnused( ); + const waiting = + < Deferred< SocketAndCleanup > >this.waiting.shift( ); + waiting.resolve( socketAndCleanup ); } } @@ -163,7 +183,20 @@ class OriginPool ); } - // @ts-ignore + private makeCleaner( socket: Socket ) + { + let hasCleaned = false; + return ( ) => + { + if ( hasCleaned ) + return; + hasCleaned = true; + + if ( !socket.destroyed ) + this.moveToUnused( socket ); + }; + } + private async moveToUnused( socket: Socket ) { if ( this.tryReuse( socket ) ) @@ -267,7 +300,7 @@ export class H1Context { return this.contextPool.hasOrigin( origin ) ? this.contextPool.getOriginPool( origin ).getFreeSocket( ) - : { shouldCreateNew: true }; + : { shouldCreateNew: true } as FreeSocketInfoWithoutSocket; } public addUsedSocket( origin: string, socket: Socket ) @@ -275,7 +308,7 @@ export class H1Context return this.contextPool.getOriginPool( origin ).addUsed( socket ); } - public waitForSocket( origin: string ): Promise< Socket > + public waitForSocket( origin: string ): Promise< SocketAndCleanup > { return this.contextPool.getOriginPool( origin ).waitForSocket( ); } diff --git a/lib/context-http2.ts b/lib/context-http2.ts index 4286983..5ccf923 100644 --- a/lib/context-http2.ts +++ b/lib/context-http2.ts @@ -30,6 +30,9 @@ interface H2SessionItem { session: ClientHttp2Session; promise: Promise< ClientHttp2Session >; + + ref: ( ) => void; + unref: ( ) => void; } export type PushHandler = @@ -71,7 +74,11 @@ export class H2Context origin: string, extraOptions?: SecureClientSessionOptions ) - : { didCreate: boolean; session: Promise< ClientHttp2Session > } + : { + didCreate: boolean; + session: Promise< ClientHttp2Session >; + cleanup: ( ) => void; + } { const willCreate = !this._h2sessions.has( origin ); @@ -112,10 +119,28 @@ export class H2Context this._h2sessions.set( origin, sessionItem ); } - const session = - ( < H2SessionItem >this._h2sessions.get( origin ) ).promise; + const { promise: session, ref, unref } = + ( < H2SessionItem >this._h2sessions.get( origin ) ); + + if ( !willCreate ) + // This was re-used + ref( ); + + // Avoid potential double-clean races + let hasCleanedUp = false; + const cleanup = ( ) => + { + if ( hasCleanedUp ) + return; + hasCleanedUp = true; + unref( ); + }; - return { didCreate: willCreate, session }; + return { + cleanup, + didCreate: willCreate, + session, + }; } public disconnectSession( session: ClientHttp2Session ): Promise< void > @@ -240,7 +265,9 @@ export class H2Context private handlePush( origin: string, pushedStream: ClientHttp2Stream, - requestHeaders: IncomingHttp2Headers + requestHeaders: IncomingHttp2Headers, + ref: ( ) => void, + unref: ( ) => void ) { if ( !this._pushHandler ) @@ -255,10 +282,14 @@ export class H2Context const pushedRequest = new Request( path, { headers: requestHeaders } ); + ref( ); + const futureResponse = new Promise< Response >( ( resolve, reject ) => { const guard = syncGuard( reject, { catchAsync: true } ); + pushedStream.once( "close", unref ); + pushedStream.once( "aborted", ( ) => reject( new AbortError( "Response aborted" ) ) ); @@ -313,10 +344,32 @@ export class H2Context // tslint:disable-next-line const aGuard = asyncGuard( console.error.bind( console ) ); - const pushHandler = aGuard( - ( stream: ClientHttp2Stream, headers: IncomingHttp2Headers ) => - this.handlePush( origin, stream, headers ) - ); + const sessionRefs: Partial< H2SessionItem > = { }; + + const makeRefs = ( session: 
ClientHttp2Session ) => + { + let counter = 1; // Begins ref'd + sessionRefs.ref = ( ) => + { + if ( session.destroyed ) + return; + + if ( counter === 0 ) + // Go from unref'd to ref'd + session.ref( ); + ++counter; + }; + sessionRefs.unref = ( ) => + { + if ( session.destroyed ) + return; + + --counter; + if ( counter === 0 ) + // Go from ref'd to unref'd + session.unref( ); + }; + }; const options = { ...this._getSessionOptions( origin ), @@ -329,7 +382,21 @@ export class H2Context session = http2Connect( origin, options, ( ) => resolve( session ) ); - session.on( "stream", pushHandler ); + makeRefs( session ); + + session.on( "stream", aGuard( + ( + stream: ClientHttp2Stream, + headers: IncomingHttp2Headers + ) => + this.handlePush( + origin, + stream, + headers, + < ( ) => void >sessionRefs.ref, + < ( ) => void >sessionRefs.unref + ) + ) ); session.once( "close", ( ) => reject( makeOkError( makeError( ) ) ) ); @@ -341,6 +408,11 @@ export class H2Context } ); - return { promise, session }; + return { + promise, + ref: < ( ) => void >sessionRefs.ref, + session, + unref: < ( ) => void >sessionRefs.unref, + }; } } diff --git a/lib/context.ts b/lib/context.ts index 2e0d3ba..c6999c1 100644 --- a/lib/context.ts +++ b/lib/context.ts @@ -1,6 +1,5 @@ import { ClientRequest } from "http"; import { - ClientHttp2Session, SecureClientSessionOptions, } from "http2"; import { Socket } from "net"; @@ -22,6 +21,7 @@ import { SimpleSession, SimpleSessionHttp1, SimpleSessionHttp2, + SimpleSessionHttp2Session, } from "./core"; import { fetch as fetchHttp1 } from "./fetch-http1"; import { fetch as fetchHttp2 } from "./fetch-http2"; @@ -183,11 +183,18 @@ export class Context userAgent: ( ) => this.userAgent( origin ), } ); - const doFetchHttp1 = ( socket: Socket ) => + const doFetchHttp1 = ( socket: Socket, cleanup: ( ) => void ) => { const sessionGetterHttp1: SimpleSessionHttp1 = { get: ( url: string ) => - this.getHttp1( url, socket, request, rejectUnauthorized ), + ( { + cleanup, + req: this.getHttp1( + url, + socket, + request, + rejectUnauthorized ), + } ), ...makeSimpleSession( "http1" ), }; return fetchHttp1( sessionGetterHttp1, request, init ); @@ -204,18 +211,19 @@ export class Context const tryWaitForHttp1 = async ( ) => { - const { socket: freeHttp1Socket, shouldCreateNew } = + const { socket: freeHttp1Socket, cleanup, shouldCreateNew } = this.h1Context.getFreeSocketForOrigin( origin ); if ( freeHttp1Socket ) - return doFetchHttp1( freeHttp1Socket ); + return doFetchHttp1( freeHttp1Socket, cleanup ); if ( !shouldCreateNew ) { // We've maxed out HTTP/1 connections, wait for one to be // freed. 
- const socket = await this.h1Context.waitForSocket( origin ); - return doFetchHttp1( socket ); + const { socket, cleanup } = + await this.h1Context.waitForSocket( origin ); + return doFetchHttp1( socket, cleanup ); } }; @@ -227,8 +235,8 @@ export class Context return resp; const socket = await this.h1Context.makeNewConnection( url ); - this.h1Context.addUsedSocket( origin, socket ); - return doFetchHttp1( socket ); + const cleanup = this.h1Context.addUsedSocket( origin, socket ); + return doFetchHttp1( socket, cleanup ); } else if ( protocol === "http2" ) { @@ -262,20 +270,23 @@ export class Context if ( protocol === "http2" ) { - // Convert socket into http2 session - await this.h2Context.getOrCreateHttp2( + // Convert socket into http2 session, this will ref (*) + const { cleanup } = await this.h2Context.getOrCreateHttp2( origin, { createConnection: ( ) => socket, } ); // Session now lingering, it will be re-used by the next get() - return doFetchHttp2( ); + const ret = doFetchHttp2( ); + // Unref lingering ref + cleanup( ); + return ret; } else // protocol === "http1" { - this.h1Context.addUsedSocket( origin, socket ); - return doFetchHttp1( socket ); + const cleanup = this.h1Context.addUsedSocket( origin, socket ); + return doFetchHttp1( socket, cleanup ); } } } @@ -315,9 +326,9 @@ export class Context } private getOrCreateHttp2( origin: string, created = false ) - : Promise< ClientHttp2Session > + : Promise< SimpleSessionHttp2Session > { - const { didCreate, session } = + const { didCreate, session, cleanup } = this.h2Context.getOrCreateHttp2( origin ); return session @@ -327,12 +338,14 @@ export class Context // Created in this request, forward error throw err; // Not created in this request, try again - return this.getOrCreateHttp2( origin, true ); - } ); + return this.getOrCreateHttp2( origin, true ) + .then( ( { session } ) => session ); + } ) + .then( session => ( { session, cleanup } ) ); } private getHttp2( url: string ) - : Promise< ClientHttp2Session > + : Promise< SimpleSessionHttp2Session > { const { origin } = typeof url === "string" ? 
new URL( url ) : url; diff --git a/lib/core.ts b/lib/core.ts index 2c0412e..46f4406 100644 --- a/lib/core.ts +++ b/lib/core.ts @@ -249,12 +249,24 @@ export interface SimpleSession contentDecoders( ): ReadonlyArray< Decoder >; } +export interface SimpleSessionHttp1Request +{ + req: ClientRequest; + cleanup: ( ) => void; +} + +export interface SimpleSessionHttp2Session +{ + session: ClientHttp2Session; + cleanup: ( ) => void; +} + export interface SimpleSessionHttp1 extends SimpleSession { - get( url: string ): ClientRequest; + get( url: string ): SimpleSessionHttp1Request; } export interface SimpleSessionHttp2 extends SimpleSession { - get( url: string ): Promise< ClientHttp2Session >; + get( url: string ): Promise< SimpleSessionHttp2Session >; } diff --git a/lib/fetch-common.ts b/lib/fetch-common.ts index 6eb4b77..783e9ac 100644 --- a/lib/fetch-common.ts +++ b/lib/fetch-common.ts @@ -1,7 +1,7 @@ import { constants as h2constants } from "http2"; import { URL } from "url"; -import { Finally } from "already"; +import { Finally, rethrow } from "already"; import { BodyInspector } from "./body"; import { @@ -248,14 +248,16 @@ export function handleSignalAndTimeout( signalPromise: Promise< Response > | null, timeoutInfo: TimeoutInfo | null, cleanup: ( ) => void, - fetcher: ( ) => Promise< Response > + fetcher: ( ) => Promise< Response >, + onError: ( ) => void ) { return Promise.race( [ < Promise< any > >signalPromise, < Promise< any > >( timeoutInfo && timeoutInfo.promise ), - fetcher( ), + fetcher( ).catch( rethrow( onError ) ), + ] .filter( promise => promise ) ) diff --git a/lib/fetch-http1.ts b/lib/fetch-http1.ts index 82f97d0..e2e1e75 100644 --- a/lib/fetch-http1.ts +++ b/lib/fetch-http1.ts @@ -57,10 +57,10 @@ export async function fetchImpl( url, } = await setupFetch( session, input, init, extra ); + const { req, cleanup: socketCleanup } = session.get( url ); + const doFetch = async ( ): Promise< Response > => { - const req = session.get( url ); - for ( const [ key, value ] of Object.entries( headersToSend ) ) { if ( value != null ) @@ -111,6 +111,8 @@ export async function fetchImpl( req.once( "response", guard( ( res: IncomingMessage ) => { + res.once( "end", socketCleanup ); + if ( signal && signal.aborted ) { // No reason to continue, the request is aborted @@ -233,7 +235,8 @@ export async function fetchImpl( signalPromise, timeoutInfo, cleanup, - doFetch + doFetch, + socketCleanup ); } diff --git a/lib/fetch-http2.ts b/lib/fetch-http2.ts index 60f7478..3d6337f 100644 --- a/lib/fetch-http2.ts +++ b/lib/fetch-http2.ts @@ -74,230 +74,239 @@ async function fetchImpl( const { raceConditionedGoaway } = extra; - function doFetch( ): Promise< Response > + const streamPromise = session.get( url ); + + async function doFetch( ): Promise< Response > { - return session.get( url ) - .then( async h2session => + const { session: h2session, cleanup: socketCleanup } = + await streamPromise; + + const stream = h2session.request( headersToSend, { endStream } ); + + const response = new Promise< Response >( ( resolve, reject ) => { - const stream = h2session.request( headersToSend, { endStream } ); + const guard = syncGuard( reject, { catchAsync: true } ); - const response = new Promise< Response >( ( resolve, reject ) => + stream.on( "aborted", guard( ( ..._whatever ) => { - const guard = syncGuard( reject, { catchAsync: true } ); + reject( makeAbortedError( ) ); + } ) ); - stream.on( "aborted", guard( ( ..._whatever ) => - { - reject( makeAbortedError( ) ); - } ) ); + stream.on( "error", guard( ( err: 
Error ) => + { + reject( err ); + } ) ); - stream.on( "error", guard( ( err: Error ) => + stream.on( "frameError", guard( + ( _type: number, code: number, _streamId: number ) => { - reject( err ); - } ) ); - - stream.on( "frameError", guard( - ( _type: number, code: number, _streamId: number ) => + if ( + code === NGHTTP2_ERR_START_STREAM_NOT_ALLOWED && + endStream + ) { + // This could be due to a race-condition in GOAWAY. + // As of current Node.js, the 'goaway' event is + // emitted on the session before this event + // is emitted, so we will know if we got it. if ( - code === NGHTTP2_ERR_START_STREAM_NOT_ALLOWED && - endStream + !raceConditionedGoaway.has( origin ) && + hasGotGoaway( h2session ) ) { - // This could be due to a race-condition in GOAWAY. - // As of current Node.js, the 'goaway' event is - // emitted on the session before this event - // is emitted, so we will know if we got it. - if ( - !raceConditionedGoaway.has( origin ) && - hasGotGoaway( h2session ) - ) - { - // Don't retry again due to potential GOAWAY - raceConditionedGoaway.add( origin ); - - // Since we've got the 'goaway' event, the - // context has already released the session, - // so a retry will create a new session. - resolve( - fetchImpl( - session, - request, - { signal, onTrailers }, - { - raceConditionedGoaway, - redirected, - timeoutAt, - } - ) - ); - - return; - } + // Don't retry again due to potential GOAWAY + raceConditionedGoaway.add( origin ); + + // Since we've got the 'goaway' event, the + // context has already released the session, + // so a retry will create a new session. + resolve( + fetchImpl( + session, + request, + { signal, onTrailers }, + { + raceConditionedGoaway, + redirected, + timeoutAt, + } + ) + ); + + return; } + } - reject( new Error( "Request failed" ) ); - } ) - ); + reject( new Error( "Request failed" ) ); + } ) + ); - stream.on( "close", guard( ( ) => - { - // We'll get an 'error' event if there actually is an - // error, but not if we got NGHTTP2_NO_ERROR. - // In case of an error, the 'error' event will be awaited - // instead, to get (and propagate) the error object. - if ( stream.rstCode === NGHTTP2_NO_ERROR ) - reject( - new AbortError( "Stream prematurely closed" ) ); - } ) ); - - stream.on( "timeout", guard( ( ..._whatever ) => - { - reject( makeTimeoutError( ) ); - } ) ); + stream.on( "close", guard( ( ) => + { + socketCleanup( ); + + // We'll get an 'error' event if there actually is an + // error, but not if we got NGHTTP2_NO_ERROR. + // In case of an error, the 'error' event will be awaited + // instead, to get (and propagate) the error object. 
+ if ( stream.rstCode === NGHTTP2_NO_ERROR ) + reject( + new AbortError( "Stream prematurely closed" ) ); + } ) ); + + stream.on( "timeout", guard( ( ..._whatever ) => + { + reject( makeTimeoutError( ) ); + } ) ); - stream.on( "trailers", guard( - ( _headers: IncomingHttp2Headers, _flags: any ) => + stream.on( "trailers", guard( + ( _headers: IncomingHttp2Headers, _flags: any ) => + { + if ( !onTrailers ) + return; + try { - if ( !onTrailers ) - return; - try - { - const headers = new GuardedHeaders( "response" ); + const headers = new GuardedHeaders( "response" ); - Object.keys( _headers ).forEach( key => - { - if ( Array.isArray( _headers[ key ] ) ) - ( < Array< string > >_headers[ key ] ) - .forEach( value => - headers.append( key, value ) ); - else - headers.set( key, "" + _headers[ key ] ); - } ); - - onTrailers( headers ); - } - catch ( err ) + Object.keys( _headers ).forEach( key => { - // TODO: Implement #8 - // tslint:disable-next-line - console.warn( "Trailer handling failed", err ); - } - } ) ); - - // ClientHttp2Stream events - - stream.on( "continue", guard( ( ..._whatever ) => + if ( Array.isArray( _headers[ key ] ) ) + ( < Array< string > >_headers[ key ] ) + .forEach( value => + headers.append( key, value ) ); + else + headers.set( key, "" + _headers[ key ] ); + } ); + + onTrailers( headers ); + } + catch ( err ) { - reject( make100Error( ) ); - } ) ); + // TODO: Implement #8 + // tslint:disable-next-line + console.warn( "Trailer handling failed", err ); + } + } ) ); - stream.on( "headers", guard( - ( headers: IncomingHttp2Headers, _flags: any ) => - { - const code = headers[ HTTP2_HEADER_STATUS ]; - reject( new Error( - `Request failed with a ${code} status. ` + - "Any 1xx error is unexpected to fetch() and " + - "shouldn't happen." ) ); - } - ) ); + // ClientHttp2Stream events - stream.on( "response", guard( - ( headers: IncomingHttp2Headers ) => + stream.on( "continue", guard( ( ..._whatever ) => + { + reject( make100Error( ) ); + } ) ); + + stream.on( "headers", guard( + ( headers: IncomingHttp2Headers, _flags: any ) => { - if ( signal && signal.aborted ) - { - // No reason to continue, the request is aborted - stream.destroy( ); - return; - } + const code = headers[ HTTP2_HEADER_STATUS ]; + reject( new Error( + `Request failed with a ${code} status. ` + + "Any 1xx error is unexpected to fetch() and " + + "shouldn't happen." 
) ); + } + ) ); + + stream.on( "response", guard( + ( headers: IncomingHttp2Headers ) => + { + if ( signal && signal.aborted ) + { + // No reason to continue, the request is aborted + stream.destroy( ); + return; + } - const status = "" + headers[ HTTP2_HEADER_STATUS ]; - const location = parseLocation( - headers[ HTTP2_HEADER_LOCATION ], - url - ); + const status = "" + headers[ HTTP2_HEADER_STATUS ]; + const location = parseLocation( + headers[ HTTP2_HEADER_LOCATION ], + url + ); - const isRedirected = isRedirectStatus[ status ]; + const isRedirected = isRedirectStatus[ status ]; - if ( headers[ HTTP2_HEADER_SET_COOKIE ] ) - { - const setCookies = - arrayify( headers[ HTTP2_HEADER_SET_COOKIE ] ); + if ( headers[ HTTP2_HEADER_SET_COOKIE ] ) + { + const setCookies = + arrayify( headers[ HTTP2_HEADER_SET_COOKIE ] ); + + session.cookieJar.setCookies( setCookies, url ); + } + + delete headers[ "set-cookie" ]; + delete headers[ "set-cookie2" ]; + + if ( isRedirected && !location ) + return reject( makeIllegalRedirectError( ) ); + + if ( !isRedirected || redirect === "manual" ) + return resolve( + new StreamResponse( + contentDecoders, + url, + stream, + headers, + redirect === "manual" + ? false + : extra.redirected.length > 0, + { }, + 2, + integrity + ) + ); - session.cookieJar.setCookies( setCookies, url ); - } + if ( redirect === "error" ) + return reject( makeRedirectionError( location ) ); - delete headers[ "set-cookie" ]; - delete headers[ "set-cookie2" ]; - - if ( isRedirected && !location ) - return reject( makeIllegalRedirectError( ) ); - - if ( !isRedirected || redirect === "manual" ) - return resolve( - new StreamResponse( - contentDecoders, - url, - stream, - headers, - redirect === "manual" - ? false - : extra.redirected.length > 0, - { }, - 2, - integrity - ) - ); - - if ( redirect === "error" ) - return reject( makeRedirectionError( location ) ); - - // redirect is 'follow' - - // We don't support re-sending a non-GET/HEAD request (as - // we don't want to [can't, if its' streamed] re-send the - // body). The concept is fundementally broken anyway... - if ( !endStream ) - return reject( - makeRedirectionMethodError( location, method ) - ); - - if ( !location ) - return reject( makeIllegalRedirectError( ) ); + // redirect is 'follow' - stream.destroy( ); - resolve( - fetchImpl( - session, - request.clone( location ), - { signal, onTrailers }, - { - raceConditionedGoaway, - redirected: redirected.concat( url ), - timeoutAt, - } - ) + // We don't support re-sending a non-GET/HEAD request (as + // we don't want to [can't, if its' streamed] re-send the + // body). The concept is fundementally broken anyway... 
+ if ( !endStream ) + return reject( + makeRedirectionMethodError( location, method ) ); - } ) ); - } ); - if ( !endStream ) - await request.readable( ) - .then( readable => - { - readable.pipe( stream ); - } ); + if ( !location ) + return reject( makeIllegalRedirectError( ) ); - return response; + stream.destroy( ); + resolve( + fetchImpl( + session, + request.clone( location ), + { signal, onTrailers }, + { + raceConditionedGoaway, + redirected: redirected.concat( url ), + timeoutAt, + } + ) + ); + } ) ); } ); + + if ( !endStream ) + await request.readable( ) + .then( readable => + { + readable.pipe( stream ); + } ); + + return response; } return handleSignalAndTimeout( signalPromise, timeoutInfo, cleanup, - doFetch + doFetch, + ( ) => + { + streamPromise + .then( ( { cleanup } ) => cleanup( ) ) + .catch( _err => { } ); + } ); } diff --git a/package.json b/package.json index 46c3a98..063493c 100644 --- a/package.json +++ b/package.json @@ -53,6 +53,7 @@ ], "devDependencies": { "@types/chai": "4.x", + "@types/execa": "^0.9.0", "@types/from2": "2.x", "@types/get-stream": "3.x", "@types/mocha": "5.x", @@ -62,6 +63,7 @@ "commitizen": "3.x", "coveralls": "3.x", "cz-conventional-changelog": "2.x", + "execa": "^1.0.0", "from2": "2.x", "mocha": "5.x", "nyc": "13.x", diff --git a/scripts/test-client b/scripts/test-client new file mode 100755 index 0000000..01c2371 --- /dev/null +++ b/scripts/test-client @@ -0,0 +1,3 @@ +#!/usr/bin/env node + +require( '../dist/test-client' ); diff --git a/test-client/index.ts b/test-client/index.ts new file mode 100755 index 0000000..0599fd8 --- /dev/null +++ b/test-client/index.ts @@ -0,0 +1,39 @@ +// tslint:disable-next-line +import { fetch, setup, HttpProtocols } from ".."; + +async function work( ) +{ + const args = process.argv.slice( 2 ); + + const [ method, url, version, insecure ] = args; + + const rejectUnauthorized = insecure !== "insecure"; + + setup( { + http1: { + keepAlive: false, + }, + ...( + !version ? 
{ } : { + httpProtocol: version as HttpProtocols, + httpsProtocols: [ version as HttpProtocols ], + } + ), + session: { rejectUnauthorized }, + } ); + + const response = await fetch( + url, + { + method: < any >method, + } + ); + + const readable = await response.readable( ); + + readable.pipe( process.stdout ); +} + +work( ) +// tslint:disable-next-line +.catch( err => { console.error( err.stack ); } ); diff --git a/test/fetch-h2/event-loop-reference.ts b/test/fetch-h2/event-loop-reference.ts new file mode 100644 index 0000000..13c7a1d --- /dev/null +++ b/test/fetch-h2/event-loop-reference.ts @@ -0,0 +1,47 @@ +import * as path from "path"; + +import { expect } from "chai"; +import * as execa from "execa"; + +import { TestData } from "../lib/server-common"; +import { makeMakeServer } from "../lib/server-helpers"; + + +const script = path.resolve( __dirname, "../../../scripts/test-client" ); + +describe( "event-loop", function( ) +{ + this.timeout( 20000 ); + + const runs: Array< TestData > = [ + { proto: "http:", version: "http1" }, + { proto: "https:", version: "http1" }, + { proto: "http:", version: "http2" }, + { proto: "https:", version: "http2" }, + ]; + + runs.forEach( ( { proto, version } ) => + { + const { makeServer } = makeMakeServer( { proto, version } ); + + it( `should unref ${proto} ${version}`, async ( ) => + { + const { port, server } = await makeServer( ); + + const url = `${proto}//localhost:${port}/headers`; + + const body = { foo: "bar" }; + + const { stdout } = await execa( + script, + [ "GET", url, version, "insecure" ], + { input: JSON.stringify( body ) } + ); + + const responseBody = JSON.parse( stdout ); + expect( responseBody[ "user-agent" ] ).to.include( "fetch-h2/" ); + + await server.shutdown( ); + } ); + } ); +} ); diff --git a/test/fetch-h2/index.ts b/test/fetch-h2/index.ts index f9885f3..7701bf4 100644 --- a/test/fetch-h2/index.ts +++ b/test/fetch-h2/index.ts @@ -54,7 +54,7 @@ function ensureStatusSuccess( response: Response ): Response const { cycleOpts, makeServer } = makeMakeServer( { proto, version } ); const { disconnectAll, fetch, onPush } = - ( proto === "httpss:" && version === "http1" ) + ( proto === "http:" && version === "http1" ) ? { disconnectAll: _disconnectAll, fetch: _fetch, onPush: _onPush } : context( { ...cycleOpts } ); diff --git a/test/lib/server-common.ts b/test/lib/server-common.ts index 869dfd2..b0baf3b 100644 --- a/test/lib/server-common.ts +++ b/test/lib/server-common.ts @@ -16,7 +16,7 @@ import { HttpProtocols } from "../../"; export interface TestData { - proto: string; + proto: "http:" | "https:"; version: HttpProtocols; } diff --git a/tsconfig.json b/tsconfig.json index e2fd54b..a3d457f 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -15,6 +15,7 @@ "generated", "externs", "lib", - "test" + "test", + "test-client" ] } From b1c07fc0c169245fee447298e92309ff4c10ab39 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Sun, 13 Jan 2019 22:36:03 +0100 Subject: [PATCH 06/77] docs(README): Small README fixes --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 6b9b1c1..75bf3f1 100644 --- a/README.md +++ b/README.md @@ -153,12 +153,12 @@ These are features in `fetch-h2`, that don't exist in the Fetch API. Some things * The `body` that can be sent in a Request, and that is available on the Response, can be a Node.js `ReadableStream`. You can thereby stream data with a request, and stream the response body. 
* The `body` that can be sent in a Request can be a [`Body`](https://developer.mozilla.org/docs/Web/API/Body) object. It can also be a string or buffer. * `fetch()` has an extra option, `json` that can be used instead of `body` to send an object that will be JSON stringified. The appropriate `content-type` will be set if it isn't already. - * `fetch()` has an extra option, `timeout` which is a timeout in milliseconds before the request should be aborted and the returned promise thereby *rejected* (with an `TimeoutError`). + * `fetch()` has an extra option, `timeout` which is a timeout in milliseconds before the request should be aborted and the returned promise thereby *rejected* (with a `TimeoutError`). * `fetch()` has an extra option, `onTrailers` (of the type `OnTrailers`) which is a callback that will receive trailing headers. - * The `Request.clone()` member function has an optional `url` argument. + * The `Request.clone()` member function has an optional `url` argument for the cloned `Request`. * The response `text()` and `arrayBuffer()` has an optional argument `allowIncomplete` which defaults to `false`. If set to `true` these function will return incomplete bodies, i.e. "as much as was read" before the stream was prematurely closed (disconnected). If integrity checks are enabled, the functions will throw anyway if the body is incomplete. * The `Request` class (options to `fetch`) has an extra property `allowForbiddenHeaders`, which defaults to `false`. - * The response object has an extra property `httpVersion` which is either `1` or `2`, depending on what was negotiated with the server. + * The response object has an extra property `httpVersion` which is either `1` or `2` (numbers), depending on what was negotiated with the server. ## Contexts From 1aed16d06bfcb9c1a4c2b6498a5a964d593fc19f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Sun, 13 Jan 2019 22:37:15 +0100 Subject: [PATCH 07/77] build(tag): Moved back to @latest for 2.0 release --- package.json | 3 --- 1 file changed, 3 deletions(-) diff --git a/package.json b/package.json index 063493c..7a992ce 100644 --- a/package.json +++ b/package.json @@ -84,9 +84,6 @@ "to-arraybuffer": "1.x", "tough-cookie": "3.x" }, - "publishConfig": { - "tag": "next" - }, "config": { "commitizen": { "path": "./node_modules/cz-conventional-changelog" From 4516b7cf7d637293e12ac387f7435007196b09c7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Mon, 14 Jan 2019 08:32:23 +0100 Subject: [PATCH 08/77] fix(alpn): Accept https servers w/o ALPN support, fallback to defaults --- lib/context-https.ts | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/lib/context-https.ts b/lib/context-https.ts index 77d5061..42bd74c 100644 --- a/lib/context-https.ts +++ b/lib/context-https.ts @@ -1,7 +1,7 @@ import { SecureClientSessionOptions } from "http2"; import { connect, ConnectionOptions, TLSSocket } from "tls"; -import { FetchError, HttpProtocols } from "./core"; +import { HttpProtocols } from "./core"; const alpnProtocols = { @@ -15,6 +15,8 @@ export interface HttpsSocketResult protocol: "http1" | "http2"; } +const defaultMethod: Array< HttpProtocols > = [ "http2", "http1" ]; + export function connectTLS( host: string, port: string, @@ -34,7 +36,8 @@ export function connectTLS( } ); const orderedProtocols = Buffer.concat( - _protocols.map( protocol => alpnProtocols[ protocol ] ) + ( _protocols.length === 0 ? 
_protocols : defaultMethod ) + .map( protocol => alpnProtocols[ protocol ] ) ); const opts: ConnectionOptions = { @@ -54,7 +57,14 @@ export function connectTLS( return reject( authorizationError ); if ( ![ "h2", "http/1.1", "http/1.0" ].includes( alpnProtocol ) ) - return reject( new FetchError( "Invalid ALPN response" ) ); + { + // Maybe the server doesn't understand ALPN, enforce + // user-provided protocol, or fallback to HTTP/1 + if ( _protocols.length === 1 ) + return resolve( { protocol: _protocols[ 0 ], socket } ); + else + return resolve( { protocol: "http1", socket } ); + } const protocol = alpnProtocol === "h2" ? "http2" : "http1"; From 6b526ead2aad90d355edaa3863b90b3186ec8146 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Mon, 14 Jan 2019 21:24:40 +0100 Subject: [PATCH 09/77] fix(http2): Fixed GOAWAY race for 'error' event. It seems that after a session GOAWAY, the stream can get an 'error' event instead of 'frameError' to be notified of this. There is therefore no proper error code signalling exactly what happened, so an error message string is being matched. Not ideal... Potentially, if the session keeps a set of its streams, the GOAWAY could be forwarded to the streams, but that's a lot more complex. --- lib/fetch-http2.ts | 75 ++++++++++++++++++++++++++++------------------ 1 file changed, 46 insertions(+), 29 deletions(-) diff --git a/lib/fetch-http2.ts b/lib/fetch-http2.ts index 3d6337f..87d6b00 100644 --- a/lib/fetch-http2.ts +++ b/lib/fetch-http2.ts @@ -87,6 +87,41 @@ async function fetchImpl( { const guard = syncGuard( reject, { catchAsync: true } ); + const tryRetryOnGoaway = ( ) => + { + // This could be due to a race-condition in GOAWAY. + // As of current Node.js, the 'goaway' event is emitted on the + // session before this event (at least frameError, probably + // 'error' too) is emitted, so we will know if we got it. + if ( + !raceConditionedGoaway.has( origin ) && + hasGotGoaway( h2session ) + ) + { + // Don't retry again due to potential GOAWAY + raceConditionedGoaway.add( origin ); + + // Since we've got the 'goaway' event, the + // context has already released the session, + // so a retry will create a new session. + resolve( + fetchImpl( + session, + request, + { signal, onTrailers }, + { + raceConditionedGoaway, + redirected, + timeoutAt, + } + ) + ); + + return true; + } + return false; + }; + stream.on( "aborted", guard( ( ..._whatever ) => { reject( makeAbortedError( ) ); @@ -94,6 +129,16 @@ async function fetchImpl( stream.on( "error", guard( ( err: Error ) => { + if ( + err && + ( < any >err ).code === "ERR_HTTP2_STREAM_ERROR" && + err.message && + err.message.includes( "NGHTTP2_REFUSED_STREAM" ) + ) + { + if ( tryRetryOnGoaway( ) ) + return; + } reject( err ); } ) ); @@ -105,36 +150,8 @@ async function fetchImpl( endStream ) { - // This could be due to a race-condition in GOAWAY. - // As of current Node.js, the 'goaway' event is - // emitted on the session before this event - // is emitted, so we will know if we got it. - if ( - !raceConditionedGoaway.has( origin ) && - hasGotGoaway( h2session ) - ) - { - // Don't retry again due to potential GOAWAY - raceConditionedGoaway.add( origin ); - - // Since we've got the 'goaway' event, the - // context has already released the session, - // so a retry will create a new session. 
- resolve( - fetchImpl( - session, - request, - { signal, onTrailers }, - { - raceConditionedGoaway, - redirected, - timeoutAt, - } - ) - ); - + if ( tryRetryOnGoaway( ) ) return; - } } reject( new Error( "Request failed" ) ); From e130ce2f8ec6114dcad02c8f921e15a47d504b77 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Mon, 14 Jan 2019 21:35:32 +0100 Subject: [PATCH 10/77] fix(http2): Unref stale session (marked for destruction) --- lib/context-http2.ts | 35 +++++++++++++++++++++++------------ lib/utils-http2.ts | 23 +++++++++++++++++++++-- 2 files changed, 44 insertions(+), 14 deletions(-) diff --git a/lib/context-http2.ts b/lib/context-http2.ts index 5ccf923..bc77c91 100644 --- a/lib/context-http2.ts +++ b/lib/context-http2.ts @@ -19,7 +19,12 @@ import { import { Request } from "./request"; import { Response, StreamResponse } from "./response"; import { makeOkError } from "./utils"; -import { setGotGoaway } from "./utils-http2"; +import { + isDestroyed, + MonkeyH2Session, + setDestroyed, + setGotGoaway, +} from "./utils-http2"; const { @@ -171,12 +176,17 @@ export class H2Context public deleteActiveSession( origin: string ): H2SessionItem | void { - if ( !this._h2sessions.has( origin ) ) + const sessionItem = this._h2sessions.get( origin ); + + if ( !sessionItem ) return; - const sessionItem = this._h2sessions.get( origin ); this._h2sessions.delete( origin ); + sessionItem.session.unref( ); + // Never re-ref, this session is over + setDestroyed( sessionItem.session ); + return sessionItem; } @@ -184,11 +194,11 @@ export class H2Context { const promises: Array< Promise< void > > = [ ]; - if ( !this._h2staleSessions.has( origin ) ) + const sessionSet = this._h2staleSessions.get( origin ); + + if ( !sessionSet ) return; - const sessionSet = - < Set< ClientHttp2Session > >this._h2staleSessions.get( origin ); this._h2staleSessions.delete( origin ); for ( const session of sessionSet ) @@ -348,24 +358,25 @@ export class H2Context const makeRefs = ( session: ClientHttp2Session ) => { - let counter = 1; // Begins ref'd + const monkeySession = < MonkeyH2Session >session; + monkeySession.__fetch_h2_refcount = 1; // Begins ref'd sessionRefs.ref = ( ) => { - if ( session.destroyed ) + if ( isDestroyed( session ) ) return; - if ( counter === 0 ) + if ( monkeySession.__fetch_h2_refcount === 0 ) // Go from unref'd to ref'd session.ref( ); - ++counter; + ++monkeySession.__fetch_h2_refcount; }; sessionRefs.unref = ( ) => { if ( session.destroyed ) return; - --counter; - if ( counter === 0 ) + --monkeySession.__fetch_h2_refcount; + if ( monkeySession.__fetch_h2_refcount === 0 ) // Go from ref'd to unref'd session.unref( ); }; diff --git a/lib/utils-http2.ts b/lib/utils-http2.ts index 68ea94f..0916a5f 100644 --- a/lib/utils-http2.ts +++ b/lib/utils-http2.ts @@ -1,11 +1,30 @@ import { ClientHttp2Session } from "http2"; + +export interface MonkeyH2Session extends ClientHttp2Session +{ + __fetch_h2_destroyed?: boolean; + __fetch_h2_goaway?: boolean; + __fetch_h2_refcount: number; +} + export function hasGotGoaway( session: ClientHttp2Session ) { - return !!( < any >session ).__fetch_h2_goaway; + return !!( < MonkeyH2Session >session ).__fetch_h2_goaway; } export function setGotGoaway( session: ClientHttp2Session ) { - ( < any >session ).__fetch_h2_goaway = true; + ( < MonkeyH2Session >session ).__fetch_h2_goaway = true; +} + +export function isDestroyed( session: ClientHttp2Session ) +{ + const monkeySession = < MonkeyH2Session >session; + return monkeySession.destroyed || 
monkeySession.__fetch_h2_destroyed; +} + +export function setDestroyed( session: ClientHttp2Session ) +{ + ( < MonkeyH2Session >session ).__fetch_h2_destroyed = true; } From be4d3d1f3590056fa319f721e429e9ab94c30b0f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Mon, 14 Jan 2019 21:36:57 +0100 Subject: [PATCH 11/77] test(debug): Added support for introspection H2 sessions By running under environment DEBUG_FETCH_H2, session state will be logged to stderr when the SIGUSR2 is received. --- lib/context-http2.ts | 41 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) diff --git a/lib/context-http2.ts b/lib/context-http2.ts index bc77c91..24fb848 100644 --- a/lib/context-http2.ts +++ b/lib/context-http2.ts @@ -68,6 +68,47 @@ export class H2Context { this._getDecoders = getDecoders; this._getSessionOptions = getSessionOptions; + + /* istanbul ignore next */ + if ( process.env.DEBUG_FETCH_H2 ) + { + const debug = ( line: string, ...args: Array< any > ) => + { + // tslint:disable-next-line + console.error( line, ...args ); + }; + + const printSession = ( origin: string, session: MonkeyH2Session ) => + { + debug( " Origin:", origin ); + debug( " Ref-counter:", session.__fetch_h2_refcount ); + debug( " Destroyed:", session.destroyed ); + debug( " Destroyed mark:", session.__fetch_h2_destroyed ); + }; + + process.on( "SIGUSR2", ( ) => + { + debug( "[Debug fetch-h2]: H2 sessions" ); + + debug( " Active sessions" ); + [ ...this._h2sessions.entries( ) ] + .forEach( ( [ origin, { session } ] ) => + { + printSession( origin, < MonkeyH2Session >session ); + } ); + + debug( " Stale sessions" ); + [ ...this._h2staleSessions.entries( ) ] + .forEach( ( [ origin, set ] ) => + { + [ ...set ] + .forEach( ( session ) => + { + printSession( origin, < MonkeyH2Session >session ); + } ); + } ); + } ); + } } public hasOrigin( origin: string ) From c33f6ab18198511acbca0a55d2bf031272e8a1d3 Mon Sep 17 00:00:00 2001 From: Andrew Betts Date: Mon, 28 Jan 2019 13:23:16 +0000 Subject: [PATCH 12/77] fix(ALPN): Fix inverted comparison --- lib/context-https.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/context-https.ts b/lib/context-https.ts index 42bd74c..22fb9e1 100644 --- a/lib/context-https.ts +++ b/lib/context-https.ts @@ -36,7 +36,7 @@ export function connectTLS( } ); const orderedProtocols = Buffer.concat( - ( _protocols.length === 0 ? _protocols : defaultMethod ) + ( _protocols.length !== 0 ? 
_protocols : defaultMethod ) .map( protocol => alpnProtocols[ protocol ] ) ); From 6c94589c57a8af13a0f92e466bd00547515daefe Mon Sep 17 00:00:00 2001 From: Niklas Korz Date: Mon, 21 Jan 2019 16:35:25 +0100 Subject: [PATCH 13/77] fix(spec): Make Response constructor parameters optional According to the fetch spec, all parameters of the Response constructor are optional: - https://developer.mozilla.org/en-US/docs/Web/API/Response/Response - https://fetch.spec.whatwg.org/#response-class --- lib/response.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/response.ts b/lib/response.ts index 198dba5..449c1b0 100644 --- a/lib/response.ts +++ b/lib/response.ts @@ -71,8 +71,8 @@ export class Response extends Body public readonly httpVersion: HttpVersion; constructor( - body: BodyTypes | Body | null, - init: Partial< ResponseInit >, + body: BodyTypes | Body | null = null, + init: Partial< ResponseInit > = { }, extra?: Partial< Extra > ) { From f8b94055041bf2b2434b4c4c01008fe1c830f2cc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Wed, 30 Jan 2019 00:14:10 +0100 Subject: [PATCH 14/77] fix(spec): Added Response 'status' and 'statusText' defaults --- lib/response.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/response.ts b/lib/response.ts index 449c1b0..7768d9d 100644 --- a/lib/response.ts +++ b/lib/response.ts @@ -124,11 +124,11 @@ export class Response extends Body }, status: { enumerable: true, - value: init.status, + value: init.status || 200, }, statusText: { enumerable: true, - value: init.statusText, + value: init.statusText || "", }, type: { enumerable: true, From 6dac9398fd31afe90e23bdb8d5c90cc7b8cbaab2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Wed, 30 Jan 2019 01:00:40 +0100 Subject: [PATCH 15/77] docs(package): Fixed package name (not only HTTP/2 any more) --- package.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/package.json b/package.json index 7a992ce..2e2871a 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "name": "fetch-h2", "version": "1.0.1", - "description": "HTTP/2-only Fetch API client for Node.js", + "description": "HTTP/1+2 Fetch API client for Node.js", "author": "Gustaf Räntilä", "license": "MIT", "bugs": { @@ -53,7 +53,7 @@ ], "devDependencies": { "@types/chai": "4.x", - "@types/execa": "^0.9.0", + "@types/execa": "0.x", "@types/from2": "2.x", "@types/get-stream": "3.x", "@types/mocha": "5.x", @@ -71,7 +71,7 @@ "semantic-release": "15.x", "source-map-support": "0.x", "travis-deploy-once": "5.x", - "ts-node": "7.x", + "ts-node": "8.x", "tslint": "5.x", "typescript": "3.x" }, From f6bf42b0e060af3a444a844a4247c207b756cccf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Tue, 12 Feb 2019 15:49:36 +0100 Subject: [PATCH 16/77] fix(test): Fixed unit test build issue --- package.json | 2 +- test/lib/server-common.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index 2e2871a..2cdafbf 100644 --- a/package.json +++ b/package.json @@ -57,7 +57,7 @@ "@types/from2": "2.x", "@types/get-stream": "3.x", "@types/mocha": "5.x", - "@types/node": "10.x", + "@types/node": "11.x", "@types/through2": "2.x", "chai": "4.x", "commitizen": "3.x", diff --git a/test/lib/server-common.ts b/test/lib/server-common.ts index b0baf3b..7220f95 100644 --- a/test/lib/server-common.ts +++ b/test/lib/server-common.ts @@ -54,7 +54,7 @@ export abstract class Server .then( ( ) => { 
const address = this._server.address( ); - if ( typeof address === "string" ) + if ( !address || typeof address === "string" ) return 0; return address.port; } ) From 54550b8c7e215794116dac4b50c3564512208c65 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Thu, 14 Feb 2019 23:57:30 +0100 Subject: [PATCH 17/77] fix(headers): Allow forbidden headers in Response's fix #30 --- README.md | 1 + lib/context-http2.ts | 3 ++- lib/core.ts | 1 + lib/fetch-http1.ts | 1 + lib/fetch-http2.ts | 1 + lib/headers.ts | 1 + lib/response.ts | 8 +++++++- 7 files changed, 14 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 75bf3f1..847c173 100644 --- a/README.md +++ b/README.md @@ -158,6 +158,7 @@ These are features in `fetch-h2`, that don't exist in the Fetch API. Some things * The `Request.clone()` member function has an optional `url` argument for the cloned `Request`. * The response `text()` and `arrayBuffer()` has an optional argument `allowIncomplete` which defaults to `false`. If set to `true` these function will return incomplete bodies, i.e. "as much as was read" before the stream was prematurely closed (disconnected). If integrity checks are enabled, the functions will throw anyway if the body is incomplete. * The `Request` class (options to `fetch`) has an extra property `allowForbiddenHeaders`, which defaults to `false`. + * The `Response` class also has an extra property `allowForbiddenHeaders`, which defaults to `false` (or to the value of the `Request` if it was constructed through a `fetch` call, which is the common case). * The response object has an extra property `httpVersion` which is either `1` or `2` (numbers), depending on what was negotiated with the server. diff --git a/lib/context-http2.ts b/lib/context-http2.ts index 24fb848..e5aea6a 100644 --- a/lib/context-http2.ts +++ b/lib/context-http2.ts @@ -359,7 +359,8 @@ export class H2Context responseHeaders, false, { }, - 2 + 2, + false ); resolve( response ); diff --git a/lib/core.ts b/lib/core.ts index 46f4406..4256662 100644 --- a/lib/core.ts +++ b/lib/core.ts @@ -154,6 +154,7 @@ export interface ResponseInit status: number; statusText: string; headers: RawHeaders | Headers; + allowForbiddenHeaders: boolean; } export class FetchError extends Error diff --git a/lib/fetch-http1.ts b/lib/fetch-http1.ts index e2e1e75..228070e 100644 --- a/lib/fetch-http1.ts +++ b/lib/fetch-http1.ts @@ -184,6 +184,7 @@ export async function fetchImpl( statusText: res.statusMessage, }, 1, + input.allowForbiddenHeaders, integrity ) ); diff --git a/lib/fetch-http2.ts b/lib/fetch-http2.ts index 87d6b00..95531ef 100644 --- a/lib/fetch-http2.ts +++ b/lib/fetch-http2.ts @@ -267,6 +267,7 @@ async function fetchImpl( : extra.redirected.length > 0, { }, 2, + input.allowForbiddenHeaders, integrity ) ); diff --git a/lib/headers.ts b/lib/headers.ts index 105d097..b19538d 100644 --- a/lib/headers.ts +++ b/lib/headers.ts @@ -234,6 +234,7 @@ export class GuardedHeaders extends Headers constructor( guard: GuardTypes, init?: RawHeaders | Headers ) { super( ( _guard = guard, init ) ); + _guard = null; } } diff --git a/lib/response.ts b/lib/response.ts index 7768d9d..8c59539 100644 --- a/lib/response.ts +++ b/lib/response.ts @@ -78,7 +78,11 @@ export class Response extends Body { super( ); - const headers = ensureHeaders( init.headers ); + const headers = ensureHeaders( + init.allowForbiddenHeaders + ? 
new GuardedHeaders( "none", init.headers ) + : init.headers + ); const _extra = < Partial< Extra > >( extra || { } ); @@ -271,6 +275,7 @@ export class StreamResponse extends Response redirected: boolean, init: Partial< ResponseInit >, httpVersion: HttpVersion, + allowForbiddenHeaders: boolean, integrity?: string ) { @@ -282,6 +287,7 @@ export class StreamResponse extends Response ), { ...init, + allowForbiddenHeaders, ...( httpVersion === 1 ? makeInitHttp1( headers ) From 39b9f35f46e1167f8318965077b2f2ae4fb243fb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Mon, 18 Feb 2019 22:50:01 +0100 Subject: [PATCH 18/77] fix(spec): Allow 'set-cookie' header if chosen to fix #30 --- lib/fetch-http1.ts | 7 +++++-- lib/fetch-http2.ts | 7 +++++-- lib/response.ts | 29 +++++++++++++++++++------- test/fetch-h2/context.ts | 45 ++++++++++++++++++++++++++++++++++++++++ 4 files changed, 76 insertions(+), 12 deletions(-) diff --git a/lib/fetch-http1.ts b/lib/fetch-http1.ts index 228070e..7fdcd91 100644 --- a/lib/fetch-http1.ts +++ b/lib/fetch-http1.ts @@ -163,8 +163,11 @@ export async function fetchImpl( session.cookieJar.setCookies( setCookies, url ); } - delete headers[ "set-cookie" ]; - delete headers[ "set-cookie2" ]; + if ( !input.allowForbiddenHeaders ) + { + delete headers[ "set-cookie" ]; + delete headers[ "set-cookie2" ]; + } if ( isRedirected && !location ) return reject( makeIllegalRedirectError( ) ); diff --git a/lib/fetch-http2.ts b/lib/fetch-http2.ts index 95531ef..3b7621d 100644 --- a/lib/fetch-http2.ts +++ b/lib/fetch-http2.ts @@ -249,8 +249,11 @@ async function fetchImpl( session.cookieJar.setCookies( setCookies, url ); } - delete headers[ "set-cookie" ]; - delete headers[ "set-cookie2" ]; + if ( !input.allowForbiddenHeaders ) + { + delete headers[ "set-cookie" ]; + delete headers[ "set-cookie2" ]; + } if ( isRedirected && !location ) return reject( makeIllegalRedirectError( ) ); diff --git a/lib/response.ts b/lib/response.ts index 8c59539..671b30f 100644 --- a/lib/response.ts +++ b/lib/response.ts @@ -179,9 +179,14 @@ export class Response extends Body } } -function makeHeadersFromH2Headers( headers: IncomingHttpHeaders ): Headers +function makeHeadersFromH2Headers( + headers: IncomingHttpHeaders, + allowForbiddenHeaders: boolean +) +: Headers { - const out = new GuardedHeaders( "response" ); + const out = new GuardedHeaders( + allowForbiddenHeaders ? 
"none" : "response" ); for ( const key of Object.keys( headers ) ) { @@ -199,21 +204,29 @@ function makeHeadersFromH2Headers( headers: IncomingHttpHeaders ): Headers return out; } -function makeInitHttp1( inHeaders: IncomingHttpHeaders ) +function makeInitHttp1( + inHeaders: IncomingHttpHeaders, + allowForbiddenHeaders: boolean +) : Partial< ResponseInit > { // Headers in HTTP/2 are compatible with HTTP/1 (colon illegal in HTTP/1) - const headers = makeHeadersFromH2Headers( inHeaders ); + const headers = + makeHeadersFromH2Headers( inHeaders, allowForbiddenHeaders ); return { headers }; } -function makeInitHttp2( inHeaders: IncomingHttpHeaders ) +function makeInitHttp2( + inHeaders: IncomingHttpHeaders, + allowForbiddenHeaders: boolean +) : Partial< ResponseInit > { const status = parseInt( "" + inHeaders[ HTTP2_HEADER_STATUS ], 10 ); const statusText = ""; // Not supported in H2 - const headers = makeHeadersFromH2Headers( inHeaders ); + const headers = + makeHeadersFromH2Headers( inHeaders, allowForbiddenHeaders ); return { status, statusText, headers }; } @@ -290,8 +303,8 @@ export class StreamResponse extends Response allowForbiddenHeaders, ...( httpVersion === 1 - ? makeInitHttp1( headers ) - : makeInitHttp2( headers ) + ? makeInitHttp1( headers, allowForbiddenHeaders ) + : makeInitHttp2( headers, allowForbiddenHeaders ) ), }, makeExtra( httpVersion, url, redirected, integrity ) diff --git a/test/fetch-h2/context.ts b/test/fetch-h2/context.ts index e217c6b..f365788 100644 --- a/test/fetch-h2/context.ts +++ b/test/fetch-h2/context.ts @@ -219,6 +219,51 @@ describe( `context (${version} over ${proto.replace( ":", "" )})`, function( ) await server.shutdown( ); } ); + + it( "shouldn't be able to read cookie headers be default", async ( ) => + { + const { server, port } = await makeServer( ); + + const { disconnectAll, fetch } = context( { ...cycleOpts } ); + + const response = await fetch( + `${proto}//localhost:${port}/set-cookie`, + { + json: [ "a=b" , "c=d" ], + method: "POST", + } + ); + + expect( response.headers.get( "set-cookie" ) ).to.be.null; + expect( response.headers.get( "set-cookie2" ) ).to.be.null; + + disconnectAll( ); + + await server.shutdown( ); + } ); + + it( "should be able to read cookie headers if allowed", async ( ) => + { + const { server, port } = await makeServer( ); + + const { disconnectAll, fetch } = context( { ...cycleOpts } ); + + const response = await fetch( + `${proto}//localhost:${port}/set-cookie`, + { + allowForbiddenHeaders: true, + json: [ "a=b" , "c=d" ], + method: "POST", + } + ); + + expect( response.headers.get( "set-cookie" ) ) + .to.equal( "a=b,c=d" ); + + disconnectAll( ); + + await server.shutdown( ); + } ); } ); describe( "disconnection", ( ) => From cc340743792709d3ba94c13c40878a551ee0ddc5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Mon, 18 Feb 2019 22:55:16 +0100 Subject: [PATCH 19/77] build(mocha): Bumped to mocha 6 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 2cdafbf..502b5a3 100644 --- a/package.json +++ b/package.json @@ -65,7 +65,7 @@ "cz-conventional-changelog": "2.x", "execa": "^1.0.0", "from2": "2.x", - "mocha": "5.x", + "mocha": "6.x", "nyc": "13.x", "rimraf": "2.x", "semantic-release": "15.x", From eb67cfa12eab3ca65d8ec707e6662f098bb8fb52 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Mon, 11 Mar 2019 22:02:03 +0100 Subject: [PATCH 20/77] docs(readme): Fixed travis badge to point to master --- 
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.md b/README.md
index 847c173..e38ae5c 100644
--- a/README.md
+++ b/README.md
@@ -381,7 +381,7 @@ const response = await fetch( url, { method, body } );
 
 [npm-image]: https://img.shields.io/npm/v/fetch-h2.svg
 [npm-url]: https://npmjs.org/package/fetch-h2
-[travis-image]: https://img.shields.io/travis/grantila/fetch-h2.svg
+[travis-image]: https://img.shields.io/travis/grantila/fetch-h2/master.svg
 [travis-url]: https://travis-ci.org/grantila/fetch-h2
 [coverage-image]: https://coveralls.io/repos/github/grantila/fetch-h2/badge.svg?branch=master
 [coverage-url]: https://coveralls.io/github/grantila/fetch-h2?branch=master

From 65d3a22904bc45cb2b0e0030a5afe37ed6a3d8a0 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?=
Date: Mon, 11 Mar 2019 22:07:50 +0100
Subject: [PATCH 21/77] feat(compression): Added native Brotli support for Node.js 11.7+

---
 README.md                |  6 ++---
 lib/fetch-common.ts      | 58 +++++++++++++++++++++++++++++++++++-----
 lib/response.ts          | 11 ++++++++
 lib/utils.ts             | 29 ++++++++++++++++++++
 test/fetch-h2/index.ts   | 38 ++++++++++++++++++++++++++
 test/lib/server-http1.ts |  4 ++-
 test/lib/server-http2.ts |  4 ++-
 7 files changed, 138 insertions(+), 12 deletions(-)

diff --git a/README.md b/README.md
index e38ae5c..1354625 100644
--- a/README.md
+++ b/README.md
@@ -19,7 +19,7 @@ Regardless of whether you're actually interested in the Fetch API per se or not,
 
 `fetch-h2` supports cookies (per-context, see below), so when the server sends 'set-cookie' headers, they are saved and automatically re-sent, even after disconnect. They are however only persisted in-memory.
 
-By default, `fetch-h2` will accept `gzip` and `deflate` encodings, and decode transparently. If you also want to allow Brotli (`br`), use the [`fetch-h2-br`](https://www.npmjs.com/package/fetch-h2-br) package.
+By default, `fetch-h2` will accept `gzip` and `deflate` encodings (and Brotli `br` if running on Node.js 11.7 or later), and decode transparently. If you want to allow Brotli for older versions of Node.js, use the [`fetch-h2-br`](https://www.npmjs.com/package/fetch-h2-br) package.
 
 
 ## Releases
@@ -278,9 +278,9 @@ where `Cookie` is a [`tough-cookie` Cookie](https://www.npmjs.com/package/tough-
 
 ### Content encodings (compression)
 
-By default, `gzip` and `deflate` are supported.
+By default, `gzip` and `deflate` are supported, and `br` (Brotli) if running on Node.js 11.7+.
 
-`decoders` can be an array of custom decoders, such as [`fetch-h2-br`](https://www.npmjs.com/package/fetch-h2-br) which adds Brotli content decoding support.
+`decoders` can be an array of custom decoders, such as [`fetch-h2-br`](https://www.npmjs.com/package/fetch-h2-br) which adds Brotli content decoding support for older versions of Node.js (< 11.7).
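
As a minimal sketch of what such a custom decoder can look like, assuming the `{ name, decode }` decoder shape that `fetch-h2` pipes response streams through (the `brotliDecoder` name is illustrative only, and on Node.js 11.7+ the built-in `br` support makes this particular decoder redundant):

```ts
import { createBrotliDecompress } from "zlib";
import { context } from "fetch-h2";

// A custom decoder: decode( ) receives the raw (encoded) response stream
// and returns a stream of decoded data.
const brotliDecoder = {
	name: "br",
	decode: ( stream: NodeJS.ReadableStream ) =>
		stream.pipe( createBrotliDecompress( ) ),
};

// Decoders registered on a context apply to every fetch made through it.
const { fetch } = context( { decoders: [ brotliDecoder ] } );
```

On older Node.js versions, the pre-packaged [`fetch-h2-br`](https://www.npmjs.com/package/fetch-h2-br) decoder serves the same purpose.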
### Low-level session configuration diff --git a/lib/fetch-common.ts b/lib/fetch-common.ts index 783e9ac..0e8f02e 100644 --- a/lib/fetch-common.ts +++ b/lib/fetch-common.ts @@ -6,6 +6,7 @@ import { Finally, rethrow } from "already"; import { BodyInspector } from "./body"; import { AbortError, + Decoder, FetchInit, SimpleSession, TimeoutError, @@ -13,7 +14,7 @@ import { import { Headers, RawHeaders } from "./headers"; import { Request } from "./request"; import { Response } from "./response"; -import { arrayify } from "./utils"; +import { arrayify, hasBuiltinBrotli } from "./utils"; const { // Required for a request @@ -62,6 +63,54 @@ export interface TimeoutInfo clear: ( ) => void; } + +interface AcceptEncodings +{ + name: string; + score: number; +} + +const makeDefaultEncodings = ( mul = 1 ) => + hasBuiltinBrotli( ) + ? [ + { name: "br", score: 1.0 * mul }, + { name: "gzip", score: 0.8 * mul }, + { name: "deflate", score: 0.5 * mul }, + ] + : [ + { name: "gzip", score: 1.0 * mul }, + { name: "deflate", score: 0.5 * mul }, + ]; + +const defaultEncodings = makeDefaultEncodings( ); +const fallbackEncodings = makeDefaultEncodings( 0.8 ); + +const stringifyEncoding = ( acceptEncoding: AcceptEncodings ) => + `${acceptEncoding.name};q=${acceptEncoding.score}`; + +const stringifyEncodings = ( accepts: ReadonlyArray< AcceptEncodings > ) => + accepts + .map( acceptEncoding => stringifyEncoding( acceptEncoding ) ) + .join( ", " ); + +function getEncodings( contentDecoders: ReadonlyArray< Decoder > ): string +{ + if ( contentDecoders.length === 0 ) + return stringifyEncodings( defaultEncodings ); + + const makeScore = ( index: number ) => + 1 - ( index / ( contentDecoders.length ) ) * 0.2; + + return stringifyEncodings( + [ + ...contentDecoders.map( ( { name }, index ) => + ( { name, score: makeScore( index ) } ) + ), + ...fallbackEncodings, + ] + ); +} + export async function setupFetch( session: SimpleSession, request: Request, @@ -94,12 +143,7 @@ export async function setupFetch( const contentDecoders = session.contentDecoders( ); - const acceptEncoding = - contentDecoders.length === 0 - ? 
"gzip;q=1.0, deflate;q=0.5" - : contentDecoders - .map( decoder => `${decoder.name};q=1.0` ) - .join( ", " ) + ", gzip;q=0.8, deflate;q=0.5"; + const acceptEncoding = getEncodings( contentDecoders ); if ( headers.has( HTTP2_HEADER_COOKIE ) ) cookies.push( ...arrayify( headers.get( HTTP2_HEADER_COOKIE ) ) ); diff --git a/lib/response.ts b/lib/response.ts index 671b30f..ae37607 100644 --- a/lib/response.ts +++ b/lib/response.ts @@ -4,6 +4,7 @@ import { } from "http2"; import { + createBrotliDecompress, createGunzip, createInflate, } from "zlib"; @@ -25,6 +26,10 @@ import { ResponseTypes, } from "./core"; +import { + hasBuiltinBrotli, +} from "./utils"; + import { ensureHeaders, GuardedHeaders, @@ -263,6 +268,12 @@ function handleEncoding( stream.pipe( createGunzip( ) ), }; + if ( hasBuiltinBrotli( ) ) + { + decoders.br = ( stream: NodeJS.ReadableStream ) => + stream.pipe( createBrotliDecompress( ) ); + } + contentDecoders.forEach( decoder => { decoders[ decoder.name ] = decoder.decode; diff --git a/lib/utils.ts b/lib/utils.ts index f399902..7cbb7b4 100644 --- a/lib/utils.ts +++ b/lib/utils.ts @@ -1,4 +1,5 @@ import { URL } from "url"; +import { createBrotliCompress } from "zlib"; export function arrayify< T >( value: @@ -63,3 +64,31 @@ export function parseInput( url: string ) url, }; } + +export const identity = < T >( t: T ) => t; + +export function uniq< T >( arr: ReadonlyArray< T > ): Array< T >; +export function uniq< T, U >( arr: ReadonlyArray< T >, pred: ( t: T ) => U ) +: Array< T >; +export function uniq< T, U >( arr: ReadonlyArray< T >, pred?: ( t: T ) => U ) +: Array< T > +{ + if ( !pred ) + return Array.from( new Set< T >( arr ) ); + + const known = new Set< U >( ); + return arr.filter( value => + { + const u = pred( value ); + const first = !known.has( u ); + + known.add( u ); + + return first; + } ); +} + +export function hasBuiltinBrotli( ) +{ + return typeof createBrotliCompress === "function"; +} diff --git a/test/fetch-h2/index.ts b/test/fetch-h2/index.ts index 7701bf4..8d8ab5a 100644 --- a/test/fetch-h2/index.ts +++ b/test/fetch-h2/index.ts @@ -10,6 +10,8 @@ import { TestData } from "../lib/server-common"; import { makeMakeServer } from "../lib/server-helpers"; import { cleanUrl, createIntegrity } from "../lib/utils"; +import { hasBuiltinBrotli } from "../../lib/utils"; + import { context, DataBody, @@ -604,6 +606,9 @@ describe( `(${version} over ${proto.replace( ":", "" )})`, ( ) => ) ); + expect( response.headers.get( "content-encoding" ) ) + .to.equal( "gzip" ); + const stream = await response.readable( ); const data = await getStreamAsBuffer( stream ); @@ -629,6 +634,39 @@ describe( `(${version} over ${proto.replace( ":", "" )})`, ( ) => ) ); + expect( response.headers.get( "content-encoding" ) ) + .to.equal( "deflate" ); + + const stream = await response.readable( ); + + const data = await getStreamAsBuffer( stream ); + + expect( JSON.parse( data.toString( ) ) ).to.deep.equal( testData ); + + await server.shutdown( ); + } ); + + it( "should accept content-encoding (br)", async ( ) => + { + if ( !hasBuiltinBrotli( ) ) + return; + + const { server, port } = await makeServer( ); + + const testData = { foo: "bar" }; + + const response = ensureStatusSuccess( + await fetch( + `${proto}//localhost:${port}/compressed/br`, + { + json: testData, + method: "POST", + } + ) + ); + + expect( response.headers.get( "content-encoding" ) ).to.equal( "br" ); + const stream = await response.readable( ); const data = await getStreamAsBuffer( stream ); diff --git a/test/lib/server-http1.ts 
b/test/lib/server-http1.ts index b9e7108..0f445c8 100644 --- a/test/lib/server-http1.ts +++ b/test/lib/server-http1.ts @@ -14,7 +14,7 @@ import { import { Socket } from "net"; import { createHash } from "crypto"; -import { createDeflate, createGzip } from "zlib"; +import { createBrotliCompress, createDeflate, createGzip } from "zlib"; import { delay } from "already"; import { buffer as getStreamAsBuffer } from "get-stream"; @@ -228,6 +228,8 @@ export class ServerHttp1 extends TypedServer< HttpServer | HttpsServer > ? createGzip( ) : encoding === "deflate" ? createDeflate( ) + : encoding === "br" + ? createBrotliCompress( ) : null; const responseHeaders = { diff --git a/test/lib/server-http2.ts b/test/lib/server-http2.ts index 420c431..2c2fcee 100644 --- a/test/lib/server-http2.ts +++ b/test/lib/server-http2.ts @@ -10,7 +10,7 @@ import { } from "http2"; import { createHash } from "crypto"; -import { createDeflate, createGzip } from "zlib"; +import { createBrotliCompress, createDeflate, createGzip } from "zlib"; import { delay } from "already"; import { buffer as getStreamAsBuffer } from "get-stream"; @@ -236,6 +236,8 @@ export class ServerHttp2 extends TypedServer< Http2Server > ? createGzip( ) : encoding === "deflate" ? createDeflate( ) + : encoding === "br" + ? createBrotliCompress( ) : null; const responseHeaders = { From 974741297014c1f5b133ec85327b80e16bbd9a14 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Mon, 11 Mar 2019 22:08:34 +0100 Subject: [PATCH 22/77] test(test): Made unit test server shutdown await socket close --- test/lib/server-common.ts | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/test/lib/server-common.ts b/test/lib/server-common.ts index 7220f95..19b7455 100644 --- a/test/lib/server-common.ts +++ b/test/lib/server-common.ts @@ -68,9 +68,14 @@ export abstract class Server public async shutdown( ): Promise< void > { await this._shutdown( ); - return new Promise< void >( ( resolve, _reject ) => + return new Promise< void >( ( resolve, reject ) => { - this._server.close( resolve ); + this._server.close( ( err?: Error ) => + { + if ( err ) + return reject( err ); + resolve( ); + } ); } ); } From bbcc0b42fa7a5987dd905e213ef8ee5900955d38 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Tue, 12 Mar 2019 08:12:21 +0100 Subject: [PATCH 23/77] fix(deps): Bumped get-stream --- lib/body.ts | 8 ++++---- package.json | 3 +-- test/fetch-h2/body.ts | 10 +++++----- test/fetch-h2/index.ts | 8 ++++---- test/lib/server-http1.ts | 6 +++--- test/lib/server-http2.ts | 8 ++++---- 6 files changed, 21 insertions(+), 22 deletions(-) diff --git a/lib/body.ts b/lib/body.ts index c260cbc..0cd079b 100644 --- a/lib/body.ts +++ b/lib/body.ts @@ -1,7 +1,7 @@ import { createHash } from "crypto"; import { tap } from "already"; -import { buffer as getStreamAsBuffer } from "get-stream"; +import getStream from "get-stream"; import * as through2 from "through2"; import * as toArrayBuffer from "to-arraybuffer"; @@ -69,7 +69,7 @@ export class Body implements IBody return this.validateIntegrity( emptyBuffer, allowIncomplete ); else if ( isStream( this._body ) ) - return getStreamAsBuffer( < NodeJS.ReadableStream >this._body ) + return getStream.buffer( < NodeJS.ReadableStream >this._body ) .then( buffer => this.validateIntegrity( buffer, allowIncomplete ) ) @@ -100,7 +100,7 @@ export class Body implements IBody ) .then( ( ) => this._body ); else if ( isStream( this._body ) ) - return getStreamAsBuffer( < NodeJS.ReadableStream 
>this._body ) + return getStream.buffer( < NodeJS.ReadableStream >this._body ) .then( tap( buffer => < any >this.validateIntegrity( buffer, false ) ) ) @@ -125,7 +125,7 @@ export class Body implements IBody ) .then( ( ) => < string >< BodyTypes >this._body ); else if ( isStream( this._body ) ) - return getStreamAsBuffer( < NodeJS.ReadableStream >this._body ) + return getStream.buffer( < NodeJS.ReadableStream >this._body ) .then( tap( buffer => < any >this.validateIntegrity( buffer, allowIncomplete ) ) ) diff --git a/package.json b/package.json index 502b5a3..4c69f80 100644 --- a/package.json +++ b/package.json @@ -55,7 +55,6 @@ "@types/chai": "4.x", "@types/execa": "0.x", "@types/from2": "2.x", - "@types/get-stream": "3.x", "@types/mocha": "5.x", "@types/node": "11.x", "@types/through2": "2.x", @@ -79,7 +78,7 @@ "@types/tough-cookie": "2.x", "already": "1.x", "callguard": "1.x", - "get-stream": "4.x", + "get-stream": "5.x", "through2": "3.x", "to-arraybuffer": "1.x", "tough-cookie": "3.x" diff --git a/test/fetch-h2/body.ts b/test/fetch-h2/body.ts index 29376dc..04402e8 100644 --- a/test/fetch-h2/body.ts +++ b/test/fetch-h2/body.ts @@ -1,6 +1,6 @@ import { expect } from "chai"; import { createHash } from "crypto"; -import { buffer as getStreamAsBuffer } from "get-stream"; +import getStream from "get-stream"; import "mocha"; import * as through2 from "through2"; @@ -499,21 +499,21 @@ describe( "body", ( ) => it( "handle null", async ( ) => { const body = new DataBody( null ); - const data = await getStreamAsBuffer( await body.readable( ) ); + const data = await getStream.buffer( await body.readable( ) ); expect( data.toString( ) ).to.equal( "" ); } ); it( "handle string", async ( ) => { const body = new DataBody( "foo" ); - const data = await getStreamAsBuffer( await body.readable( ) ); + const data = await getStream.buffer( await body.readable( ) ); expect( data.toString( ) ).to.equal( "foo" ); } ); it( "handle buffer", async ( ) => { const body = new DataBody( Buffer.from( "foo" ) ); - const data = await getStreamAsBuffer( await body.readable( ) ); + const data = await getStream.buffer( await body.readable( ) ); expect( data.toString( ) ).to.equal( "foo" ); } ); @@ -522,7 +522,7 @@ describe( "body", ( ) => const stream = through2( ); stream.end( "foo" ); const body = new StreamBody( stream ); - const data = await getStreamAsBuffer( await body.readable( ) ); + const data = await getStream.buffer( await body.readable( ) ); expect( data.toString( ) ).to.equal( "foo" ); } ); } ); diff --git a/test/fetch-h2/index.ts b/test/fetch-h2/index.ts index 8d8ab5a..9c235c9 100644 --- a/test/fetch-h2/index.ts +++ b/test/fetch-h2/index.ts @@ -2,7 +2,7 @@ import { defer, delay } from "already"; import { expect } from "chai"; import { createHash } from "crypto"; import * as from2 from "from2"; -import { buffer as getStreamAsBuffer } from "get-stream"; +import getStream from "get-stream"; import "mocha"; import * as through2 from "through2"; @@ -611,7 +611,7 @@ describe( `(${version} over ${proto.replace( ":", "" )})`, ( ) => const stream = await response.readable( ); - const data = await getStreamAsBuffer( stream ); + const data = await getStream.buffer( stream ); expect( JSON.parse( data.toString( ) ) ).to.deep.equal( testData ); @@ -639,7 +639,7 @@ describe( `(${version} over ${proto.replace( ":", "" )})`, ( ) => const stream = await response.readable( ); - const data = await getStreamAsBuffer( stream ); + const data = await getStream.buffer( stream ); expect( JSON.parse( data.toString( ) ) 
).to.deep.equal( testData ); @@ -669,7 +669,7 @@ describe( `(${version} over ${proto.replace( ":", "" )})`, ( ) => const stream = await response.readable( ); - const data = await getStreamAsBuffer( stream ); + const data = await getStream.buffer( stream ); expect( JSON.parse( data.toString( ) ) ).to.deep.equal( testData ); diff --git a/test/lib/server-http1.ts b/test/lib/server-http1.ts index 0f445c8..c2cf53b 100644 --- a/test/lib/server-http1.ts +++ b/test/lib/server-http1.ts @@ -17,7 +17,7 @@ import { createHash } from "crypto"; import { createBrotliCompress, createDeflate, createGzip } from "zlib"; import { delay } from "already"; -import { buffer as getStreamAsBuffer } from "get-stream"; +import getStream from "get-stream"; import { ignoreError, @@ -135,7 +135,7 @@ export class ServerHttp1 extends TypedServer< HttpServer | HttpsServer > [ HTTP2_HEADER_SET_COOKIE ]: [ ], }; - const data = await getStreamAsBuffer( request ); + const data = await getStream.buffer( request ); const json = JSON.parse( data.toString( ) ); json.forEach( ( cookie: any ) => { @@ -179,7 +179,7 @@ export class ServerHttp1 extends TypedServer< HttpServer | HttpsServer > ":status": 200, }; - const data = await getStreamAsBuffer( request ); + const data = await getStream.buffer( request ); const json = JSON.parse( data.toString( ) ); sendHeaders( responseHeaders ); diff --git a/test/lib/server-http2.ts b/test/lib/server-http2.ts index 2c2fcee..1c2ac95 100644 --- a/test/lib/server-http2.ts +++ b/test/lib/server-http2.ts @@ -13,7 +13,7 @@ import { createHash } from "crypto"; import { createBrotliCompress, createDeflate, createGzip } from "zlib"; import { delay } from "already"; -import { buffer as getStreamAsBuffer } from "get-stream"; +import getStream from "get-stream"; import { ignoreError, @@ -109,7 +109,7 @@ export class ServerHttp2 extends TypedServer< Http2Server > [ HTTP2_HEADER_SET_COOKIE ]: [ ], }; - const data = await getStreamAsBuffer( stream ); + const data = await getStream.buffer( stream ); const json = JSON.parse( data.toString( ) ); json.forEach( ( cookie: any ) => { @@ -151,7 +151,7 @@ export class ServerHttp2 extends TypedServer< Http2Server > ":status": 200, }; - const data = await getStreamAsBuffer( stream ); + const data = await getStream.buffer( stream ); const json = JSON.parse( data.toString( ) ); stream.once( "wantTrailers", ( ) => @@ -199,7 +199,7 @@ export class ServerHttp2 extends TypedServer< Http2Server > ":status": 200, }; - const data = await getStreamAsBuffer( stream ); + const data = await getStream.buffer( stream ); const json = JSON.parse( data.toString( ) ); json.forEach( ( pushable: any ) => From 5e89905e3c5105cd9502a0d98c08b83aba566e8d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Tue, 12 Mar 2019 20:00:13 +0100 Subject: [PATCH 24/77] build(tests): mocha -> jest --- jest.config.js | 6 ++ lib/body.ts | 8 +- package.json | 22 ++--- test/fetch-h2/body.ts | 118 +++++++++++++------------- test/fetch-h2/context.ts | 52 ++++++------ test/fetch-h2/event-loop-reference.ts | 9 +- test/fetch-h2/httpbin.ts | 28 +++--- test/fetch-h2/index.ts | 112 ++++++++++++------------ test/lib/server-helpers.ts | 5 +- test/mocha.opts | 3 - 10 files changed, 176 insertions(+), 187 deletions(-) create mode 100644 jest.config.js delete mode 100644 test/mocha.opts diff --git a/jest.config.js b/jest.config.js new file mode 100644 index 0000000..3e84f54 --- /dev/null +++ b/jest.config.js @@ -0,0 +1,6 @@ +module.exports = { + preset: 'ts-jest', + testEnvironment: 'node', + 
testMatch: ['**/*.ts'], + roots: ['/test/fetch-h2/'], +}; diff --git a/lib/body.ts b/lib/body.ts index 0cd079b..4005865 100644 --- a/lib/body.ts +++ b/lib/body.ts @@ -224,10 +224,14 @@ export class Body implements IBody const { algorithm, hash: expectedHash } = parseIntegrity( this._integrity ); + // jest (I presume) modifies ArrayBuffer, breaking instanceof + const instanceOfArrayBuffer = ( val: any ) => + val && val.constructor && val.constructor.name === "ArrayBuffer"; + const hash = createHash( algorithm ) .update( - data instanceof ArrayBuffer - ? new DataView( data ) as any + instanceOfArrayBuffer( data ) + ? new DataView( data ) : < Buffer >data ) .digest( "base64" ); diff --git a/package.json b/package.json index 4c69f80..9700c37 100644 --- a/package.json +++ b/package.json @@ -20,15 +20,11 @@ "scripts": { "build": "./node_modules/.bin/rimraf dist && ./node_modules/.bin/tsc -p .", "lint": "node_modules/.bin/tslint --project .", - "mocha": "node_modules/.bin/mocha --bail --check-leaks dist/test", - "mocha:debug": "node_modules/.bin/mocha --inspect-brk dist/test", - "test": "npm run lint && node_modules/.bin/nyc --require source-map-support/register npm run mocha", - "testfast": "node_modules/.bin/nyc --require source-map-support/register node_modules/.bin/_mocha -- --bail --check-leaks -i --grep nghttp2.org dist/test", - "test-nocov": "node_modules/.bin/mocha --bail --check-leaks dist/test", - "buildtest": "npm run build && npm run test-nocov", - "buildtestfast": "npm run build && node_modules/.bin/mocha -i --grep nghttp2.org dist/test", + "jest": "node_modules/.bin/jest", + "jest:debug": "node --inspect-brk node_modules/.bin/jest", + "test": "npm run lint && npm run jest --coverage", + "buildtest": "npm run build && npm run jest", "buildtestcov": "npm run build && npm run test", - "buildtestfastcov": "npm run build && npm run testfast", "coverage": "node_modules/.bin/nyc report --reporter=html", "version": "./node_modules/.bin/ts-node scripts/version-update.ts && npm run buildtest && scripts/version-git-add.sh", "prepack": "npm run build && npm run test", @@ -52,24 +48,22 @@ "typescript" ], "devDependencies": { - "@types/chai": "4.x", "@types/execa": "0.x", "@types/from2": "2.x", - "@types/mocha": "5.x", + "@types/jest": "24.x", "@types/node": "11.x", "@types/through2": "2.x", - "chai": "4.x", "commitizen": "3.x", "coveralls": "3.x", "cz-conventional-changelog": "2.x", - "execa": "^1.0.0", + "execa": "1.x", "from2": "2.x", - "mocha": "6.x", + "jest": "24.x", "nyc": "13.x", "rimraf": "2.x", "semantic-release": "15.x", - "source-map-support": "0.x", "travis-deploy-once": "5.x", + "ts-jest": "24.x", "ts-node": "8.x", "tslint": "5.x", "typescript": "3.x" diff --git a/test/fetch-h2/body.ts b/test/fetch-h2/body.ts index 04402e8..c7a0bb7 100644 --- a/test/fetch-h2/body.ts +++ b/test/fetch-h2/body.ts @@ -1,7 +1,5 @@ -import { expect } from "chai"; import { createHash } from "crypto"; import getStream from "get-stream"; -import "mocha"; import * as through2 from "through2"; import { createIntegrity } from "../lib/utils"; @@ -57,11 +55,11 @@ describe( "body", ( ) => it( "throw on multiple reads", async ( ) => { const body = new DataBody( "foo" ); - expect( body.bodyUsed ).to.be.false; - expect( await body.text( ) ).to.equal( "foo" ); - expect( body.bodyUsed ).to.be.true; + expect( body.bodyUsed ).toBe( false ); + expect( await body.text( ) ).toBe( "foo" ); + expect( body.bodyUsed ).toBe( true ); expect( await makeSync( ( ) => body.text( ) ) ) - .to.throw( ReferenceError ); + .toThrow( 
ReferenceError ); } ); } ); @@ -71,13 +69,13 @@ describe( "body", ( ) => { const body = new DataBody( "foo" ); expect( await makeSync( ( ) => ( < any >body ).blob( ) ) ) - .to.throw( ); + .toThrow( ); } ); it( "throw on unimplemented formData()", async ( ) => { const body = new DataBody( "foo" ); - expect( await makeSync( ( ) => body.formData( ) ) ).to.throw( ); + expect( await makeSync( ( ) => body.formData( ) ) ).toThrow( ); } ); } ); @@ -88,28 +86,28 @@ describe( "body", ( ) => { const body = new DataBody( < string >< any >1 ); expect( await makeSync( ( ) => body.arrayBuffer( ) ) ) - .to.throw( "Unknown body data" ); + .toThrow( "Unknown body data" ); } ); it( "handle invalid body type when reading as json", async ( ) => { const body = new DataBody( < string >< any >1 ); expect( await makeSync( ( ) => body.json( ) ) ) - .to.throw( "Unknown body data" ); + .toThrow( "Unknown body data" ); } ); it( "handle invalid body type when reading as text", async ( ) => { const body = new DataBody( < string >< any >1 ); expect( await makeSync( ( ) => body.text( ) ) ) - .to.throw( "Unknown body data" ); + .toThrow( "Unknown body data" ); } ); it( "handle invalid body type when reading as readable", async ( ) => { const body = new DataBody( < string >< any >1 ); expect( await makeSync( ( ) => body.readable( ) ) ) - .to.throw( "Unknown body data" ); + .toThrow( "Unknown body data" ); } ); } ); @@ -121,28 +119,28 @@ describe( "body", ( ) => { const body = new DataBody( null ); const data = Buffer.from( await body.arrayBuffer( ) ); - expect( data.toString( ) ).to.equal( "" ); + expect( data.toString( ) ).toBe( "" ); } ); it( "handle string", async ( ) => { const body = new DataBody( "foo" ); const data = Buffer.from( await body.arrayBuffer( ) ); - expect( data.toString( ) ).to.deep.equal( "foo" ); + expect( data.toString( ) ).toBe( "foo" ); } ); it( "handle buffer", async ( ) => { const body = new DataBody( Buffer.from( "foo" ) ); const data = Buffer.from( await body.arrayBuffer( ) ); - expect( data.toString( ) ).to.deep.equal( "foo" ); + expect( data.toString( ) ).toBe( "foo" ); } ); it( "handle JsonBody", async ( ) => { const body = new JsonBody( { foo: "bar" } ); const data = Buffer.from( await body.arrayBuffer( ) ); - expect( data.toString( ) ).to.deep.equal( '{"foo":"bar"}' ); + expect( data.toString( ) ).toBe( '{"foo":"bar"}' ); } ); it( "handle stream", async ( ) => @@ -151,7 +149,7 @@ describe( "body", ( ) => stream.end( "foo" ); const body = new StreamBody( stream ); const data = Buffer.from( await body.arrayBuffer( ) ); - expect( data.toString( ) ).to.deep.equal( "foo" ); + expect( data.toString( ) ).toBe( "foo" ); } ); } ); @@ -161,7 +159,7 @@ describe( "body", ( ) => { const body = new IntegrityBody( null, "" ); const data = Buffer.from( await body.arrayBuffer( ) ); - expect( data.toString( ) ).to.equal( "" ); + expect( data.toString( ) ).toBe( "" ); } ); it( "handle string", async ( ) => @@ -169,7 +167,7 @@ describe( "body", ( ) => const testData = "foo"; const body = new IntegrityBody( testData, testData ); const data = Buffer.from( await body.arrayBuffer( ) ); - expect( data.toString( ) ).to.deep.equal( testData ); + expect( data.toString( ) ).toBe( testData ); } ); it( "handle buffer", async ( ) => @@ -178,7 +176,7 @@ describe( "body", ( ) => const body = new IntegrityBody( Buffer.from( testData ), testData ); const data = Buffer.from( await body.arrayBuffer( ) ); - expect( data.toString( ) ).to.deep.equal( testData ); + expect( data.toString( ) ).toBe( testData ); } ); it( "handle 
stream", async ( ) => @@ -188,7 +186,7 @@ describe( "body", ( ) => stream.end( testData ); const body = new IntegrityBody( stream, testData ); const data = Buffer.from( await body.arrayBuffer( ) ); - expect( data.toString( ) ).to.deep.equal( testData ); + expect( data.toString( ) ).toBe( testData ); } ); } ); @@ -198,14 +196,14 @@ describe( "body", ( ) => { const body = new IntegrityBody( null, "", "acme-hash" ); expect( await makeSync( ( ) => body.arrayBuffer( ) ) ) - .to.throw( "not supported" ); + .toThrow( "not supported" ); } ); it( "handle null", async ( ) => { const body = new IntegrityBody( null, "" + "x" ); expect( await makeSync( ( ) => body.arrayBuffer( ) ) ) - .to.throw( "Resource integrity mismatch" ); + .toThrow( "Resource integrity mismatch" ); } ); it( "handle string", async ( ) => @@ -213,7 +211,7 @@ describe( "body", ( ) => const testData = "foo"; const body = new IntegrityBody( testData, testData + "x" ); expect( await makeSync( ( ) => body.arrayBuffer( ) ) ) - .to.throw( "Resource integrity mismatch" ); + .toThrow( "Resource integrity mismatch" ); } ); it( "handle buffer", async ( ) => @@ -222,7 +220,7 @@ describe( "body", ( ) => const body = new IntegrityBody( Buffer.from( testData ), testData + "x" ); expect( await makeSync( ( ) => body.arrayBuffer( ) ) ) - .to.throw( "Resource integrity mismatch" ); + .toThrow( "Resource integrity mismatch" ); } ); it( "handle stream", async ( ) => @@ -232,7 +230,7 @@ describe( "body", ( ) => stream.end( testData ); const body = new IntegrityBody( stream, testData + "x" ); expect( await makeSync( ( ) => body.arrayBuffer( ) ) ) - .to.throw( "Resource integrity mismatch" ); + .toThrow( "Resource integrity mismatch" ); } ); } ); } ); @@ -244,37 +242,37 @@ describe( "body", ( ) => it( "handle null", async ( ) => { const body = new DataBody( null ); - expect( await body.json( ) ).to.be.null; + expect( await body.json( ) ).toBe( null ); } ); it( "handle invalid string", async ( ) => { const body = new DataBody( "invalid json" ); - expect( await makeSync( ( ) => body.json( ) ) ).to.throw( ); + expect( await makeSync( ( ) => body.json( ) ) ).toThrow( ); } ); it( "handle valid string", async ( ) => { const body = new DataBody( '{"foo":"bar"}' ); - expect( await body.json( ) ).to.deep.equal( { foo: "bar" } ); + expect( await body.json( ) ).toEqual( { foo: "bar" } ); } ); it( "handle invalid buffer", async ( ) => { const body = new DataBody( Buffer.from( "invalid json" ) ); - expect( await makeSync( ( ) => body.json( ) ) ).to.throw( ); + expect( await makeSync( ( ) => body.json( ) ) ).toThrow( ); } ); it( "handle valid buffer", async ( ) => { const body = new DataBody( Buffer.from( '{"foo":"bar"}' ) ); - expect( await body.json( ) ).to.deep.equal( { foo: "bar" } ); + expect( await body.json( ) ).toEqual( { foo: "bar" } ); } ); it( "handle valid JsonBody", async ( ) => { const body = new JsonBody( { foo: "bar" } ); - expect( await body.json( ) ).to.deep.equal( { foo: "bar" } ); + expect( await body.json( ) ).toEqual( { foo: "bar" } ); } ); it( "handle invalid stream", async ( ) => @@ -282,7 +280,7 @@ describe( "body", ( ) => const stream = through2( ); stream.end( "invalid json" ); const body = new StreamBody( stream ); - expect( await makeSync( ( ) => body.json( ) ) ).to.throw( ); + expect( await makeSync( ( ) => body.json( ) ) ).toThrow( ); } ); it( "handle valid stream", async ( ) => @@ -290,7 +288,7 @@ describe( "body", ( ) => const stream = through2( ); stream.end( '{"foo":"bar"}' ); const body = new StreamBody( stream ); - expect( await 
body.json( ) ).to.deep.equal( { foo: "bar" } ); + expect( await body.json( ) ).toEqual( { foo: "bar" } ); } ); } ); @@ -300,7 +298,7 @@ describe( "body", ( ) => { const body = new DataBody( null ); setHash( body, "" ); - expect( await body.json( ) ).to.be.null; + expect( await body.json( ) ).toBe( null ); } ); it( "handle string", async ( ) => @@ -308,7 +306,7 @@ describe( "body", ( ) => const testData = '{"foo":"bar"}'; const body = new DataBody( testData ); setHash( body, testData ); - expect( await body.json( ) ).to.deep.equal( { foo: "bar" } ); + expect( await body.json( ) ).toEqual( { foo: "bar" } ); } ); it( "handle buffer", async ( ) => @@ -316,14 +314,14 @@ describe( "body", ( ) => const testData = '{"foo":"bar"}'; const body = new DataBody( Buffer.from( testData ) ); setHash( body, testData ); - expect( await body.json( ) ).to.deep.equal( { foo: "bar" } ); + expect( await body.json( ) ).toEqual( { foo: "bar" } ); } ); it( "handle JsonBody", async ( ) => { const body = new JsonBody( { foo: "bar" } ); setHash( body, '{"foo":"bar"}' ); - expect( await body.json( ) ).to.deep.equal( { foo: "bar" } ); + expect( await body.json( ) ).toEqual( { foo: "bar" } ); } ); it( "handle stream", async ( ) => @@ -333,7 +331,7 @@ describe( "body", ( ) => stream.end( testData ); const body = new StreamBody( stream ); setHash( body, testData ); - expect( await body.json( ) ).to.deep.equal( { foo: "bar" } ); + expect( await body.json( ) ).toEqual( { foo: "bar" } ); } ); } ); @@ -344,7 +342,7 @@ describe( "body", ( ) => const body = new DataBody( null ); setHash( body, "" + "x" ); expect( await makeSync( ( ) => body.json( ) ) ) - .to.throw( "Resource integrity mismatch" ); + .toThrow( "Resource integrity mismatch" ); } ); it( "handle string", async ( ) => @@ -353,7 +351,7 @@ describe( "body", ( ) => const body = new DataBody( testData ); setHash( body, testData + "x" ); expect( await makeSync( ( ) => body.json( ) ) ) - .to.throw( "Resource integrity mismatch" ); + .toThrow( "Resource integrity mismatch" ); } ); it( "handle buffer", async ( ) => @@ -362,7 +360,7 @@ describe( "body", ( ) => const body = new DataBody( Buffer.from( testData ) ); setHash( body, testData + "x" ); expect( await makeSync( ( ) => body.json( ) ) ) - .to.throw( "Resource integrity mismatch" ); + .toThrow( "Resource integrity mismatch" ); } ); it( "handle JsonBody", async ( ) => @@ -370,7 +368,7 @@ describe( "body", ( ) => const body = new JsonBody( { foo: "bar" } ); setHash( body, '{"foo":"bar"}' + "x" ); expect( await makeSync( ( ) => body.json( ) ) ) - .to.throw( "Resource integrity mismatch" ); + .toThrow( "Resource integrity mismatch" ); } ); it( "handle stream", async ( ) => @@ -381,7 +379,7 @@ describe( "body", ( ) => const body = new StreamBody( stream ); setHash( body, testData + "x" ); expect( await makeSync( ( ) => body.json( ) ) ) - .to.throw( "Resource integrity mismatch" ); + .toThrow( "Resource integrity mismatch" ); } ); } ); } ); @@ -393,19 +391,19 @@ describe( "body", ( ) => it( "handle null", async ( ) => { const body = new DataBody( null ); - expect( await body.text( ) ).to.be.null; + expect( await body.text( ) ).toBe( null ); } ); it( "handle string", async ( ) => { const body = new DataBody( "foo" ); - expect( await body.text( ) ).to.equal( "foo" ); + expect( await body.text( ) ).toBe( "foo" ); } ); it( "handle buffer", async ( ) => { const body = new DataBody( Buffer.from( "foo" ) ); - expect( await body.text( ) ).to.equal( "foo" ); + expect( await body.text( ) ).toBe( "foo" ); } ); it( "handle stream", async 
( ) => @@ -413,7 +411,7 @@ describe( "body", ( ) => const stream = through2( ); stream.end( "foo" ); const body = new StreamBody( stream ); - expect( await body.text( ) ).to.equal( "foo" ); + expect( await body.text( ) ).toBe( "foo" ); } ); } ); @@ -423,7 +421,7 @@ describe( "body", ( ) => { const body = new DataBody( null ); setHash( body, "" ); - expect( await body.text( ) ).to.be.null; + expect( await body.text( ) ).toBe( null ); } ); it( "handle string", async ( ) => @@ -431,7 +429,7 @@ describe( "body", ( ) => const testData = "foo"; const body = new DataBody( testData ); setHash( body, testData ); - expect( await body.text( ) ).to.equal( testData ); + expect( await body.text( ) ).toBe( testData ); } ); it( "handle buffer", async ( ) => @@ -439,7 +437,7 @@ describe( "body", ( ) => const testData = "foo"; const body = new DataBody( Buffer.from( testData ) ); setHash( body, testData ); - expect( await body.text( ) ).to.equal( testData ); + expect( await body.text( ) ).toBe( testData ); } ); it( "handle stream", async ( ) => @@ -449,7 +447,7 @@ describe( "body", ( ) => stream.end( testData ); const body = new StreamBody( stream ); setHash( body, testData ); - expect( await body.text( ) ).to.equal( testData ); + expect( await body.text( ) ).toBe( testData ); } ); } ); @@ -460,7 +458,7 @@ describe( "body", ( ) => const body = new DataBody( null ); setHash( body, "" + "x" ); expect( await makeSync( ( ) => body.text( ) ) ) - .to.throw( "Resource integrity mismatch" ); + .toThrow( "Resource integrity mismatch" ); } ); it( "handle string", async ( ) => @@ -469,7 +467,7 @@ describe( "body", ( ) => const body = new DataBody( testData ); setHash( body, testData + "x" ); expect( await makeSync( ( ) => body.text( ) ) ) - .to.throw( "Resource integrity mismatch" ); + .toThrow( "Resource integrity mismatch" ); } ); it( "handle buffer", async ( ) => @@ -478,7 +476,7 @@ describe( "body", ( ) => const body = new DataBody( Buffer.from( testData ) ); setHash( body, testData + "x" ); expect( await makeSync( ( ) => body.text( ) ) ) - .to.throw( "Resource integrity mismatch" ); + .toThrow( "Resource integrity mismatch" ); } ); it( "handle stream", async ( ) => @@ -489,7 +487,7 @@ describe( "body", ( ) => const body = new StreamBody( stream ); setHash( body, testData + "x" ); expect( await makeSync( ( ) => body.text( ) ) ) - .to.throw( "Resource integrity mismatch" ); + .toThrow( "Resource integrity mismatch" ); } ); } ); } ); @@ -500,21 +498,21 @@ describe( "body", ( ) => { const body = new DataBody( null ); const data = await getStream.buffer( await body.readable( ) ); - expect( data.toString( ) ).to.equal( "" ); + expect( data.toString( ) ).toBe( "" ); } ); it( "handle string", async ( ) => { const body = new DataBody( "foo" ); const data = await getStream.buffer( await body.readable( ) ); - expect( data.toString( ) ).to.equal( "foo" ); + expect( data.toString( ) ).toBe( "foo" ); } ); it( "handle buffer", async ( ) => { const body = new DataBody( Buffer.from( "foo" ) ); const data = await getStream.buffer( await body.readable( ) ); - expect( data.toString( ) ).to.equal( "foo" ); + expect( data.toString( ) ).toBe( "foo" ); } ); it( "handle stream", async ( ) => @@ -523,7 +521,7 @@ describe( "body", ( ) => stream.end( "foo" ); const body = new StreamBody( stream ); const data = await getStream.buffer( await body.readable( ) ); - expect( data.toString( ) ).to.equal( "foo" ); + expect( data.toString( ) ).toBe( "foo" ); } ); } ); } ); diff --git a/test/fetch-h2/context.ts b/test/fetch-h2/context.ts index 
f365788..9616b29 100644 --- a/test/fetch-h2/context.ts +++ b/test/fetch-h2/context.ts @@ -1,6 +1,3 @@ -import { expect } from "chai"; -import "mocha"; - import { TestData } from "../lib/server-common"; import { makeMakeServer } from "../lib/server-helpers"; @@ -26,11 +23,11 @@ function ensureStatusSuccess( response: Response ): Response ] as Array< TestData > ) .forEach( ( { proto, version } ) => { -describe( `context (${version} over ${proto.replace( ":", "" )})`, function( ) +describe( `context (${version} over ${proto.replace( ":", "" )})`, ( ) => { const { cycleOpts, makeServer } = makeMakeServer( { proto, version } ); - this.timeout( 500 ); + jest.setTimeout( 500 ); describe( "options", ( ) => { @@ -49,7 +46,7 @@ describe( `context (${version} over ${proto.replace( ":", "" )})`, function( ) ); const res = await response.json( ); - expect( res[ "user-agent" ] ).to.equal( "foobar" ); + expect( res[ "user-agent" ] ).toBe( "foobar" ); disconnectAll( ); @@ -70,8 +67,8 @@ describe( `context (${version} over ${proto.replace( ":", "" )})`, function( ) ); const res = await response.json( ); - expect( res[ "user-agent" ] ).to.contain( "foobar" ); - expect( res[ "user-agent" ] ).to.contain( "fetch-h2" ); + expect( res[ "user-agent" ] ).toContain( "foobar" ); + expect( res[ "user-agent" ] ).toContain( "fetch-h2" ); disconnectAll( ); @@ -94,7 +91,7 @@ describe( `context (${version} over ${proto.replace( ":", "" )})`, function( ) ); const res = await response.json( ); - expect( res.accept ).to.equal( accept ); + expect( res.accept ).toBe( accept ); disconnectAll( ); @@ -119,17 +116,17 @@ describe( `context (${version} over ${proto.replace( ":", "" )})`, function( ) try { await fetch( `https://localhost:${port}/headers` ); - expect( true ).to.be.false; + expect( true ).toEqual( false ); } catch ( err ) { - expect( err.message ).to.satisfy( ( message: string ) => - message.includes( "closed" ) // < Node 9.4 + expect( + err.message.includes( "closed" ) // < Node 9.4 || - message.includes( "self signed" ) // >= Node 9.4 + err.message.includes( "self signed" ) // >= Node 9.4 || - message.includes( "expired" ) - ); + err.message.includes( "expired" ) + ).toBeTruthy( ); } disconnectAll( ); @@ -153,7 +150,7 @@ describe( `context (${version} over ${proto.replace( ":", "" )})`, function( ) ); const res = await response.json( ); - expect( res[ "user-agent" ] ).to.equal( "foobar" ); + expect( res[ "user-agent" ] ).toBe( "foobar" ); disconnectAll( ); @@ -171,7 +168,7 @@ describe( `context (${version} over ${proto.replace( ":", "" )})`, function( ) expect( await cookieJar.getCookies( `${proto}//localhost:${port}/` ) - ).to.be.empty; + ).toEqual( [ ] ); const { disconnectAll, fetch } = context( { ...cycleOpts, @@ -188,11 +185,11 @@ describe( `context (${version} over ${proto.replace( ":", "" )})`, function( ) const cookies = await cookieJar.getCookies( `${proto}//localhost:${port}/` ); - expect( cookies ).to.not.be.empty; - expect( cookies[ 0 ].key ).to.equal( "a" ); - expect( cookies[ 0 ].value ).to.equal( "b" ); - expect( cookies[ 1 ].key ).to.equal( "c" ); - expect( cookies[ 1 ].value ).to.equal( "d" ); + expect( cookies.length ).toBeGreaterThan( 1 ); + expect( cookies[ 0 ].key ).toBe( "a" ); + expect( cookies[ 0 ].value ).toBe( "b" ); + expect( cookies[ 1 ].key ).toBe( "c" ); + expect( cookies[ 1 ].value ).toBe( "d" ); // Next request should maintain cookies @@ -201,7 +198,7 @@ describe( `context (${version} over ${proto.replace( ":", "" )})`, function( ) const cookies2 = await cookieJar.getCookies( 
`${proto}//localhost:${port}/` ); - expect( cookies2 ).to.not.be.empty; + expect( cookies2.length ).toBeGreaterThan( 0 ); // If we manually clear the cookie jar, subsequent requests // shouldn't have any cookies @@ -213,7 +210,7 @@ describe( `context (${version} over ${proto.replace( ":", "" )})`, function( ) const cookies3 = await cookieJar.getCookies( `${proto}//localhost:${port}/` ); - expect( cookies3 ).to.be.empty; + expect( cookies3 ).toEqual( [ ] ); disconnectAll( ); @@ -234,8 +231,8 @@ describe( `context (${version} over ${proto.replace( ":", "" )})`, function( ) } ); - expect( response.headers.get( "set-cookie" ) ).to.be.null; - expect( response.headers.get( "set-cookie2" ) ).to.be.null; + expect( response.headers.get( "set-cookie" ) ).toBe( null ); + expect( response.headers.get( "set-cookie2" ) ).toBe( null ); disconnectAll( ); @@ -257,8 +254,7 @@ describe( `context (${version} over ${proto.replace( ":", "" )})`, function( ) } ); - expect( response.headers.get( "set-cookie" ) ) - .to.equal( "a=b,c=d" ); + expect( response.headers.get( "set-cookie" ) ).toBe( "a=b,c=d" ); disconnectAll( ); diff --git a/test/fetch-h2/event-loop-reference.ts b/test/fetch-h2/event-loop-reference.ts index 13c7a1d..5177e8d 100644 --- a/test/fetch-h2/event-loop-reference.ts +++ b/test/fetch-h2/event-loop-reference.ts @@ -1,17 +1,16 @@ import * as path from "path"; -import { expect } from "chai"; import * as execa from "execa"; import { TestData } from "../lib/server-common"; import { makeMakeServer } from "../lib/server-helpers"; -const script = path.resolve( __dirname, "../../../scripts/test-client" ); +const script = path.resolve( path.join( process.cwd( ), "scripts", "test-client" ) ); -describe( "event-loop", function( ) +describe( "event-loop", ( ) => { - this.timeout( 20000 ); + jest.setTimeout( 20000 ); const runs: Array< TestData > = [ { proto: "http:", version: "http1" }, @@ -39,7 +38,7 @@ describe( "event-loop", function( ) ); const responseBody = JSON.parse( stdout ); - expect( responseBody[ "user-agent" ] ).to.include( "fetch-h2/" ); + expect( responseBody[ "user-agent" ] ).toContain( "fetch-h2/" ); await server.shutdown( ); } ); diff --git a/test/fetch-h2/httpbin.ts b/test/fetch-h2/httpbin.ts index 71b61a1..6735550 100644 --- a/test/fetch-h2/httpbin.ts +++ b/test/fetch-h2/httpbin.ts @@ -1,8 +1,6 @@ import { URL } from "url"; import { delay } from "already"; -import { expect } from "chai"; -import "mocha"; import * as through2 from "through2"; import { @@ -33,9 +31,9 @@ const baseHost = new URL( host ).origin; const name = `${site} (${protos[ 0 ]} over ${protocol.replace( ":", "" )})`; -describe( name, function( ) +describe( name, ( ) => { - this.timeout( 5000 ); + jest.setTimeout( 5000 ); const { fetch, disconnectAll } = context( { httpsProtocols: protos, @@ -47,7 +45,7 @@ describe( name, function( ) { const response = await fetch( `${host}/user-agent` ); const data = await response.json( ); - expect( data[ "user-agent" ] ).to.include( "fetch-h2/" ); + expect( data[ "user-agent" ] ).toContain( "fetch-h2/" ); } ); it( "should be possible to POST JSON", async ( ) => @@ -62,9 +60,9 @@ describe( name, function( ) } ); const data = await response.json( ); - expect( testData ).to.deep.equal( data.json ); + expect( testData ).toEqual( data.json ); // fetch-h2 should set content type for JsonBody - expect( data.headers[ "Content-Type" ] ).to.equal( "application/json" ); + expect( data.headers[ "Content-Type" ] ).toBe( "application/json" ); } ); it( "should be possible to POST buffer-data", async ( 
) => @@ -79,8 +77,8 @@ describe( name, function( ) } ); const data = await response.json( ); - expect( data.data ).to.equal( testData ); - expect( Object.keys( data.headers ) ).to.not.contain( "Content-Type" ); + expect( data.data ).toBe( testData ); + expect( data.headers ).not.toHaveProperty( "Content-Type" ); } ); it( "should be possible to POST already ended stream-data", async ( ) => @@ -102,7 +100,7 @@ describe( name, function( ) ); const data = await response.json( ); - expect( data.data ).to.equal( "foobar" ); + expect( data.data ).toBe( "foobar" ); } ); it( "should be possible to POST not yet ended stream-data", async ( ) => @@ -128,7 +126,7 @@ describe( name, function( ) const response = await eventualResponse; const data = await response.json( ); - expect( data.data ).to.equal( "foobar" ); + expect( data.data ).toBe( "foobar" ); } ); it( "should save and forward cookies", async ( ) => @@ -139,13 +137,13 @@ describe( name, function( ) `${host}/cookies/set?foo=bar`, { redirect: "manual" } ); - expect( responseSet.headers.has( "location" ) ).to.be.true; + expect( responseSet.headers.has( "location" ) ).toBe( true ); const redirectedTo = responseSet.headers.get( "location" ); const response = await fetch( baseHost + redirectedTo ); const data = await response.json( ); - expect( data.cookies ).to.deep.equal( { foo: "bar" } ); + expect( data.cookies ).toEqual( { foo: "bar" } ); await disconnectAll( ); } ); @@ -158,7 +156,7 @@ describe( name, function( ) `${host}/relative-redirect/2`, { redirect: "follow" } ); - expect( response.url ).to.equal( `${host}/get` ); + expect( response.url ).toBe( `${host}/get` ); await response.text( ); await disconnectAll( ); @@ -168,7 +166,7 @@ describe( name, function( ) { const response = await fetch( `${host}/gzip` ); const data = await response.json( ); - expect( data ).to.deep.include( { gzipped: true, method: "GET" } ); + expect( data ).toMatchObject( { gzipped: true, method: "GET" } ); } ); } ); } ); diff --git a/test/fetch-h2/index.ts b/test/fetch-h2/index.ts index 9c235c9..c6ed1d6 100644 --- a/test/fetch-h2/index.ts +++ b/test/fetch-h2/index.ts @@ -1,9 +1,7 @@ import { defer, delay } from "already"; -import { expect } from "chai"; import { createHash } from "crypto"; import * as from2 from "from2"; import getStream from "get-stream"; -import "mocha"; import * as through2 from "through2"; import { TestData } from "../lib/server-common"; @@ -79,13 +77,13 @@ describe( `(${version} over ${proto.replace( ":", "" )})`, ( ) => const res = await response.json( ); if ( version === "http1" ) - expect( res[ "http1-path" ] ).to.equal( "/headers" ); + expect( res[ "http1-path" ] ).toBe( "/headers" ); else - expect( res[ ":path" ] ).to.equal( "/headers" ); + expect( res[ ":path" ] ).toBe( "/headers" ); const versionNumber = parseInt( version.substr( version.length - 1 ), 10 ); - expect( response.httpVersion ).to.equal( versionNumber ); + expect( response.httpVersion ).toBe( versionNumber ); await server.shutdown( ); } ); @@ -114,7 +112,7 @@ describe( `(${version} over ${proto.replace( ":", "" )})`, ( ) => const res = await response.json( ); for ( const [ key, val ] of Object.entries( headers ) ) - expect( res[ key.toLowerCase( ) ] ).to.equal( val ); + expect( res[ key.toLowerCase( ) ] ).toBe( val ); await server.shutdown( ); } ); @@ -136,8 +134,8 @@ describe( `(${version} over ${proto.replace( ":", "" )})`, ( ) => const data = await response.json( ); const { headers } = response; - expect( headers.get( "Content-Type" ) ).to.equal( "application/json" ); - 
expect( data ).to.deep.equal( json ); + expect( headers.get( "Content-Type" ) ).toBe( "application/json" ); + expect( data ).toEqual( json ); await server.shutdown( ); } ); @@ -166,7 +164,7 @@ describe( `(${version} over ${proto.replace( ":", "" )})`, ( ) => const res = await response.json( ); for ( const [ key, val ] of Object.entries( headers ) ) - expect( res[ key ] ).to.equal( `${val}` ); + expect( res[ key ] ).toBe( `${val}` ); await server.shutdown( ); } ); @@ -197,7 +195,7 @@ describe( `(${version} over ${proto.replace( ":", "" )})`, ( ) => const response = ensureStatusSuccess( await eventualResponse ); const data = await response.text( ); - expect( data ).to.equal( "foobar" ); + expect( data ).toBe( "foobar" ); await server.shutdown( ); } ); @@ -226,7 +224,7 @@ describe( `(${version} over ${proto.replace( ":", "" )})`, ( ) => const response = ensureStatusSuccess( await eventualResponse ); const data = await response.text( ); - expect( data ).to.equal( "foobar" ); + expect( data ).toBe( "foobar" ); await server.shutdown( ); } ); @@ -246,7 +244,7 @@ describe( `(${version} over ${proto.replace( ":", "" )})`, ( ) => const err = await getRejection( eventualResponse ); - expect( err.message ).to.contain( "Cannot specify both" ); + expect( err.message ).toContain( "Cannot specify both" ); await server.shutdown( ); } ); @@ -268,8 +266,8 @@ describe( `(${version} over ${proto.replace( ":", "" )})`, ( ) => const data = await response.json( ); const { headers } = response; - expect( headers.get( "content-type" ) ).to.equal( "application/json" ); - expect( data ).to.deep.equal( json ); + expect( headers.get( "content-type" ) ).toBe( "application/json" ); + expect( data ).toEqual( json ); await server.shutdown( ); } ); @@ -290,7 +288,7 @@ describe( `(${version} over ${proto.replace( ":", "" )})`, ( ) => const data = await response.text( ); - expect( data ).to.deep.equal( body ); + expect( data ).toEqual( body ); await server.shutdown( ); } ); @@ -311,7 +309,7 @@ describe( `(${version} over ${proto.replace( ":", "" )})`, ( ) => const data = await response.arrayBuffer( ); - expect( Buffer.compare( Buffer.from( data ), body ) ).to.equal( 0 ); + expect( Buffer.compare( Buffer.from( data ), body ) ).toBe( 0 ); await server.shutdown( ); } ); @@ -336,7 +334,7 @@ describe( `(${version} over ${proto.replace( ":", "" )})`, ( ) => const data = await response.text( ); - expect( data ).to.equal( "foobar" ); + expect( data ).toBe( "foobar" ); await server.shutdown( ); } ); @@ -362,20 +360,20 @@ describe( `(${version} over ${proto.replace( ":", "" )})`, ( ) => const data = await response.text( ); const receivedTrailers = await deferredTrailers.promise; - expect( data ).to.contain( "trailers will be sent" ); + expect( data ).toContain( "trailers will be sent" ); Object.keys( trailers ) .forEach( key => { - expect( receivedTrailers.get( key ) ).to.equal( trailers[ key ] ); + expect( receivedTrailers.get( key ) ).toBe( trailers[ key ] ); } ); await server.shutdown( ); } ); - it( "should timeout on a slow request", async function( ) + it( "should timeout on a slow request", async ( ) => { - this.timeout( 500 ); + jest.setTimeout( 500 ); const { server, port } = await makeServer( ); @@ -389,7 +387,7 @@ describe( `(${version} over ${proto.replace( ":", "" )})`, ( ) => const err = await getRejection( eventualResponse ); - expect( err.message ).to.contain( "timed out" ); + expect( err.message ).toContain( "timed out" ); await server.shutdown( ); } ); @@ -406,15 +404,15 @@ describe( `(${version} over ${proto.replace( 
":", "" )})`, ( ) => } ); - expect( response.status ).to.equal( 200 ); + expect( response.status ).toBe( 200 ); await server.shutdown( ); } ); it( "should be able to POST large (16MiB) stream with known length", - async function( ) + async ( ) => { - this.timeout( 2000 ); + jest.setTimeout( 2000 ); const { server, port } = await makeServer( ); @@ -454,15 +452,15 @@ describe( `(${version} over ${proto.replace( ":", "" )})`, ( ) => const response = ensureStatusSuccess( await eventualResponse ); const data = await response.text( ); - expect( data ).to.equal( referenceHash ); + expect( data ).toBe( referenceHash ); await server.shutdown( ); } ); it( "should be able to POST large (16MiB) stream with unknown length", - async function( ) + async ( ) => { - this.timeout( 2000 ); + jest.setTimeout( 2000 ); const { server, port } = await makeServer( ); @@ -500,7 +498,7 @@ describe( `(${version} over ${proto.replace( ":", "" )})`, ( ) => const response = ensureStatusSuccess( await eventualResponse ); const data = await response.text( ); - expect( data ).to.equal( referenceHash ); + expect( data ).toBe( referenceHash ); await server.shutdown( ); } ); @@ -537,12 +535,12 @@ describe( `(${version} over ${proto.replace( ":", "" )})`, ( ) => const responseText = await response.text( ); - expect( responseText ).to.equal( "push-route" ); + expect( responseText ).toBe( "push-route" ); const pushedResponse = await onPushPromise; const pushedData = await pushedResponse.json( ); - expect( pushedData ).to.deep.equal( data ); + expect( pushedData ).toEqual( data ); onPush( ); @@ -568,9 +566,9 @@ describe( `(${version} over ${proto.replace( ":", "" )})`, ( ) => const responseData = await response.json( ); if ( version === "http2" ) - expect( responseData[ ":authority" ] ).to.equal( host ); + expect( responseData[ ":authority" ] ).toBe( host ); else - expect( responseData.host ).to.equal( host ); + expect( responseData.host ).toBe( host ); await server.shutdown( ); } ); @@ -585,7 +583,7 @@ describe( `(${version} over ${proto.replace( ":", "" )})`, ( ) => const responseData = await response.json( ); - expect( responseData[ "accept-encoding" ] ).to.contain( "gzip" ); + expect( responseData[ "accept-encoding" ] ).toContain( "gzip" ); await server.shutdown( ); } ); @@ -606,14 +604,13 @@ describe( `(${version} over ${proto.replace( ":", "" )})`, ( ) => ) ); - expect( response.headers.get( "content-encoding" ) ) - .to.equal( "gzip" ); + expect( response.headers.get( "content-encoding" ) ).toBe( "gzip" ); const stream = await response.readable( ); const data = await getStream.buffer( stream ); - expect( JSON.parse( data.toString( ) ) ).to.deep.equal( testData ); + expect( JSON.parse( data.toString( ) ) ).toEqual( testData ); await server.shutdown( ); } ); @@ -634,14 +631,13 @@ describe( `(${version} over ${proto.replace( ":", "" )})`, ( ) => ) ); - expect( response.headers.get( "content-encoding" ) ) - .to.equal( "deflate" ); + expect( response.headers.get( "content-encoding" ) ).toBe( "deflate" ); const stream = await response.readable( ); const data = await getStream.buffer( stream ); - expect( JSON.parse( data.toString( ) ) ).to.deep.equal( testData ); + expect( JSON.parse( data.toString( ) ) ).toEqual( testData ); await server.shutdown( ); } ); @@ -665,13 +661,13 @@ describe( `(${version} over ${proto.replace( ":", "" )})`, ( ) => ) ); - expect( response.headers.get( "content-encoding" ) ).to.equal( "br" ); + expect( response.headers.get( "content-encoding" ) ).toBe( "br" ); const stream = await response.readable( ); 
const data = await getStream.buffer( stream ); - expect( JSON.parse( data.toString( ) ) ).to.deep.equal( testData ); + expect( JSON.parse( data.toString( ) ) ).toEqual( testData ); await server.shutdown( ); } ); @@ -687,7 +683,7 @@ describe( `response (${proto})`, ( ) => const response = ensureStatusSuccess( await fetch( url ) ); - expect( response.url ).to.equal( cleanUrl( url ) ); + expect( response.url ).toBe( cleanUrl( url ) ); await disconnectAll( ); await server.shutdown( ); @@ -706,10 +702,10 @@ describe( `goaway (${proto})`, ( ) => const url2 = `${proto}//localhost:${port}/headers`; const response1 = ensureStatusSuccess( await fetch( url1 ) ); - expect( response1.url ).to.equal( cleanUrl( url1 ) ); + expect( response1.url ).toBe( cleanUrl( url1 ) ); const response2 = ensureStatusSuccess( await fetch( url2 ) ); - expect( response2.url ).to.equal( cleanUrl( url2 ) ); + expect( response2.url ).toBe( cleanUrl( url2 ) ); await response1.text( ); await response2.text( ); @@ -726,12 +722,12 @@ describe( `goaway (${proto})`, ( ) => const url2 = `${proto}//localhost:${port}/headers`; const response1 = ensureStatusSuccess( await fetch( url1 ) ); - expect( response1.url ).to.equal( cleanUrl( url1 ) ); + expect( response1.url ).toBe( cleanUrl( url1 ) ); await delay(20); const response2 = ensureStatusSuccess( await fetch( url2 ) ); - expect( response2.url ).to.equal( cleanUrl( url2 ) ); + expect( response2.url ).toBe( cleanUrl( url2 ) ); await response1.text( ); await response2.text( ); @@ -748,12 +744,12 @@ describe( `goaway (${proto})`, ( ) => const url2 = `${proto}//localhost:${port}/slow/50`; const response1 = ensureStatusSuccess( await fetch( url1 ) ); - expect( response1.url ).to.equal( cleanUrl( url1 ) ); + expect( response1.url ).toBe( cleanUrl( url1 ) ); await delay( 10 ); const response2 = ensureStatusSuccess( await fetch( url2 ) ); - expect( response2.url ).to.equal( cleanUrl( url2 ) ); + expect( response2.url ).toBe( cleanUrl( url2 ) ); await delay( 10 ); @@ -761,8 +757,8 @@ describe( `goaway (${proto})`, ( ) => const text1 = await response1.text( true ); const text2 = await response2.text( true ); - expect( text1 ).to.equal( "abcde" ); - expect( text2 ).to.equal( "abcde" ); + expect( text1 ).toBe( "abcde" ); + expect( text2 ).toBe( "abcde" ); await server.shutdown( ); } ); @@ -780,9 +776,9 @@ describe( `integrity (${proto})`, ( ) => const integrity = createIntegrity( data ); const response = ensureStatusSuccess( await fetch( url, { integrity } ) ); - expect( response.url ).to.equal( cleanUrl( url ) ); + expect( response.url ).toBe( cleanUrl( url ) ); - expect( await response.text( ) ).to.equal( data ); + expect( await response.text( ) ).toBe( data ); await disconnectAll( ); await server.shutdown( ); @@ -798,16 +794,16 @@ describe( `integrity (${proto})`, ( ) => const integrity = createIntegrity( data ); const response = ensureStatusSuccess( await fetch( url, { integrity } ) ); - expect( response.url ).to.equal( cleanUrl( url ) ); + expect( response.url ).toBe( cleanUrl( url ) ); try { await response.text( ); - expect( false ).to.equal( true ); + expect( false ).toBe( true ); } catch ( err ) { - expect( err.message ).to.contain( "integrity" ); + expect( err.message ).toContain( "integrity" ); } await disconnectAll( ); @@ -826,7 +822,7 @@ describe( `premature stream close (${proto})`, ( ) => try { await fetch( url ); - expect( false ).to.equal( true ); + expect( false ).toBe( true ); } catch ( err ) { @@ -834,7 +830,7 @@ describe( `premature stream close (${proto})`, ( ) => version 
=== "http1" ? "socket hang up" : "Stream prematurely closed"; - expect( err.message ).to.contain( expected ); + expect( err.message ).toContain( expected ); } await disconnectAll( ); diff --git a/test/lib/server-helpers.ts b/test/lib/server-helpers.ts index 3d08244..99e517a 100644 --- a/test/lib/server-helpers.ts +++ b/test/lib/server-helpers.ts @@ -1,4 +1,5 @@ import { readFileSync } from "fs"; +import * as path from "path"; import { ServerOptions, @@ -12,8 +13,8 @@ import { } from "./server-http2"; -const key = readFileSync( __dirname + "/../../../certs/key.pem" ); -const cert = readFileSync( __dirname + "/../../../certs/cert.pem" ); +const key = readFileSync( path.join( process.cwd(), "certs", "key.pem" ) ); +const cert = readFileSync( path.join( process.cwd(), "certs", "cert.pem" ) ); export function makeMakeServer( { proto, version }: TestData ) { diff --git a/test/mocha.opts b/test/mocha.opts deleted file mode 100644 index 94a484c..0000000 --- a/test/mocha.opts +++ /dev/null @@ -1,3 +0,0 @@ ---recursive ---timeout 200 ---require source-map-support/register From b7aac98fd4877c8258602b89cb7abc6dcadbefeb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Tue, 12 Mar 2019 20:56:33 +0100 Subject: [PATCH 25/77] ci(coverage): Fixed coveralls upload from travis --- .travis.yml | 3 +-- package.json | 3 ++- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index 4e0ceb3..9889b87 100644 --- a/.travis.yml +++ b/.travis.yml @@ -10,9 +10,8 @@ node_js: install: - npm install - npm run build -after_script: - - "node_modules/.bin/nyc report --reporter=text-lcov | node_modules/.bin/coveralls" after_success: + - npm run coveralls - npm run travis-deploy-once "npm run semantic-release" branches: except: diff --git a/package.json b/package.json index 9700c37..6050473 100644 --- a/package.json +++ b/package.json @@ -22,10 +22,11 @@ "lint": "node_modules/.bin/tslint --project .", "jest": "node_modules/.bin/jest", "jest:debug": "node --inspect-brk node_modules/.bin/jest", - "test": "npm run lint && npm run jest --coverage", + "test": "npm run lint && node_modules/.bin/jest --coverage", "buildtest": "npm run build && npm run jest", "buildtestcov": "npm run build && npm run test", "coverage": "node_modules/.bin/nyc report --reporter=html", + "coveralls": "cat coverage/lcov.info | node_modules/.bin/coveralls", "version": "./node_modules/.bin/ts-node scripts/version-update.ts && npm run buildtest && scripts/version-git-add.sh", "prepack": "npm run build && npm run test", "makecerts": "openssl req -x509 -nodes -days 7300 -newkey rsa:2048 -keyout certs/key.pem -out certs/cert.pem", From 18c3f6a927f7f1e6f3206dfa545c8832a77947f4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Tue, 12 Mar 2019 21:42:21 +0100 Subject: [PATCH 26/77] test(jest): Test speedup by concurrent slow tests --- test/fetch-h2/httpbin.ts | 50 +++++++++++++++++++++++++--------------- 1 file changed, 31 insertions(+), 19 deletions(-) diff --git a/test/fetch-h2/httpbin.ts b/test/fetch-h2/httpbin.ts index 6735550..e4ec943 100644 --- a/test/fetch-h2/httpbin.ts +++ b/test/fetch-h2/httpbin.ts @@ -1,11 +1,12 @@ import { URL } from "url"; -import { delay } from "already"; +import { delay, Finally } from "already"; import * as through2 from "through2"; import { context, DataBody, + fetch as fetchType, HttpProtocols, JsonBody, StreamBody, @@ -35,20 +36,27 @@ describe( name, ( ) => { jest.setTimeout( 5000 ); - const { fetch, disconnectAll } = context( { - 
httpsProtocols: protos, - } ); + function wrapContext( fn: ( fetch: typeof fetchType ) => Promise< void > ) + { + return async ( ) => + { + const { fetch, disconnectAll } = context( { + httpsProtocols: protos, + } ); - afterEach( disconnectAll ); + await fn( fetch ).then( ...Finally( disconnectAll ) ); + }; + } - it( "should be possible to GET", async ( ) => + it.concurrent( "should be possible to GET", wrapContext( async ( fetch ) => { const response = await fetch( `${host}/user-agent` ); const data = await response.json( ); expect( data[ "user-agent" ] ).toContain( "fetch-h2/" ); - } ); + } ) ); - it( "should be possible to POST JSON", async ( ) => + it.concurrent( "should be possible to POST JSON", wrapContext( + async ( fetch ) => { const testData = { foo: "bar" }; @@ -63,9 +71,10 @@ describe( name, ( ) => expect( testData ).toEqual( data.json ); // fetch-h2 should set content type for JsonBody expect( data.headers[ "Content-Type" ] ).toBe( "application/json" ); - } ); + } ) ); - it( "should be possible to POST buffer-data", async ( ) => + it.concurrent( "should be possible to POST buffer-data", wrapContext( + async ( fetch ) => { const testData = '{"foo": "data"}'; @@ -79,9 +88,10 @@ describe( name, ( ) => const data = await response.json( ); expect( data.data ).toBe( testData ); expect( data.headers ).not.toHaveProperty( "Content-Type" ); - } ); + } ) ); - it( "should be possible to POST already ended stream-data", async ( ) => + it.concurrent( "should be possible to POST already ended stream-data", + wrapContext( async ( fetch ) => { const stream = through2( ); @@ -101,9 +111,10 @@ describe( name, ( ) => const data = await response.json( ); expect( data.data ).toBe( "foobar" ); - } ); + } ) ); - it( "should be possible to POST not yet ended stream-data", async ( ) => + it.concurrent( "should be possible to POST not yet ended stream-data", + wrapContext( async ( fetch ) => { const stream = through2( ); @@ -127,9 +138,9 @@ describe( name, ( ) => const data = await response.json( ); expect( data.data ).toBe( "foobar" ); - } ); + } ) ); - it( "should save and forward cookies", async ( ) => + it.concurrent( "should save and forward cookies", async ( ) => { const { fetch, disconnectAll } = context( ); @@ -148,7 +159,7 @@ describe( name, ( ) => await disconnectAll( ); } ); - it( "should handle (and follow) relative paths", async ( ) => + it.concurrent( "should handle (and follow) relative paths", async ( ) => { const { fetch, disconnectAll } = context( ); @@ -162,11 +173,12 @@ describe( name, ( ) => await disconnectAll( ); } ); - it( "should be possible to GET gzip data", async ( ) => + it.concurrent( "should be possible to GET gzip data", wrapContext( + async ( fetch ) => { const response = await fetch( `${host}/gzip` ); const data = await response.json( ); expect( data ).toMatchObject( { gzipped: true, method: "GET" } ); - } ); + } ) ); } ); } ); From a2cbd690e7da695e575c713d9a443ffda8daa3f7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Tue, 12 Mar 2019 22:49:35 +0100 Subject: [PATCH 27/77] test(test): Renamed meta-created tests --- test/fetch-h2/index.ts | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/test/fetch-h2/index.ts b/test/fetch-h2/index.ts index c6ed1d6..1d81fb0 100644 --- a/test/fetch-h2/index.ts +++ b/test/fetch-h2/index.ts @@ -58,11 +58,13 @@ const { disconnectAll, fetch, onPush } = ? 
{ disconnectAll: _disconnectAll, fetch: _fetch, onPush: _onPush } : context( { ...cycleOpts } ); +const protoVersion = `${version} over ${proto.replace( ":", "" )}`; + describe( "basic", ( ) => { afterEach( disconnectAll ); -describe( `(${version} over ${proto.replace( ":", "" )})`, ( ) => +describe( `generic (${protoVersion})`, ( ) => { it( "should be able to perform simple GET", async ( ) => { @@ -673,7 +675,7 @@ describe( `(${version} over ${proto.replace( ":", "" )})`, ( ) => } ); } ); -describe( `response (${proto})`, ( ) => +describe( `response (${protoVersion})`, ( ) => { it( "should have a proper url", async ( ) => { @@ -691,7 +693,7 @@ describe( `response (${proto})`, ( ) => } ); if ( version === "http2" ) -describe( `goaway (${proto})`, ( ) => +describe( `goaway (${protoVersion})`, ( ) => { if ( proto === "http:" ) // This race is too fast for TLS it( "handle session failover (race conditioned)", async ( ) => @@ -764,7 +766,7 @@ describe( `goaway (${proto})`, ( ) => } ); } ); -describe( `integrity (${proto})`, ( ) => +describe( `integrity (${protoVersion})`, ( ) => { it( "handle and succeed on valid integrity", async ( ) => { @@ -811,7 +813,7 @@ describe( `integrity (${proto})`, ( ) => } ); } ); -describe( `premature stream close (${proto})`, ( ) => +describe( `premature stream close (${protoVersion})`, ( ) => { it( "handle and reject fetch operation", async ( ) => { From 55ea6dacd840a02a61da92a1e4c80606a340e204 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Tue, 12 Mar 2019 22:51:03 +0100 Subject: [PATCH 28/77] fix(http1): Harder disconnect (destroy rather than end) --- lib/context-http1.ts | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/lib/context-http1.ts b/lib/context-http1.ts index d1237fc..2dde350 100644 --- a/lib/context-http1.ts +++ b/lib/context-http1.ts @@ -178,9 +178,7 @@ class OriginPool private async disconnectSocket( socket: Socket ) { - await new Promise< void >( ( resolve ) => - socket.end( Buffer.from( [ ] ), ( ) => resolve ) - ); + socket.destroy( ); } private makeCleaner( socket: Socket ) From 94a983cbb2859a775d97e8164e465f63a0d05234 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Tue, 12 Mar 2019 22:51:46 +0100 Subject: [PATCH 29/77] fix(http1): Fixed erroneous socket-reuse logic --- lib/context-http1.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/context-http1.ts b/lib/context-http1.ts index 2dde350..2fb6528 100644 --- a/lib/context-http1.ts +++ b/lib/context-http1.ts @@ -202,7 +202,7 @@ class OriginPool this.usedSockets.delete( socket ); - if ( this.maxFreeSockets >= this.unusedSockets.size + 1 ) + if ( this.maxFreeSockets < this.unusedSockets.size + 1 ) { await this.disconnectSocket( socket ); return; From c1fcbe2f6acbdcc360676c005e4c60e128726d37 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Tue, 12 Mar 2019 22:53:45 +0100 Subject: [PATCH 30/77] feat(http1): Changed keep-alive to default to true Fixes #33 --- README.md | 2 +- lib/context-http1.ts | 12 ++++++-- test/fetch-h2/http1.ts | 65 ++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 76 insertions(+), 3 deletions(-) create mode 100644 test/fetch-h2/http1.ts diff --git a/README.md b/README.md index 1354625..76745c6 100644 --- a/README.md +++ b/README.md @@ -308,7 +308,7 @@ The `http1` options object can be used to configure this. 
#### Keep-alive -`http1.keepAlive` defaults to false, but can be set to true, to allow connections to linger so that they can be reused. The `http1.keepAliveMsecs` time (defaults to 1000ms, i.e. 1s) specifies the delay before keep-alive probing. +`http1.keepAlive` defaults to true, to allow connections to linger so that they can be reused. The `http1.keepAliveMsecs` time (defaults to 1000ms, i.e. 1s) specifies the delay before keep-alive probing. #### Sockets diff --git a/lib/context-http1.ts b/lib/context-http1.ts index 2fb6528..dabe448 100644 --- a/lib/context-http1.ts +++ b/lib/context-http1.ts @@ -223,9 +223,10 @@ class OriginPool class ContextPool { + public readonly keepAlive: boolean | PerOrigin< boolean >; + private pools = new Map< string, OriginPool >( ); - private keepAlive: boolean | PerOrigin< boolean >; private keepAliveMsecs: number | PerOrigin< number >; private maxSockets: number | PerOrigin< number >; private maxFreeSockets: number | PerOrigin< number >; @@ -233,7 +234,7 @@ class ContextPool constructor( options: Partial< Http1Options > ) { - this.keepAlive = parsePerOrigin( options.keepAlive, false ); + this.keepAlive = parsePerOrigin( options.keepAlive, true ); this.keepAliveMsecs = parsePerOrigin( options.keepAliveMsecs, 1000 ); this.maxSockets = parsePerOrigin( options.maxSockets, 256 ); this.maxFreeSockets = parsePerOrigin( options.maxFreeSockets, Infinity ); @@ -345,6 +346,13 @@ export class H1Context ...auth, }; + if ( !options.headers ) + options.headers = { }; + + options.headers.connection = this.contextPool.keepAlive + ? "keep-alive" + : "close"; + return this.contextPool.getOriginPool( origin ).connect( options ); } diff --git a/test/fetch-h2/http1.ts b/test/fetch-h2/http1.ts new file mode 100644 index 0000000..e45d3c6 --- /dev/null +++ b/test/fetch-h2/http1.ts @@ -0,0 +1,65 @@ +import { makeMakeServer } from "../lib/server-helpers"; + +import { + context, + Response, +} from "../../"; + +function ensureStatusSuccess( response: Response ): Response +{ + if ( response.status < 200 || response.status >= 300 ) + throw new Error( "Status not 2xx" ); + return response; +} + + +describe( `http1`, ( ) => +{ + const { cycleOpts, makeServer } = + makeMakeServer( { proto: "http:", version: "http1" } ); + + describe( "keep-alive", ( ) => + { + it( "should not send 'connection: close' by default", async ( ) => + { + const { server, port } = await makeServer( ); + const { disconnectAll, fetch } = context( { ...cycleOpts } ); + + const response1 = ensureStatusSuccess( + await fetch( `http://localhost:${port}/headers` ) + ); + + const headers = await response1.json( ); + + expect( headers.connection ).not.toBe( "close" ); + + disconnectAll( ); + + await server.shutdown( ); + } ); + + it( "should send 'connection: close' if http1.keelAlive === false", + async ( ) => + { + const { server, port } = await makeServer( ); + const { disconnectAll, fetch } = context( { + ...cycleOpts, + http1: { + keepAlive: false, + }, + } ); + + const response1 = ensureStatusSuccess( + await fetch( `http://localhost:${port}/headers` ) + ); + + const headers = await response1.json( ); + + expect( headers.connection ).toBe( "close" ); + + disconnectAll( ); + + await server.shutdown( ); + } ); + } ); +} ); From 67878aad4deda65ef527535cfb80d439d9846e07 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Tue, 19 Mar 2019 21:25:02 +0100 Subject: [PATCH 31/77] test(coverage): Fixed coverage to cover TypeScript not transpiled JavaScript --- jest.config.js | 5 +++-- 
test/fetch-h2/body.ts | 3 ++- test/fetch-h2/context.ts | 3 ++- test/fetch-h2/http1.ts | 11 ++--------- test/fetch-h2/httpbin.ts | 2 +- test/fetch-h2/index.ts | 11 ++--------- test/lib/server-common.ts | 2 +- test/lib/utils.ts | 12 ++++++++++++ 8 files changed, 25 insertions(+), 24 deletions(-) diff --git a/jest.config.js b/jest.config.js index 3e84f54..2436668 100644 --- a/jest.config.js +++ b/jest.config.js @@ -1,6 +1,7 @@ module.exports = { preset: 'ts-jest', testEnvironment: 'node', - testMatch: ['**/*.ts'], - roots: ['/test/fetch-h2/'], + testMatch: ['/test/fetch-h2/**/*.ts'], + collectCoverageFrom: ['/lib/**', 'index.ts'], + coverageReporters: ['lcov', 'text', 'html'], }; diff --git a/test/fetch-h2/body.ts b/test/fetch-h2/body.ts index c7a0bb7..8e9c644 100644 --- a/test/fetch-h2/body.ts +++ b/test/fetch-h2/body.ts @@ -9,7 +9,8 @@ import { DataBody, JsonBody, StreamBody, -} from "../../"; +} from "../../index"; + async function makeSync< T >( fn: ( ) => PromiseLike< T > ) : Promise< ( ) => T > diff --git a/test/fetch-h2/context.ts b/test/fetch-h2/context.ts index 9616b29..1412800 100644 --- a/test/fetch-h2/context.ts +++ b/test/fetch-h2/context.ts @@ -5,7 +5,8 @@ import { context, CookieJar, Response, -} from "../../"; +} from "../../index"; + function ensureStatusSuccess( response: Response ): Response { diff --git a/test/fetch-h2/http1.ts b/test/fetch-h2/http1.ts index e45d3c6..8f3ace9 100644 --- a/test/fetch-h2/http1.ts +++ b/test/fetch-h2/http1.ts @@ -2,15 +2,8 @@ import { makeMakeServer } from "../lib/server-helpers"; import { context, - Response, -} from "../../"; - -function ensureStatusSuccess( response: Response ): Response -{ - if ( response.status < 200 || response.status >= 300 ) - throw new Error( "Status not 2xx" ); - return response; -} +} from "../../index"; +import { ensureStatusSuccess } from "../lib/utils"; describe( `http1`, ( ) => diff --git a/test/fetch-h2/httpbin.ts b/test/fetch-h2/httpbin.ts index e4ec943..9b53d34 100644 --- a/test/fetch-h2/httpbin.ts +++ b/test/fetch-h2/httpbin.ts @@ -10,7 +10,7 @@ import { HttpProtocols, JsonBody, StreamBody, -} from "../../"; +} from "../../index"; interface TestData diff --git a/test/fetch-h2/index.ts b/test/fetch-h2/index.ts index 1d81fb0..79abaf5 100644 --- a/test/fetch-h2/index.ts +++ b/test/fetch-h2/index.ts @@ -6,7 +6,7 @@ import * as through2 from "through2"; import { TestData } from "../lib/server-common"; import { makeMakeServer } from "../lib/server-helpers"; -import { cleanUrl, createIntegrity } from "../lib/utils"; +import { cleanUrl, createIntegrity, ensureStatusSuccess } from "../lib/utils"; import { hasBuiltinBrotli } from "../../lib/utils"; @@ -19,7 +19,7 @@ import { onPush as _onPush, Response, StreamBody, -} from "../../"; +} from "../../index"; async function getRejection< T >( promise: Promise< T > ): Promise< Error > @@ -35,13 +35,6 @@ async function getRejection< T >( promise: Promise< T > ): Promise< Error > throw new Error( "Expected exception" ); } -function ensureStatusSuccess( response: Response ): Response -{ - if ( response.status < 200 || response.status >= 300 ) - throw new Error( "Status not 2xx" ); - return response; -} - ( [ { proto: "http:", version: "http1" }, diff --git a/test/lib/server-common.ts b/test/lib/server-common.ts index 19b7455..3680cdd 100644 --- a/test/lib/server-common.ts +++ b/test/lib/server-common.ts @@ -11,7 +11,7 @@ import { Server as HttpsServer, } from "https"; -import { HttpProtocols } from "../../"; +import { HttpProtocols } from "../../index"; export interface 
TestData diff --git a/test/lib/utils.ts b/test/lib/utils.ts index a34eeec..2416e45 100644 --- a/test/lib/utils.ts +++ b/test/lib/utils.ts @@ -1,5 +1,10 @@ import { createHash } from "crypto"; +import { + Response, +} from "../../index"; + + export function createIntegrity( data: string, hashType = "sha256" ) { const hash = createHash( hashType ); @@ -9,3 +14,10 @@ export function createIntegrity( data: string, hashType = "sha256" ) export const cleanUrl = ( url: string ) => url.replace( /^http[12]:\/\//, "http://" ); + +export function ensureStatusSuccess( response: Response ): Response +{ + if ( response.status < 200 || response.status >= 300 ) + throw new Error( "Status not 2xx" ); + return response; +} From c83823cc4984c64036ca6f767fc1642bd2d228b2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Tue, 19 Mar 2019 21:28:03 +0100 Subject: [PATCH 32/77] feat(abort): Added AbortController support Fixes #35 --- README.md | 7 +- index.ts | 3 + lib/abort.ts | 39 +++++++++++ lib/body.ts | 21 +++++- lib/context-http2.ts | 1 + lib/core.ts | 9 +-- lib/fetch-common.ts | 10 +-- lib/fetch-http1.ts | 31 +++++++-- lib/fetch-http2.ts | 30 ++++++-- lib/response.ts | 13 +++- test/fetch-h2/abort.ts | 143 +++++++++++++++++++++++++++++++++++++++ test/lib/server-http1.ts | 19 ++++++ test/lib/server-http2.ts | 35 +++++++++- 13 files changed, 336 insertions(+), 25 deletions(-) create mode 100644 lib/abort.ts create mode 100644 test/fetch-h2/abort.ts diff --git a/README.md b/README.md index 76745c6..30f9ebb 100644 --- a/README.md +++ b/README.md @@ -54,6 +54,7 @@ import { Request, Response, AbortError, + AbortController, TimeoutError, ContextOptions, @@ -67,7 +68,11 @@ import { } from 'fetch-h2' ``` -Apart from the obvious `fetch`, the functions `setup`, `context`, `disconnect`, `disconnectAll` and `onPush` are described below, and the classes [`Body`](https://developer.mozilla.org/docs/Web/API/Body), [`Headers`](https://developer.mozilla.org/docs/Web/API/Headers), [`Request`](https://developer.mozilla.org/docs/Web/API/Request) and [`Response`](https://developer.mozilla.org/docs/Web/API/Response) are part of the [Fetch API](https://developer.mozilla.org/docs/Web/API/Fetch_API). `AbortError` is the error thrown in case of an [abort signal](https://developer.mozilla.org/docs/Web/API/AbortSignal) (this is also the error thrown in case of a *timeout*, which in `fetch-h2` is internally implemented as an abort signal), `TimeoutError` is thrown if the request times out. +Apart from the obvious `fetch`, the functions `setup`, `context`, `disconnect`, `disconnectAll` and `onPush` are described below, and the classes [`Body`](https://developer.mozilla.org/docs/Web/API/Body), [`Headers`](https://developer.mozilla.org/docs/Web/API/Headers), [`Request`](https://developer.mozilla.org/docs/Web/API/Request) and [`Response`](https://developer.mozilla.org/docs/Web/API/Response) are part of the [Fetch API](https://developer.mozilla.org/docs/Web/API/Fetch_API). + +`AbortError` is the error thrown in case of an [abort signal](https://developer.mozilla.org/docs/Web/API/AbortSignal) (this is also the error thrown in case of a *timeout*, which in `fetch-h2` is internally implemented as an abort signal) and the [`AbortController`](https://developer.mozilla.org/docs/Web/API/AbortController) provides a way to abort requests. + +`TimeoutError` is thrown if the request times out. The `ContextOptions`, `DecodeFunction` and `Decoder` types are described below. 
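A minimal usage sketch of the AbortController API introduced in this patch, for context; the endpoint URL and the 100 ms delay are made-up placeholder values, and the snippet is not part of the diff itself:

import { AbortController, AbortError, fetch } from "fetch-h2";

async function fetchWithManualAbort( )
{
	const controller = new AbortController( );

	// Abort the request if it hasn't completed within 100 ms (placeholder value)
	setTimeout( ( ) => controller.abort( ), 100 );

	try
	{
		const response = await fetch(
			"https://example.com/slow", // hypothetical endpoint
			{ signal: controller.signal }
		);
		return await response.text( );
	}
	catch ( err )
	{
		// An aborted request (including timeouts) rejects with AbortError
		if ( err instanceof AbortError )
			return null;
		throw err;
	}
}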
diff --git a/index.ts b/index.ts index 69c942f..37c3133 100644 --- a/index.ts +++ b/index.ts @@ -1,3 +1,4 @@ +import { AbortController, AbortSignal } from "./lib/abort"; import { Body, DataBody, JsonBody, StreamBody } from "./lib/body"; import { Context, ContextOptions } from "./lib/context"; import { PushHandler } from "./lib/context-http2"; @@ -55,6 +56,8 @@ export { onPush, // Re-export + AbortController, + AbortSignal, HttpProtocols, Body, JsonBody, diff --git a/lib/abort.ts b/lib/abort.ts new file mode 100644 index 0000000..ad30b42 --- /dev/null +++ b/lib/abort.ts @@ -0,0 +1,39 @@ +import { EventEmitter } from "events"; + + +export const signalEvent = "internal-abort"; + +export interface AbortSignal extends EventEmitter +{ + readonly aborted: boolean; + onabort: ( ) => void; +} + +class AbortSignalImpl extends EventEmitter implements AbortSignal +{ + public aborted = false; + + constructor( ) + { + super( ); + + this.once( signalEvent, ( ) => + { + this.aborted = true; + this.emit( "abort" ); + this.onabort && this.onabort( ); + } ); + } + + public onabort = ( ) => { }; +} + +export class AbortController +{ + public readonly signal: AbortSignal = new AbortSignalImpl( ); + + public abort = ( ) => + { + this.signal.emit( signalEvent ); + } +} diff --git a/lib/body.ts b/lib/body.ts index 4005865..808ecef 100644 --- a/lib/body.ts +++ b/lib/body.ts @@ -5,7 +5,8 @@ import getStream from "get-stream"; import * as through2 from "through2"; import * as toArrayBuffer from "to-arraybuffer"; -import { BodyTypes, IBody, StorageBodyTypes } from "./core"; +import { AbortSignal } from "./abort"; +import { AbortError, BodyTypes, IBody, StorageBodyTypes } from "./core"; function makeUnknownDataError( ) @@ -47,6 +48,7 @@ export class Body implements IBody private _body?: StorageBodyTypes | null; private _used: boolean; private _integrity?: string; + private _signal?: AbortSignal; constructor( ) { @@ -64,6 +66,7 @@ export class Body implements IBody public async arrayBuffer( allowIncomplete = false ): Promise< ArrayBuffer > { this._ensureUnused( ); + this._ensureNotAborted( ); if ( this._body == null ) return this.validateIntegrity( emptyBuffer, allowIncomplete ); @@ -93,6 +96,7 @@ export class Body implements IBody public async json( ): Promise< any > { this._ensureUnused( ); + this._ensureNotAborted( ); if ( this._body == null ) return Promise.resolve( @@ -118,6 +122,7 @@ export class Body implements IBody public async text( allowIncomplete = false ): Promise< string > { this._ensureUnused( ); + this._ensureNotAborted( ); if ( this._body == null ) return Promise.resolve( @@ -143,6 +148,7 @@ export class Body implements IBody public async readable( ): Promise< NodeJS.ReadableStream > { this._ensureUnused( ); + this._ensureNotAborted( ); if ( this._body == null ) { @@ -164,6 +170,11 @@ export class Body implements IBody throw makeUnknownDataError( ); } + protected setSignal( signal: AbortSignal | undefined ) + { + this._signal = signal; + } + protected hasBody( ): boolean { return "_body" in this; @@ -210,6 +221,8 @@ export class Body implements IBody ) : T { + this._ensureNotAborted( ); + if ( !allowIncomplete && this._length != null && @@ -242,6 +255,12 @@ export class Body implements IBody return data; } + private _ensureNotAborted( ) + { + if ( this._signal && this._signal.aborted ) + throw new AbortError( "Response aborted" ); + } + private _ensureUnused( ) { if ( this._used ) diff --git a/lib/context-http2.ts b/lib/context-http2.ts index e5aea6a..ef387b5 100644 --- a/lib/context-http2.ts +++ 
b/lib/context-http2.ts @@ -359,6 +359,7 @@ export class H2Context responseHeaders, false, { }, + void 0, 2, false ); diff --git a/lib/core.ts b/lib/core.ts index 4256662..eac6541 100644 --- a/lib/core.ts +++ b/lib/core.ts @@ -1,6 +1,7 @@ import { ClientRequest } from "http"; import { ClientHttp2Session } from "http2"; +import { AbortSignal } from "./abort"; import { CookieJar } from "./cookie-jar"; import { Headers, RawHeaders } from "./headers"; @@ -103,12 +104,6 @@ export interface IBody readable( ): Promise< NodeJS.ReadableStream >; } -export interface Signal -{ - readonly aborted: boolean; - onabort: ( ) => void; -} - export interface RequestInitWithoutBody { method: Method; @@ -138,7 +133,7 @@ export type OnTrailers = ( headers: Headers ) => void; export interface FetchInit extends RequestInit { - signal: Signal; + signal: AbortSignal; // This is a helper (just like node-fetch), not part of the Fetch API. // Must not be used if signal is used. diff --git a/lib/fetch-common.ts b/lib/fetch-common.ts index 0e8f02e..dabc2a1 100644 --- a/lib/fetch-common.ts +++ b/lib/fetch-common.ts @@ -247,15 +247,17 @@ export async function setupFetch( if ( signal && signal.aborted ) throw abortError( ); + let abortHandler: ( ( ) => void ) | undefined; + const signalPromise: Promise< Response > | null = signal ? new Promise< Response >( ( _resolve, reject ) => { - signal.onabort = ( ) => + signal.once( "abort", abortHandler = ( ) => { reject( abortError( ) ); - }; + } ); } ) : null; @@ -264,8 +266,8 @@ export async function setupFetch( if ( timeoutInfo && timeoutInfo.clear ) timeoutInfo.clear( ); - if ( signal ) - delete signal.onabort; + if ( signal && abortHandler ) + signal.removeListener( "abort", abortHandler ); } return { diff --git a/lib/fetch-http1.ts b/lib/fetch-http1.ts index 7fdcd91..afa9801 100644 --- a/lib/fetch-http1.ts +++ b/lib/fetch-http1.ts @@ -4,6 +4,7 @@ import { Socket } from "net"; import { syncGuard } from "callguard"; +import { AbortController } from "./abort"; import { FetchInit, SimpleSessionHttp1, @@ -113,11 +114,32 @@ export async function fetchImpl( { res.once( "end", socketCleanup ); - if ( signal && signal.aborted ) + const { + signal: bodySignal = void 0, + abort: bodyAbort = void 0, + } = signal ? new AbortController( ) : { }; + + if ( signal ) { - // No reason to continue, the request is aborted - req.abort( ); - return; + const abortHandler = ( ) => + { + ( < ( ) => void >bodyAbort )( ); + req.abort( ); + res.destroy( ); + }; + + if ( signal.aborted ) + { + // No reason to continue, the request is aborted + abortHandler( ); + return; + } + + signal.once( "abort", abortHandler ); + res.once( "end", ( ) => + { + signal.removeListener( "abort", abortHandler ); + } ); } const { headers, statusCode } = res; @@ -186,6 +208,7 @@ export async function fetchImpl( status: res.statusCode, statusText: res.statusMessage, }, + bodySignal, 1, input.allowForbiddenHeaders, integrity diff --git a/lib/fetch-http2.ts b/lib/fetch-http2.ts index 3b7621d..bde2f95 100644 --- a/lib/fetch-http2.ts +++ b/lib/fetch-http2.ts @@ -5,6 +5,7 @@ import { import { syncGuard } from "callguard"; +import { AbortController } from "./abort"; import { AbortError, FetchInit, @@ -226,11 +227,31 @@ async function fetchImpl( stream.on( "response", guard( ( headers: IncomingHttp2Headers ) => { - if ( signal && signal.aborted ) + const { + signal: bodySignal = void 0, + abort: bodyAbort = void 0, + } = signal ? 
new AbortController( ) : { }; + + if ( signal ) { - // No reason to continue, the request is aborted - stream.destroy( ); - return; + const abortHandler = ( ) => + { + ( < ( ) => void >bodyAbort )( ); + stream.destroy( ); + }; + + if ( signal.aborted ) + { + // No reason to continue, the request is aborted + abortHandler( ); + return; + } + + signal.once( "abort", abortHandler ); + stream.once( "close", ( ) => + { + signal.removeListener( "abort", abortHandler ); + } ); } const status = "" + headers[ HTTP2_HEADER_STATUS ]; @@ -269,6 +290,7 @@ async function fetchImpl( ? false : extra.redirected.length > 0, { }, + bodySignal, 2, input.allowForbiddenHeaders, integrity diff --git a/lib/response.ts b/lib/response.ts index ae37607..ebbdef1 100644 --- a/lib/response.ts +++ b/lib/response.ts @@ -26,6 +26,10 @@ import { ResponseTypes, } from "./core"; +import { + AbortSignal, +} from "./abort"; + import { hasBuiltinBrotli, } from "./utils"; @@ -50,6 +54,7 @@ interface Extra httpVersion: HttpVersion; redirected: boolean; integrity: string; + signal: AbortSignal; type: ResponseTypes; url: string; } @@ -96,6 +101,8 @@ export class Response extends Body const url = _extra.url || ""; const integrity = _extra.integrity || null; + this.setSignal( _extra.signal ); + if ( body ) { const contentType = headers.get( HTTP2_HEADER_CONTENT_TYPE ); @@ -240,13 +247,14 @@ function makeExtra( httpVersion: HttpVersion, url: string, redirected: boolean, + signal?: AbortSignal, integrity?: string ) : Partial< Extra > { const type = "basic"; // TODO: Implement CORS - return { httpVersion, redirected, integrity, type, url }; + return { httpVersion, redirected, integrity, signal, type, url }; } function handleEncoding( @@ -298,6 +306,7 @@ export class StreamResponse extends Response headers: IncomingHttpHeaders, redirected: boolean, init: Partial< ResponseInit >, + signal: AbortSignal | undefined, httpVersion: HttpVersion, allowForbiddenHeaders: boolean, integrity?: string @@ -318,7 +327,7 @@ export class StreamResponse extends Response : makeInitHttp2( headers, allowForbiddenHeaders ) ), }, - makeExtra( httpVersion, url, redirected, integrity ) + makeExtra( httpVersion, url, redirected, signal, integrity ) ); } } diff --git a/test/fetch-h2/abort.ts b/test/fetch-h2/abort.ts new file mode 100644 index 0000000..960914e --- /dev/null +++ b/test/fetch-h2/abort.ts @@ -0,0 +1,143 @@ +import { + AbortController, + AbortError, + fetch, +} from "../../index"; + +import { Server } from "../lib/server-common"; +import { makeServer as makeServerHttp1 } from "../lib/server-http1"; +import { makeServer as makeServerHttp2 } from "../lib/server-http2"; +import { ensureStatusSuccess } from "../lib/utils"; + +type Protocols = "http1" | "http2"; +const protos: Array< Protocols > = [ "http1", "http2" ]; + +async function makeServer( proto: Protocols ) +: Promise< { server: Server; port: number | null; } > +{ + if ( proto === "http1" ) + return makeServerHttp1( ); + else if ( proto === "http2" ) + return makeServerHttp2( ); + return < any >void 0; +} + +const testProtos = protos.map( proto => ( { + makeServer: ( ) => makeServer( proto ), + proto: proto === "http1" ? 
"http" : "http2", + version: proto, +} ) ); + +describe( "abort", ( ) => +{ + describe( "AbortController", ( ) => + { + it( "should create proper signal and trigger abort once", async ( ) => + { + const controller = new AbortController( ); + + const signal = controller.signal; + + const spy = jest.fn( ); + + signal.on( "abort", spy ); + + expect( signal.aborted ).toBe( false ); + controller.abort( ); + expect( signal.aborted ).toBe( true ); + controller.abort( ); + expect( signal.aborted ).toBe( true ); + + expect( spy.mock.calls.length ).toBe( 1 ); + } ); + + it( "should be destructable", async ( ) => + { + const { signal, abort } = new AbortController( ); + + const spy = jest.fn( ); + + signal.on( "abort", spy ); + + expect( signal.aborted ).toBe( false ); + abort( ); + expect( signal.aborted ).toBe( true ); + abort( ); + expect( signal.aborted ).toBe( true ); + + expect( spy.mock.calls.length ).toBe( 1 ); + } ); + + it( "signal.onaborted should trigger once", async ( ) => + { + const { signal, abort } = new AbortController( ); + + const spy = jest.fn( ); + + signal.onabort = spy; + + expect( signal.aborted ).toBe( false ); + abort( ); + expect( signal.aborted ).toBe( true ); + abort( ); + expect( signal.aborted ).toBe( true ); + + expect( spy.mock.calls.length ).toBe( 1 ); + } ); + } ); + + testProtos.forEach( ( { proto, makeServer, version } ) => + describe( `fetch (${version})`, ( ) => + { + it( "should handle pre-aborted", async ( ) => + { + const { signal, abort } = new AbortController( ); + + const { server, port } = await makeServer( ); + + abort( ); + + const awaitFetch = + fetch( `${proto}://localhost:${port}/delay/100`, { signal } ); + + await expect( awaitFetch ).rejects.toThrowError( AbortError ); + + await server.shutdown( ); + } ); + + it( "should handle abort on request", async ( ) => + { + const { signal, abort } = new AbortController( ); + + const { server, port } = await makeServer( ); + + setTimeout( abort, 20 ); + + const awaitFetch = + fetch( `${proto}://localhost:${port}/delay/100`, { signal } ); + + await expect( awaitFetch ).rejects.toThrowError( AbortError ); + + await server.shutdown( ); + } ); + + it( "should handle abort on body", async ( ) => + { + const { signal, abort } = new AbortController( ); + + const { server, port } = await makeServer( ); + + setTimeout( abort, 50 ); + + const response = ensureStatusSuccess( + await fetch( `${proto}://localhost:${port}/slow/100`, { signal } ) + ); + + const awaitBody = response.arrayBuffer( ); + + await expect( awaitBody ).rejects.toThrowError( AbortError ); + + await server.shutdown( ); + } ); + } ) ); +} ); diff --git a/test/lib/server-http1.ts b/test/lib/server-http1.ts index c2cf53b..8259cdd 100644 --- a/test/lib/server-http1.ts +++ b/test/lib/server-http1.ts @@ -243,6 +243,25 @@ export class ServerHttp1 extends TypedServer< HttpServer | HttpsServer > else request.pipe( response ); } + else if ( path.startsWith( "/delay/" ) ) + { + const waitMs = parseInt( path.replace( "/delay/", "" ), 10 ); + + if ( waitMs > 0 ) + await delay( waitMs ); + + const responseHeaders = { + ":status": 200, + [ HTTP2_HEADER_CONTENT_LENGTH ]: "10", + }; + + sendHeaders( responseHeaders ); + + response.write( "abcde" ); + + ignoreError( ( ) => response.write( "fghij" ) ); + ignoreError( ( ) => response.end( ) ); + } else if ( path.startsWith( "/slow/" ) ) { const waitMs = parseInt( path.replace( "/slow/", "" ), 10 ); diff --git a/test/lib/server-http2.ts b/test/lib/server-http2.ts index 1c2ac95..24da286 100644 --- 
a/test/lib/server-http2.ts +++ b/test/lib/server-http2.ts @@ -33,6 +33,7 @@ const { export class ServerHttp2 extends TypedServer< Http2Server > { private _sessions: Set< Http2Session >; + private _awaits: Array< Promise< any > > = [ ]; constructor( opts: ServerOptions ) { @@ -48,12 +49,21 @@ export class ServerHttp2 extends TypedServer< Http2Server > this._server.on( "stream", ( stream, headers ) => { - this.onStream( stream, headers ) + const awaitStream = this.onStream( stream, headers ) .catch( err => { - console.error( "Unit test server failed", err ); + console.error( "Unit test server failed", err.stack ); process.exit( 1 ); + } ) + .then( ( ) => + { + const index = this._awaits.findIndex( promise => + promise === awaitStream ); + if ( index !== -1 ) + this._awaits.splice( index, 1 ); } ); + + this._awaits.push( awaitStream ); } ); } @@ -63,6 +73,7 @@ export class ServerHttp2 extends TypedServer< Http2Server > { session.destroy( ); } + await Promise.all( this._awaits ); this._sessions.clear( ); } @@ -274,6 +285,23 @@ export class ServerHttp2 extends TypedServer< Http2Server > ignoreError( ( ) => stream.write( "fghij" ) ); ignoreError( ( ) => stream.end( ) ); } + else if ( path.startsWith( "/delay/" ) ) + { + const waitMs = parseInt( path.replace( "/delay/", "" ), 10 ); + + if ( waitMs > 0 ) + await delay( waitMs ); + + const responseHeaders = { + ":status": 200, + [ HTTP2_HEADER_CONTENT_LENGTH ]: "10", + }; + + ignoreError( ( ) => stream.respond( responseHeaders ) ); + ignoreError( ( ) => stream.write( "abcde" ) ); + ignoreError( ( ) => stream.write( "fghij" ) ); + ignoreError( ( ) => stream.end( ) ); + } else if ( path.startsWith( "/slow/" ) ) { const waitMs = parseInt( path.replace( "/slow/", "" ), 10 ); @@ -308,6 +336,9 @@ export class ServerHttp2 extends TypedServer< Http2Server > stream.end( ); } } + + if ( !stream.closed ) + return new Promise( resolve => stream.once( "close", resolve ) ); } } From 89236f4d7b3e1c7616ea7bbb5ed59308832e6768 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Wed, 20 Mar 2019 08:26:05 +0100 Subject: [PATCH 33/77] build(jest): Force-exit tests (this is temporary) --- package.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index 6050473..de5e48d 100644 --- a/package.json +++ b/package.json @@ -22,12 +22,12 @@ "lint": "node_modules/.bin/tslint --project .", "jest": "node_modules/.bin/jest", "jest:debug": "node --inspect-brk node_modules/.bin/jest", - "test": "npm run lint && node_modules/.bin/jest --coverage", + "test": "npm run lint && node_modules/.bin/jest --forceExit --detectOpenHandles --coverage", "buildtest": "npm run build && npm run jest", "buildtestcov": "npm run build && npm run test", "coverage": "node_modules/.bin/nyc report --reporter=html", "coveralls": "cat coverage/lcov.info | node_modules/.bin/coveralls", - "version": "./node_modules/.bin/ts-node scripts/version-update.ts && npm run buildtest && scripts/version-git-add.sh", + "version": "./node_modules/.bin/ts-node scripts/version-update.ts && npm run build && npm run test && scripts/version-git-add.sh", "prepack": "npm run build && npm run test", "makecerts": "openssl req -x509 -nodes -days 7300 -newkey rsa:2048 -keyout certs/key.pem -out certs/cert.pem", "travis-deploy-once": "travis-deploy-once", From 6ecf5b0c5c406c300b4321f7755dcb443b900936 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Sat, 4 May 2019 11:45:33 +0200 Subject: [PATCH 34/77] test(deps): Bumped versions of 
dev dependencies --- package.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index de5e48d..8607ce1 100644 --- a/package.json +++ b/package.json @@ -52,7 +52,7 @@ "@types/execa": "0.x", "@types/from2": "2.x", "@types/jest": "24.x", - "@types/node": "11.x", + "@types/node": "12.x", "@types/through2": "2.x", "commitizen": "3.x", "coveralls": "3.x", @@ -60,7 +60,7 @@ "execa": "1.x", "from2": "2.x", "jest": "24.x", - "nyc": "13.x", + "nyc": "14.x", "rimraf": "2.x", "semantic-release": "15.x", "travis-deploy-once": "5.x", From 68c02a31c8366550a32961397d87c30d703af284 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Mon, 30 Sep 2019 09:28:09 +0200 Subject: [PATCH 35/77] build(deps): bumped dev deps --- package.json | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/package.json b/package.json index 8607ce1..5e6bd21 100644 --- a/package.json +++ b/package.json @@ -49,19 +49,19 @@ "typescript" ], "devDependencies": { - "@types/execa": "0.x", + "@types/execa": "^2.0.0", "@types/from2": "2.x", "@types/jest": "24.x", "@types/node": "12.x", "@types/through2": "2.x", - "commitizen": "3.x", + "commitizen": "^4.0.3", "coveralls": "3.x", - "cz-conventional-changelog": "2.x", - "execa": "1.x", + "cz-conventional-changelog": "^3.0.2", + "execa": "^2.0.4", "from2": "2.x", "jest": "24.x", "nyc": "14.x", - "rimraf": "2.x", + "rimraf": "^3.0.0", "semantic-release": "15.x", "travis-deploy-once": "5.x", "ts-jest": "24.x", From 1170be88eb68183e398e185c10adb9fde5628c6a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Mon, 30 Sep 2019 09:28:50 +0200 Subject: [PATCH 36/77] build(ts): fixed minor typing issues with latest TypeScript --- lib/core.ts | 2 +- lib/fetch-http2.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/core.ts b/lib/core.ts index eac6541..8268037 100644 --- a/lib/core.ts +++ b/lib/core.ts @@ -202,7 +202,7 @@ export function getByOrigin< T >( } export function parsePerOrigin< T >( - val: T | PerOrigin< T > | void, + val: T | PerOrigin< T > | undefined, _default: T ) : T | PerOrigin< T > diff --git a/lib/fetch-http2.ts b/lib/fetch-http2.ts index bde2f95..a790892 100644 --- a/lib/fetch-http2.ts +++ b/lib/fetch-http2.ts @@ -362,7 +362,7 @@ export function fetch( { const timeoutAt = void 0; - const raceConditionedGoaway = new Set( ); + const raceConditionedGoaway = new Set< string>( ); const extra = { timeoutAt, redirected: [ ], raceConditionedGoaway }; return fetchImpl( session, input, init, extra ); From c2dbbbd03e18c425d1169c04af33e1c9f3065d50 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Mon, 7 Oct 2019 13:21:31 +0200 Subject: [PATCH 37/77] fix(core): fixed invalid default accept header (missing 'q=') Most web servers ignores invalid q-factors, but some are pedantic and reply with a 400. 
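(Editor's illustration, not part of the patch: the helper below is a minimal sketch, written only to show how a server weights the corrected default Accept header and why the old "text/*;0.9" entry, which lacks "q=", can be rejected by strict implementations.)

```ts
// Illustrative only, not fetch-h2 code.
// Old (malformed): "application/json, text/*;0.9, */*;q=0.8"
// New (valid):     "application/json,text/*;q=0.9,*/*;q=0.8"
const fixedDefaultAccept = "application/json,text/*;q=0.9,*/*;q=0.8";

// Sketch of how a server ranks media types by q-factor (default q is 1):
function acceptPreferences( accept: string )
: Array< { type: string; q: number } >
{
	return accept
		.split( "," )
		.map( part =>
		{
			const [ type, ...params ] = part.trim( ).split( ";" );
			const qParam = params
				.map( p => p.trim( ) )
				.find( p => p.startsWith( "q=" ) );
			const q = qParam ? parseFloat( qParam.slice( 2 ) ) : 1;
			return { type, q };
		} )
		.sort( ( a, b ) => b.q - a.q );
}

// acceptPreferences( fixedDefaultAccept ) yields, in order:
// application/json (q=1), text/* (q=0.9), */* (q=0.8)
```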
--- lib/context.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/context.ts b/lib/context.ts index c6999c1..ddfe69d 100644 --- a/lib/context.ts +++ b/lib/context.ts @@ -42,7 +42,7 @@ function makeDefaultUserAgent( ): string } const defaultUserAgent = makeDefaultUserAgent( ); -const defaultAccept = "application/json, text/*;0.9, */*;q=0.8"; +const defaultAccept = "application/json,text/*;q=0.9,*/*;q=0.8"; export interface ContextOptions { From b94058439166afa2f5b716e7c8d0a7b7497eb247 Mon Sep 17 00:00:00 2001 From: Stefan Guggisberg Date: Thu, 5 Dec 2019 21:05:34 +0100 Subject: [PATCH 38/77] fix(context-http2): don't guard headers received as part of a PUSH_PROMISE (#67) fix(context-http2): headers received as part of a PUSH_PROMISE should not be guarded Headers received as part of a PUSH_PROMISE should not be subject to forbidden headers check fix #66 --- lib/context-http2.ts | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/lib/context-http2.ts b/lib/context-http2.ts index ef387b5..5b9627b 100644 --- a/lib/context-http2.ts +++ b/lib/context-http2.ts @@ -331,7 +331,10 @@ export class H2Context .filter( name => name.charAt( 0 ) === ":" ) .forEach( name => { delete requestHeaders[ name ]; } ); - const pushedRequest = new Request( path, { headers: requestHeaders } ); + const pushedRequest = new Request( + path, + { headers: requestHeaders, allowForbiddenHeaders: true } + ); ref( ); From 0f3f677bf3f8c83895d53936b413bf0cfb78dc5a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Sun, 5 May 2019 21:57:23 +0200 Subject: [PATCH 39/77] ci(node): Added Node.js 12 to travis --- .travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.travis.yml b/.travis.yml index 9889b87..df0bffc 100644 --- a/.travis.yml +++ b/.travis.yml @@ -7,6 +7,7 @@ notifications: node_js: - "10" - "11" + - "12" install: - npm install - npm run build From 5a40852c8199d444987a0d53eaa2537302cc9507 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Wed, 11 Dec 2019 23:57:39 +0100 Subject: [PATCH 40/77] fix(test): Made unit test frameworks treat Headers as a Map Fix #69 --- lib/headers.ts | 5 +++++ test/fetch-h2/headers.ts | 40 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 45 insertions(+) create mode 100644 test/fetch-h2/headers.ts diff --git a/lib/headers.ts b/lib/headers.ts index b19538d..0ebf80d 100644 --- a/lib/headers.ts +++ b/lib/headers.ts @@ -166,6 +166,11 @@ export class Headers } } + get [ Symbol.toStringTag ]( ) + { + return "Map"; // This causes unit test libraries to treat this as a Map + } + public append( name: string, value: string ): void { const _name = filterName( name ); diff --git a/test/fetch-h2/headers.ts b/test/fetch-h2/headers.ts new file mode 100644 index 0000000..90ec8c4 --- /dev/null +++ b/test/fetch-h2/headers.ts @@ -0,0 +1,40 @@ +import { Headers } from "../../index" +import { GuardedHeaders } from "../../lib/headers" + + +describe( "headers", ( ) => +{ + describe( "regular", ( ) => + { + it( "empty", async ( ) => + { + const headers = new Headers( ); + + expect( headers ).toMatchObject( new Map( ) ); + } ); + + it( "value", async ( ) => + { + const headers = new Headers( { a: "b" } ); + + expect( headers ).toMatchObject( new Map( [ [ "a", "b" ] ] ) ); + } ); + } ); + + describe( "gaurded", ( ) => + { + it( "empty", async ( ) => + { + const headers = new GuardedHeaders( "response" ); + + expect( headers ).toMatchObject( new Map( ) ); + } ); + + it( "value", async ( ) => + { + const headers = 
new GuardedHeaders( "response", { a: "b" } ); + + expect( headers ).toMatchObject( new Map( [ [ "a", "b" ] ] ) ); + } ); + } ); +} ); From 62be943097e61a58c20185ae5057a86da7846973 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Tue, 7 Jan 2020 12:46:58 +0100 Subject: [PATCH 41/77] fix(typings): export missing Method type --- index.ts | 2 ++ 1 file changed, 2 insertions(+) diff --git a/index.ts b/index.ts index 37c3133..9560d4c 100644 --- a/index.ts +++ b/index.ts @@ -9,6 +9,7 @@ import { Decoder, FetchInit, HttpProtocols, + Method, OnTrailers, TimeoutError, } from "./lib/core"; @@ -73,4 +74,5 @@ export { DecodeFunction, Decoder, CookieJar, + Method, }; From 1c2a6aac975a898fcf0d29fc22b8cf38ff2eb3e9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Tue, 7 Jan 2020 13:01:43 +0100 Subject: [PATCH 42/77] feat(headers): added [Symbol.iterator] to Headeres to make it easily iterable, like a Map re #69 --- lib/headers.ts | 5 +++++ test/fetch-h2/headers.ts | 46 ++++++++++++++++++++++++++++++++++------ 2 files changed, 44 insertions(+), 7 deletions(-) diff --git a/lib/headers.ts b/lib/headers.ts index 0ebf80d..7863a7b 100644 --- a/lib/headers.ts +++ b/lib/headers.ts @@ -171,6 +171,11 @@ export class Headers return "Map"; // This causes unit test libraries to treat this as a Map } + public [ Symbol.iterator ]( ) + { + return this.entries( ); + } + public append( name: string, value: string ): void { const _name = filterName( name ); diff --git a/test/fetch-h2/headers.ts b/test/fetch-h2/headers.ts index 90ec8c4..fd47ce6 100644 --- a/test/fetch-h2/headers.ts +++ b/test/fetch-h2/headers.ts @@ -1,7 +1,15 @@ -import { Headers } from "../../index" -import { GuardedHeaders } from "../../lib/headers" +import { Headers } from "../../index"; +import { GuardedHeaders } from "../../lib/headers"; +const toObject = ( keyvals: IterableIterator< [ string, string ] > ) => + [ ...keyvals ].reduce( + ( prev, cur ) => + Object.assign( prev, { [ cur[ 0 ] ]: cur[ 1 ] } ) + , + { } + ); + describe( "headers", ( ) => { describe( "regular", ( ) => @@ -10,31 +18,55 @@ describe( "headers", ( ) => { const headers = new Headers( ); - expect( headers ).toMatchObject( new Map( ) ); + expect( toObject( headers.entries( ) ) ).toMatchObject( { } ); } ); it( "value", async ( ) => { const headers = new Headers( { a: "b" } ); - expect( headers ).toMatchObject( new Map( [ [ "a", "b" ] ] ) ); + expect( toObject( headers.entries( ) ) ) + .toMatchObject( { a: "b" } ); } ); } ); - describe( "gaurded", ( ) => + describe( "guarded", ( ) => { it( "empty", async ( ) => { const headers = new GuardedHeaders( "response" ); - expect( headers ).toMatchObject( new Map( ) ); + expect( toObject( headers.entries( ) ) ).toMatchObject( { } ); } ); it( "value", async ( ) => { const headers = new GuardedHeaders( "response", { a: "b" } ); - expect( headers ).toMatchObject( new Map( [ [ "a", "b" ] ] ) ); + expect( toObject( headers.entries( ) ) ) + .toMatchObject( { a: "b" } ); + } ); + } ); + + describe( "iterable", ( ) => + { + it( "for-of iterable", async ( ) => + { + const headers = new GuardedHeaders( "response" ); + headers.append( "foo", "bar" ); + headers.append( "foo", "baz" ); + headers.append( "a", "b" ); + + const test: any = { }; + for ( const [ key, value ] of headers ) + { + test[ key ] = value; + } + + expect( test ).toMatchObject( { + a: "b", + foo: "bar,baz", + } ); } ); } ); } ); From 2c21a50052629fb6cf961d16f5368bc0ab425297 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Tue, 7 Jan 2020 13:30:59 +0100 Subject: [PATCH 43/77] ci(httpbin): increased test timeout for httpbin tests --- test/fetch-h2/httpbin.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/fetch-h2/httpbin.ts b/test/fetch-h2/httpbin.ts index 9b53d34..60b8117 100644 --- a/test/fetch-h2/httpbin.ts +++ b/test/fetch-h2/httpbin.ts @@ -34,7 +34,7 @@ const name = `${site} (${protos[ 0 ]} over ${protocol.replace( ":", "" )})`; describe( name, ( ) => { - jest.setTimeout( 5000 ); + jest.setTimeout( 10000 ); function wrapContext( fn: ( fetch: typeof fetchType ) => Promise< void > ) { From e877529170da731359e498e951a3a3baf8d74048 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Tue, 28 Jan 2020 18:06:26 +0100 Subject: [PATCH 44/77] fix(ts): lock transpilation to es2018 --- tsconfig.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tsconfig.json b/tsconfig.json index a3d457f..9fe7e19 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -5,7 +5,7 @@ "outDir": "dist", "sourceMap": true, "module": "CommonJS", - "target": "esnext", + "target": "es2018", "noUnusedParameters": true, "noUnusedLocals": true, "strict": true From baa5431f0498dcd93b37823a4cc6d1d3eca9476e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Tue, 28 Jan 2020 18:09:04 +0100 Subject: [PATCH 45/77] ci(travis): changed .travis.yml to use deploy pipeline (and skipped Node 11) --- .travis.yml | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/.travis.yml b/.travis.yml index df0bffc..fcfa01e 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,19 +1,20 @@ language: node_js -cache: - directories: - - ~/.npm notifications: email: false node_js: - "10" - - "11" - "12" -install: - - npm install - - npm run build -after_success: - - npm run coveralls - - npm run travis-deploy-once "npm run semantic-release" + - "13" +before_script: + - "npm run build" branches: except: - /^v\d+\.\d+\.\d+$/ +jobs: + include: + - stage: release + node_js: lts/* + script: + - npm run test + - cat coverage/lcov.info | npx coveralls + - npx semantic-release From c38ebc05f3a87ad3bcd5e1238b56b22b88e195b2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Wed, 29 Jan 2020 00:07:40 +0100 Subject: [PATCH 46/77] fix(abort): fixed abort signal handling on body reading --- lib/body.ts | 42 ++++++++++++++++++++++++++++++++++++++---- test/fetch-h2/abort.ts | 4 ++-- 2 files changed, 40 insertions(+), 6 deletions(-) diff --git a/lib/body.ts b/lib/body.ts index 808ecef..825714a 100644 --- a/lib/body.ts +++ b/lib/body.ts @@ -9,6 +9,8 @@ import { AbortSignal } from "./abort"; import { AbortError, BodyTypes, IBody, StorageBodyTypes } from "./core"; +const abortError = new AbortError( "Response aborted" ); + function makeUnknownDataError( ) { return new Error( "Unknown body data" ); @@ -72,7 +74,7 @@ export class Body implements IBody return this.validateIntegrity( emptyBuffer, allowIncomplete ); else if ( isStream( this._body ) ) - return getStream.buffer( < NodeJS.ReadableStream >this._body ) + return this.awaitBuffer( < NodeJS.ReadableStream >this._body ) .then( buffer => this.validateIntegrity( buffer, allowIncomplete ) ) @@ -104,7 +106,7 @@ export class Body implements IBody ) .then( ( ) => this._body ); else if ( isStream( this._body ) ) - return getStream.buffer( < NodeJS.ReadableStream >this._body ) + return this.awaitBuffer( < NodeJS.ReadableStream >this._body ) .then( tap( buffer => 
< any >this.validateIntegrity( buffer, false ) ) ) @@ -130,7 +132,7 @@ export class Body implements IBody ) .then( ( ) => < string >< BodyTypes >this._body ); else if ( isStream( this._body ) ) - return getStream.buffer( < NodeJS.ReadableStream >this._body ) + return this.awaitBuffer( < NodeJS.ReadableStream >this._body ) .then( tap( buffer => < any >this.validateIntegrity( buffer, allowIncomplete ) ) ) @@ -215,6 +217,38 @@ export class Body implements IBody this._integrity = integrity; } + private async awaitBuffer( readable: NodeJS.ReadableStream ) + : Promise< Buffer > + { + if ( !this._signal ) + return getStream.buffer( readable ); + + // Race the readable against the abort signal + let callback: ( ) => void = ( ) => { }; + const onAborted = new Promise< Buffer >( ( _, reject ) => + { + callback = ( ) => { reject( abortError ); }; + this._signal?.addListener( 'abort', callback ); + } ); + + try + { + this._ensureNotAborted( ); + + return await Promise.race( [ + getStream.buffer( readable ), + onAborted, + ] ); + } + finally + { + this._signal.removeListener( 'abort', callback ); + // Could happen if abort and other error happen practically + // simultaneously. Ensure Node.js won't get mad about this. + onAborted.catch( ( ) => { } ); + } + } + private validateIntegrity< T extends Buffer | ArrayBuffer >( data: T, allowIncomplete: boolean @@ -258,7 +292,7 @@ export class Body implements IBody private _ensureNotAborted( ) { if ( this._signal && this._signal.aborted ) - throw new AbortError( "Response aborted" ); + throw abortError; } private _ensureUnused( ) diff --git a/test/fetch-h2/abort.ts b/test/fetch-h2/abort.ts index 960914e..5f068e2 100644 --- a/test/fetch-h2/abort.ts +++ b/test/fetch-h2/abort.ts @@ -127,10 +127,10 @@ describe( "abort", ( ) => const { server, port } = await makeServer( ); - setTimeout( abort, 50 ); + setTimeout( abort, 100 ); const response = ensureStatusSuccess( - await fetch( `${proto}://localhost:${port}/slow/100`, { signal } ) + await fetch( `${proto}://localhost:${port}/slow/200`, { signal } ) ); const awaitBody = response.arrayBuffer( ); From c486b49e217864e21b88d429331879a0f99b1959 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Wed, 29 Jan 2020 00:09:52 +0100 Subject: [PATCH 47/77] test(integration): use compd and docker httpbin for testing --- .travis.yml | 6 ++- jest.config.js | 3 +- jest.config.unit.js | 8 +++ package.json | 34 +++++++------ scripts/make-certs.sh | 15 ++++++ test/docker-compose.yaml | 24 +++++++++ test/{fetch-h2 => integration}/httpbin.ts | 59 +++++++++++++---------- 7 files changed, 104 insertions(+), 45 deletions(-) create mode 100644 jest.config.unit.js create mode 100755 scripts/make-certs.sh create mode 100644 test/docker-compose.yaml rename test/{fetch-h2 => integration}/httpbin.ts (68%) diff --git a/.travis.yml b/.travis.yml index fcfa01e..846a235 100644 --- a/.travis.yml +++ b/.travis.yml @@ -6,7 +6,9 @@ node_js: - "12" - "13" before_script: - - "npm run build" + - yarn build +script: + - yarn test branches: except: - /^v\d+\.\d+\.\d+$/ @@ -15,6 +17,6 @@ jobs: - stage: release node_js: lts/* script: - - npm run test + - yarn test - cat coverage/lcov.info | npx coveralls - npx semantic-release diff --git a/jest.config.js b/jest.config.js index 2436668..9f1ac53 100644 --- a/jest.config.js +++ b/jest.config.js @@ -1,7 +1,8 @@ module.exports = { preset: 'ts-jest', testEnvironment: 'node', - testMatch: ['/test/fetch-h2/**/*.ts'], + testMatch: ['/test/**/*.ts'], + modulePathIgnorePatterns: ['/lib/', 
'/test-client/'], collectCoverageFrom: ['/lib/**', 'index.ts'], coverageReporters: ['lcov', 'text', 'html'], }; diff --git a/jest.config.unit.js b/jest.config.unit.js new file mode 100644 index 0000000..57129cc --- /dev/null +++ b/jest.config.unit.js @@ -0,0 +1,8 @@ +const config = require( './jest.config.js' ); +module.exports = { + ...config, + modulePathIgnorePatterns: [ + ...config.modulePathIgnorePatterns, + '/integration/' + ], +}; diff --git a/package.json b/package.json index 5e6bd21..753aa0c 100644 --- a/package.json +++ b/package.json @@ -18,14 +18,17 @@ "dist" ], "scripts": { - "build": "./node_modules/.bin/rimraf dist && ./node_modules/.bin/tsc -p .", + "build:ts": "./node_modules/.bin/rimraf dist && ./node_modules/.bin/tsc -p .", + "build:cert": "scripts/make-certs.sh", + "build": "concurrently 'yarn build:ts' 'yarn build:cert'", "lint": "node_modules/.bin/tslint --project .", - "jest": "node_modules/.bin/jest", + "jest:core": "node_modules/.bin/jest --forceExit --detectOpenHandles --coverage", + "jest:fast": "yarn jest:core --config jest.config.unit.js $@", + "jest:integration": "node_modules/.bin/compd -f test/docker-compose.yaml yarn jest:core", "jest:debug": "node --inspect-brk node_modules/.bin/jest", - "test": "npm run lint && node_modules/.bin/jest --forceExit --detectOpenHandles --coverage", + "test": "yarn lint && yarn jest:integration", "buildtest": "npm run build && npm run jest", "buildtestcov": "npm run build && npm run test", - "coverage": "node_modules/.bin/nyc report --reporter=html", "coveralls": "cat coverage/lcov.info | node_modules/.bin/coveralls", "version": "./node_modules/.bin/ts-node scripts/version-update.ts && npm run build && npm run test && scripts/version-git-add.sh", "prepack": "npm run build && npm run test", @@ -51,27 +54,26 @@ "devDependencies": { "@types/execa": "^2.0.0", "@types/from2": "2.x", - "@types/jest": "24.x", - "@types/node": "12.x", + "@types/jest": "25.1.0", + "@types/node": "13.5.0", "@types/through2": "2.x", "commitizen": "^4.0.3", - "coveralls": "3.x", - "cz-conventional-changelog": "^3.0.2", + "compd": "^1.3.7", + "concurrently": "^5.1.0", + "cz-conventional-changelog": "^3.1.0", "execa": "^2.0.4", "from2": "2.x", "jest": "24.x", - "nyc": "14.x", - "rimraf": "^3.0.0", - "semantic-release": "15.x", - "travis-deploy-once": "5.x", + "mkcert": "^1.2.0", + "rimraf": "^3.0.1", "ts-jest": "24.x", - "ts-node": "8.x", - "tslint": "5.x", - "typescript": "3.x" + "ts-node": "8.6.2", + "tslint": "6.0.0", + "typescript": "3.7.5" }, "dependencies": { "@types/tough-cookie": "2.x", - "already": "1.x", + "already": "1.10.1", "callguard": "1.x", "get-stream": "5.x", "through2": "3.x", diff --git a/scripts/make-certs.sh b/scripts/make-certs.sh new file mode 100755 index 0000000..b419ca3 --- /dev/null +++ b/scripts/make-certs.sh @@ -0,0 +1,15 @@ +#!/bin/sh + +set -e + +# Same as in the test and docker-compose (!) 
+DIR=/tmp/fetch-h2-certs + +node_modules/.bin/rimraf ${DIR} +mkdir -p ${DIR} +node_modules/.bin/mkcert create-ca \ + --key ${DIR}/ca-key.pem --cert ${DIR}/ca.pem +node_modules/.bin/mkcert create-cert \ + --ca-key ${DIR}/ca-key.pem --ca-cert ${DIR}/ca.pem \ + --key ${DIR}/key.pem --cert ${DIR}/cert.pem \ + --domains localhost,127.0.0.1 diff --git a/test/docker-compose.yaml b/test/docker-compose.yaml new file mode 100644 index 0000000..e0ccebd --- /dev/null +++ b/test/docker-compose.yaml @@ -0,0 +1,24 @@ +version: '3' +services: + http1bin: + image: kennethreitz/httpbin + ports: + - "80" + http2bin: + image: skydoctor/httpbin-http2 + ports: + - "8000" + https1proxy: + image: fsouza/docker-ssl-proxy + environment: + DOMAIN: localhost + TARGET_HOST: http1bin + TARGET_PORT: 80 + links: + - http1bin + ports: + - 443 + volumes: + - "/tmp/fetch-h2-certs:/etc/nginx/certs" +# node_modules/.bin/mkcert create-ca --key /tmp/asdf/ca.key --cert /tmp/asdf/ca.crt +# node_modules/.bin/mkcert create-cert --ca-key /tmp/asdf/ca.key --ca-cert /tmp/asdf/ca.crt --key /tmp/asdf/cert.key --cert /tmp/asdf/cert.crt --domains localhost,127.0.0.1 diff --git a/test/fetch-h2/httpbin.ts b/test/integration/httpbin.ts similarity index 68% rename from test/fetch-h2/httpbin.ts rename to test/integration/httpbin.ts index 60b8117..a5f3e24 100644 --- a/test/fetch-h2/httpbin.ts +++ b/test/integration/httpbin.ts @@ -1,4 +1,5 @@ import { URL } from "url"; +import * as fs from "fs"; import { delay, Finally } from "already"; import * as through2 from "through2"; @@ -15,47 +16,58 @@ import { interface TestData { - protocol: string; + scheme: string; site: string; protos: Array< HttpProtocols >; + certs?: boolean; } +const ca = fs.readFileSync( "/tmp/fetch-h2-certs/ca.pem" ); +const cert = fs.readFileSync( "/tmp/fetch-h2-certs/cert.pem" ); + +const http1bin = `localhost:${process.env.HTTP1BIN_PORT}`; +const http2bin = `localhost:${process.env.HTTP2BIN_PORT}`; +const https1bin = `localhost:${process.env.HTTPS1PROXY_PORT}`; + ( [ - { protocol: "https:", site: "nghttp2.org/httpbin", protos: [ "http2" ] }, - { protocol: "http:", site: "httpbin.org", protos: [ "http1" ] }, - { protocol: "https:", site: "httpbin.org", protos: [ "http1" ] }, + { scheme: "http:", site: http2bin, protos: [ "http2" ] }, + { scheme: "http:", site: http1bin, protos: [ "http1" ] }, + { scheme: "https:", site: https1bin, protos: [ "http1" ], certs: false }, + { scheme: "https:", site: https1bin, protos: [ "http1" ], certs: true }, ] as Array< TestData > ) -.forEach( ( { site, protocol, protos } ) => +.forEach( ( { site, scheme, protos, certs } ) => { -const host = `${protocol}//${site}`; +const host = `${scheme}//${site}`; const baseHost = new URL( host ).origin; -const name = `${site} (${protos[ 0 ]} over ${protocol.replace( ":", "" )})`; +const name = `${site} (${protos[ 0 ]} over ${scheme.replace( ":", "" )})` + + ( certs ? ' (using explicit certificates)' : '' ); describe( name, ( ) => { - jest.setTimeout( 10000 ); - function wrapContext( fn: ( fetch: typeof fetchType ) => Promise< void > ) { return async ( ) => { const { fetch, disconnectAll } = context( { httpsProtocols: protos, + session: certs + ? 
{ ca, cert, rejectUnauthorized: false } + : { rejectUnauthorized: false }, } ); await fn( fetch ).then( ...Finally( disconnectAll ) ); }; } - it.concurrent( "should be possible to GET", wrapContext( async ( fetch ) => + it( "should be possible to GET", wrapContext( async ( fetch ) => { const response = await fetch( `${host}/user-agent` ); const data = await response.json( ); expect( data[ "user-agent" ] ).toContain( "fetch-h2/" ); } ) ); - it.concurrent( "should be possible to POST JSON", wrapContext( + it( "should be possible to POST JSON", wrapContext( async ( fetch ) => { const testData = { foo: "bar" }; @@ -73,7 +85,7 @@ describe( name, ( ) => expect( data.headers[ "Content-Type" ] ).toBe( "application/json" ); } ) ); - it.concurrent( "should be possible to POST buffer-data", wrapContext( + it( "should be possible to POST buffer-data", wrapContext( async ( fetch ) => { const testData = '{"foo": "data"}'; @@ -90,7 +102,7 @@ describe( name, ( ) => expect( data.headers ).not.toHaveProperty( "Content-Type" ); } ) ); - it.concurrent( "should be possible to POST already ended stream-data", + it( "should be possible to POST already ended stream-data", wrapContext( async ( fetch ) => { const stream = through2( ); @@ -113,7 +125,7 @@ describe( name, ( ) => expect( data.data ).toBe( "foobar" ); } ) ); - it.concurrent( "should be possible to POST not yet ended stream-data", + it( "should be possible to POST not yet ended stream-data", wrapContext( async ( fetch ) => { const stream = through2( ); @@ -140,10 +152,9 @@ describe( name, ( ) => expect( data.data ).toBe( "foobar" ); } ) ); - it.concurrent( "should save and forward cookies", async ( ) => + it( "should save and forward cookies", + wrapContext( async ( fetch ) => { - const { fetch, disconnectAll } = context( ); - const responseSet = await fetch( `${host}/cookies/set?foo=bar`, { redirect: "manual" } ); @@ -155,13 +166,11 @@ describe( name, ( ) => const data = await response.json( ); expect( data.cookies ).toEqual( { foo: "bar" } ); + } ) ); - await disconnectAll( ); - } ); - - it.concurrent( "should handle (and follow) relative paths", async ( ) => + it( "should handle (and follow) relative paths", + wrapContext( async ( fetch ) => { - const { fetch, disconnectAll } = context( ); const response = await fetch( `${host}/relative-redirect/2`, @@ -169,11 +178,9 @@ describe( name, ( ) => expect( response.url ).toBe( `${host}/get` ); await response.text( ); + } ) ); - await disconnectAll( ); - } ); - - it.concurrent( "should be possible to GET gzip data", wrapContext( + it( "should be possible to GET gzip data", wrapContext( async ( fetch ) => { const response = await fetch( `${host}/gzip` ); From 3bcd1294b1a33dc83e5ca961b3299f4c9f28d282 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Wed, 29 Jan 2020 00:37:09 +0100 Subject: [PATCH 48/77] test(upstream): bumped dev dependencies --- package.json | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/package.json b/package.json index 753aa0c..32531ab 100644 --- a/package.json +++ b/package.json @@ -55,18 +55,18 @@ "@types/execa": "^2.0.0", "@types/from2": "2.x", "@types/jest": "25.1.0", - "@types/node": "13.5.0", + "@types/node": "13.5.1", "@types/through2": "2.x", "commitizen": "^4.0.3", "compd": "^1.3.7", "concurrently": "^5.1.0", "cz-conventional-changelog": "^3.1.0", - "execa": "^2.0.4", + "execa": "^4.0.0", "from2": "2.x", - "jest": "24.x", + "jest": "25.1.0", "mkcert": "^1.2.0", "rimraf": "^3.0.1", - "ts-jest": "24.x", + "ts-jest": "25.0.0", 
"ts-node": "8.6.2", "tslint": "6.0.0", "typescript": "3.7.5" From c416f012e8a1fb5426aea1f4dee8cff645414ac6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Sat, 1 Feb 2020 17:45:10 +0100 Subject: [PATCH 49/77] chore(deps): bumped deps and minor cleanup --- package.json | 36 +++++++++++++++++------------------- test/docker-compose.yaml | 2 -- 2 files changed, 17 insertions(+), 21 deletions(-) diff --git a/package.json b/package.json index 32531ab..5285b62 100644 --- a/package.json +++ b/package.json @@ -33,8 +33,6 @@ "version": "./node_modules/.bin/ts-node scripts/version-update.ts && npm run build && npm run test && scripts/version-git-add.sh", "prepack": "npm run build && npm run test", "makecerts": "openssl req -x509 -nodes -days 7300 -newkey rsa:2048 -keyout certs/key.pem -out certs/cert.pem", - "travis-deploy-once": "travis-deploy-once", - "semantic-release": "semantic-release", "cz": "git-cz" }, "repository": { @@ -53,32 +51,32 @@ ], "devDependencies": { "@types/execa": "^2.0.0", - "@types/from2": "2.x", - "@types/jest": "25.1.0", - "@types/node": "13.5.1", - "@types/through2": "2.x", + "@types/from2": "^2.3.0", + "@types/jest": "^25.1.1", + "@types/node": "^13.7.0", + "@types/through2": "^2.0.34", "commitizen": "^4.0.3", "compd": "^1.3.7", "concurrently": "^5.1.0", "cz-conventional-changelog": "^3.1.0", "execa": "^4.0.0", - "from2": "2.x", - "jest": "25.1.0", + "from2": "^2.3.0", + "jest": "^25.1.0", "mkcert": "^1.2.0", "rimraf": "^3.0.1", - "ts-jest": "25.0.0", - "ts-node": "8.6.2", - "tslint": "6.0.0", - "typescript": "3.7.5" + "ts-jest": "^25.1.0", + "ts-node": "^8.6.2", + "tslint": "^6.0.0", + "typescript": "^3.7.5" }, "dependencies": { - "@types/tough-cookie": "2.x", - "already": "1.10.1", - "callguard": "1.x", - "get-stream": "5.x", - "through2": "3.x", - "to-arraybuffer": "1.x", - "tough-cookie": "3.x" + "@types/tough-cookie": "^2.3.6", + "already": "^1.10.1", + "callguard": "^1.2.1", + "get-stream": "^5.1.0", + "through2": "^3.0.1", + "to-arraybuffer": "^1.0.1", + "tough-cookie": "^3.0.1" }, "config": { "commitizen": { diff --git a/test/docker-compose.yaml b/test/docker-compose.yaml index e0ccebd..75307b2 100644 --- a/test/docker-compose.yaml +++ b/test/docker-compose.yaml @@ -20,5 +20,3 @@ services: - 443 volumes: - "/tmp/fetch-h2-certs:/etc/nginx/certs" -# node_modules/.bin/mkcert create-ca --key /tmp/asdf/ca.key --cert /tmp/asdf/ca.crt -# node_modules/.bin/mkcert create-cert --ca-key /tmp/asdf/ca.key --ca-cert /tmp/asdf/ca.crt --key /tmp/asdf/cert.key --cert /tmp/asdf/cert.crt --domains localhost,127.0.0.1 From f03e11117dd6244b6172ce37cbb2609c6911308b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Sat, 1 Feb 2020 22:52:21 +0100 Subject: [PATCH 50/77] feat(san): full SAN support This commit also has a few bug fixes wrt GOAWAY. 
Fix #63 --- .npmignore | 7 - README.md | 2 + lib/body.ts | 17 +- lib/context-http1.ts | 28 ++- lib/context-http2.ts | 131 +++++----- lib/context-https.ts | 21 +- lib/context.ts | 328 ++++++++++++++++++-------- lib/core.ts | 13 +- lib/fetch-common.ts | 5 +- lib/fetch-http1.ts | 8 +- lib/fetch-http2.ts | 113 +++++---- lib/origin-cache.ts | 132 +++++++++++ lib/response.ts | 13 +- lib/san.ts | 66 ++++++ lib/utils.ts | 5 + package.json | 2 +- test-client/index.ts | 15 +- test/fetch-h2/event-loop-reference.ts | 2 +- test/fetch-h2/index.ts | 2 +- test/fetch-h2/origin-cache.ts | 111 +++++++++ test/fetch-h2/san.ts | 31 +++ test/integration/httpbin.ts | 12 +- test/lib/server-http1.ts | 11 +- test/lib/server-http2.ts | 11 +- 24 files changed, 812 insertions(+), 274 deletions(-) delete mode 100644 .npmignore create mode 100644 lib/origin-cache.ts create mode 100644 lib/san.ts create mode 100644 test/fetch-h2/origin-cache.ts create mode 100644 test/fetch-h2/san.ts diff --git a/.npmignore b/.npmignore deleted file mode 100644 index 616c784..0000000 --- a/.npmignore +++ /dev/null @@ -1,7 +0,0 @@ -scripts -.npmignore -.travis.yml -tsconfig.json -test/ -dist/test/ -dist/test-client/ diff --git a/README.md b/README.md index 30f9ebb..ac82ecb 100644 --- a/README.md +++ b/README.md @@ -28,6 +28,8 @@ Since 1.0.0, `fetch-h2` requires Node.js 10. Since 2.0.0, `fetch-h2` requires Node.js 10.4. +Since 2.4.0, `fetch-h2` has full TLS SAN (Subject Alternative Name) support. + # API diff --git a/lib/body.ts b/lib/body.ts index 825714a..2354484 100644 --- a/lib/body.ts +++ b/lib/body.ts @@ -33,7 +33,7 @@ function parseIntegrity( integrity: string ) return { algorithm, hash: expectedHash.join( "-" ) }; } -function isStream( body: StorageBodyTypes ): boolean +function isStream( body: StorageBodyTypes ): body is NodeJS.ReadableStream { return body && ( "readable" in ( < NodeJS.ReadableStream >Object( body ) ) ); @@ -47,7 +47,7 @@ export class Body implements IBody public readonly bodyUsed: boolean; protected _length: number | null; protected _mime?: string; - private _body?: StorageBodyTypes | null; + protected _body?: StorageBodyTypes | null; private _used: boolean; private _integrity?: string; private _signal?: AbortSignal; @@ -133,7 +133,7 @@ export class Body implements IBody .then( ( ) => < string >< BodyTypes >this._body ); else if ( isStream( this._body ) ) return this.awaitBuffer( < NodeJS.ReadableStream >this._body ) - .then( tap( buffer => + .then( tap( buffer => < any >this.validateIntegrity( buffer, allowIncomplete ) ) ) .then( buffer => buffer.toString( ) ); @@ -363,6 +363,11 @@ export class BodyInspector extends Body return this._length; } + private _getBody( ) + { + return this._body; + } + get mime( ) { return this._getMime.call( this._ref ); @@ -372,4 +377,10 @@ export class BodyInspector extends Body { return this._getLength.call( this._ref ); } + + get stream( ) + { + const rawBody = this._getBody.call( this._ref ); + return rawBody && isStream( rawBody ) ? 
rawBody : undefined; + } } diff --git a/lib/context-http1.ts b/lib/context-http1.ts index dabe448..5ea5599 100644 --- a/lib/context-http1.ts +++ b/lib/context-http1.ts @@ -42,7 +42,7 @@ export interface FreeSocketInfoWithoutSocket export type FreeSocketInfo = FreeSocketInfoWithSocket | FreeSocketInfoWithoutSocket; -class OriginPool +export class OriginPool { private usedSockets = new Set< Socket >( ); private unusedSockets = new Set< Socket >( ); @@ -286,6 +286,11 @@ class ContextPool } } +function sessionToPool( session: unknown ) +{ + return session as OriginPool; +} + export class H1Context { private contextPool: ContextPool; @@ -295,21 +300,26 @@ export class H1Context this.contextPool = new ContextPool( options ); } - public getFreeSocketForOrigin( origin: string ): FreeSocketInfo + public getSessionForOrigin( origin: string ) + { + return this.contextPool.getOriginPool( origin ); + } + + public getFreeSocketForSession( session: OriginPool ): FreeSocketInfo { - return this.contextPool.hasOrigin( origin ) - ? this.contextPool.getOriginPool( origin ).getFreeSocket( ) - : { shouldCreateNew: true } as FreeSocketInfoWithoutSocket; + const pool = sessionToPool( session ); + return pool.getFreeSocket( ); } - public addUsedSocket( origin: string, socket: Socket ) + public addUsedSocket( session: OriginPool, socket: Socket ) { - return this.contextPool.getOriginPool( origin ).addUsed( socket ); + const pool = sessionToPool( session ); + return pool.addUsed( socket ); } - public waitForSocket( origin: string ): Promise< SocketAndCleanup > + public waitForSocketBySession( session: OriginPool ): Promise< SocketAndCleanup > { - return this.contextPool.getOriginPool( origin ).waitForSocket( ); + return sessionToPool( session ).waitForSocket( ); } public connect( url: URL, extraOptions: ConnectOptions, request: Request ) diff --git a/lib/context-http2.ts b/lib/context-http2.ts index 5b9627b..45b6fe4 100644 --- a/lib/context-http2.ts +++ b/lib/context-http2.ts @@ -33,6 +33,7 @@ const { interface H2SessionItem { + firstOrigin: string; session: ClientHttp2Session; promise: Promise< ClientHttp2Session >; @@ -40,6 +41,13 @@ interface H2SessionItem unref: ( ) => void; } +export interface CacheableH2Session +{ + ref: ( ) => void; + session: Promise< ClientHttp2Session >; + unref: ( ) => void; +} + export type PushHandler = ( origin: string, @@ -55,9 +63,9 @@ export class H2Context { public _pushHandler?: PushHandler; - private _h2sessions: Map< string, H2SessionItem > = new Map( ); - private _h2staleSessions: Map< string, Set< ClientHttp2Session > > = - new Map( ); + // TODO: Remove in favor of protocol-agnostic origin cache + private _h2sessions = new Map< string, H2SessionItem >( ); + private _h2staleSessions = new Map< string, Set< ClientHttp2Session > >( ); private _getDecoders: GetDecoders; private _getSessionOptions: GetSessionOptions; @@ -80,7 +88,7 @@ export class H2Context const printSession = ( origin: string, session: MonkeyH2Session ) => { - debug( " Origin:", origin ); + debug( " First origin:", origin ); debug( " Ref-counter:", session.__fetch_h2_refcount ); debug( " Destroyed:", session.destroyed ); debug( " Destroyed mark:", session.__fetch_h2_destroyed ); @@ -111,80 +119,53 @@ export class H2Context } } - public hasOrigin( origin: string ) - { - return this._h2sessions.has( origin ); - } - - public getOrCreateHttp2( + public createHttp2( origin: string, + onGotGoaway: ( ) => void, extraOptions?: SecureClientSessionOptions ) - : { - didCreate: boolean; - session: Promise< ClientHttp2Session 
>; - cleanup: ( ) => void; - } + : CacheableH2Session { - const willCreate = !this._h2sessions.has( origin ); - - if ( willCreate ) - { - const sessionItem = this.connectHttp2( origin, extraOptions ); - - const { promise } = sessionItem; + const sessionItem = this.connectHttp2( origin, extraOptions ); - // Handle session closure (delete from store) - promise - .then( session => - { - session.once( - "close", - ( ) => this.disconnect( origin, session ) - ); - - session.once( - "goaway", - ( - _errorCode: number, - _lastStreamID: number, - _opaqueData: Buffer - ) => - { - setGotGoaway( session ); - this.releaseSession( origin ); - } - ); - } ) - .catch( ( ) => - { - if ( sessionItem.session ) - this.disconnect( origin, sessionItem.session ); - } ); + const { promise } = sessionItem; - this._h2sessions.set( origin, sessionItem ); - } + // Handle session closure (delete from store) + promise + .then( session => + { + session.once( + "close", + ( ) => this.disconnect( origin, session ) + ); - const { promise: session, ref, unref } = - ( < H2SessionItem >this._h2sessions.get( origin ) ); + session.once( + "goaway", + ( + _errorCode: number, + _lastStreamID: number, + _opaqueData: Buffer + ) => + { + setGotGoaway( session ); + onGotGoaway( ); + this.releaseSession( origin ); + } + ); + } ) + .catch( ( ) => + { + if ( sessionItem.session ) + this.disconnect( origin, sessionItem.session ); + } ); - if ( !willCreate ) - // This was re-used - ref( ); + this._h2sessions.set( origin, sessionItem ); - // Avoid potential double-clean races - let hasCleanedUp = false; - const cleanup = ( ) => - { - if ( hasCleanedUp ) - return; - hasCleanedUp = true; - unref( ); - }; + const { promise: session, ref, unref } = sessionItem; return { - cleanup, - didCreate: willCreate, + ref, + unref, session, }; } @@ -266,7 +247,8 @@ export class H2Context return Promise.all( promises ).then( ( ) => { } ); } - public disconnect( url: string, session?: ClientHttp2Session ): Promise< void > + public disconnect( url: string, session?: ClientHttp2Session ) + : Promise< void > { const { origin } = new URL( url ); const promises: Array< Promise< void > > = [ ]; @@ -400,7 +382,7 @@ export class H2Context // tslint:disable-next-line const aGuard = asyncGuard( console.error.bind( console ) ); - const sessionRefs: Partial< H2SessionItem > = { }; + const sessionRefs = { } as Pick< H2SessionItem, 'ref' | 'unref' >; const makeRefs = ( session: ClientHttp2Session ) => { @@ -418,7 +400,7 @@ export class H2Context }; sessionRefs.unref = ( ) => { - if ( session.destroyed ) + if ( isDestroyed( session ) ) return; --monkeySession.__fetch_h2_refcount; @@ -450,8 +432,8 @@ export class H2Context origin, stream, headers, - < ( ) => void >sessionRefs.ref, - < ( ) => void >sessionRefs.unref + ( ) => sessionRefs.ref( ), + ( ) => sessionRefs.unref( ) ) ) ); @@ -466,10 +448,11 @@ export class H2Context ); return { + firstOrigin: origin, promise, - ref: < ( ) => void >sessionRefs.ref, + ref: ( ) => sessionRefs.ref( ), session, - unref: < ( ) => void >sessionRefs.unref, + unref: ( ) => sessionRefs.unref( ), }; } } diff --git a/lib/context-https.ts b/lib/context-https.ts index 22fb9e1..7cf5243 100644 --- a/lib/context-https.ts +++ b/lib/context-https.ts @@ -2,6 +2,7 @@ import { SecureClientSessionOptions } from "http2"; import { connect, ConnectionOptions, TLSSocket } from "tls"; import { HttpProtocols } from "./core"; +import { AltNameMatch, parseOrigin } from "./san"; const alpnProtocols = { @@ -13,6 +14,7 @@ export interface HttpsSocketResult { 
socket: TLSSocket; protocol: "http1" | "http2"; + altNameMatch: AltNameMatch; } const defaultMethod: Array< HttpProtocols > = [ "http2", "http1" ]; @@ -48,10 +50,13 @@ export function connectTLS( return new Promise< HttpsSocketResult >( ( resolve, reject ) => { - const socket: TLSSocket = connect( parseInt( port, 10 ), host, opts, ( ) => + const socket: TLSSocket = connect( parseInt( port, 10 ), host, opts, + ( ) => { const { authorized, authorizationError, alpnProtocol = "" } = socket; + const cert = socket.getPeerCertificate( ); + const altNameMatch = parseOrigin( cert ); if ( !authorized && opts.rejectUnauthorized !== false ) return reject( authorizationError ); @@ -61,14 +66,22 @@ export function connectTLS( // Maybe the server doesn't understand ALPN, enforce // user-provided protocol, or fallback to HTTP/1 if ( _protocols.length === 1 ) - return resolve( { protocol: _protocols[ 0 ], socket } ); + return resolve( { + altNameMatch, + protocol: _protocols[ 0 ], + socket, + } ); else - return resolve( { protocol: "http1", socket } ); + return resolve( { + altNameMatch, + protocol: "http1", + socket, + } ); } const protocol = alpnProtocol === "h2" ? "http2" : "http1"; - resolve( { socket, protocol } ); + resolve( { socket, protocol, altNameMatch } ); } ); socket.once( "error", reject ); diff --git a/lib/context.ts b/lib/context.ts index ddfe69d..8808d58 100644 --- a/lib/context.ts +++ b/lib/context.ts @@ -4,9 +4,10 @@ import { } from "http2"; import { Socket } from "net"; import { URL } from "url"; +import { funnel, once, specific } from "already"; -import { H1Context } from "./context-http1"; -import { H2Context, PushHandler } from "./context-http2"; +import { H1Context, OriginPool } from "./context-http1"; +import { CacheableH2Session, H2Context, PushHandler } from "./context-http2"; import { connectTLS } from "./context-https"; import { CookieJar } from "./cookie-jar"; import { @@ -21,7 +22,7 @@ import { SimpleSession, SimpleSessionHttp1, SimpleSessionHttp2, - SimpleSessionHttp2Session, + RetryError, } from "./core"; import { fetch as fetchHttp1 } from "./fetch-http1"; import { fetch as fetchHttp2 } from "./fetch-http2"; @@ -29,6 +30,7 @@ import { version } from "./generated/version"; import { Request } from "./request"; import { Response } from "./response"; import { parseInput } from "./utils"; +import OriginCache from "./origin-cache"; function makeDefaultUserAgent( ): string @@ -61,6 +63,14 @@ export interface ContextOptions http1: Partial< Http1Options > | PerOrigin< Partial< Http1Options > >; } +interface SessionMap +{ + http1: OriginPool; + https1: OriginPool; + http2: CacheableH2Session; + https2: CacheableH2Session; +} + export class Context { private h1Context: H1Context; @@ -79,6 +89,10 @@ export class Context ReadonlyArray< HttpProtocols > | PerOrigin< ReadonlyArray< HttpProtocols > >; private _http1Options: Partial< Http1Options | PerOrigin< Http1Options > >; + private _httpsFunnel = funnel< Response >( ); + private _http1Funnel = funnel< Response >( ); + private _http2Funnel = funnel< Response >( ); + private _originCache = new OriginCache< SessionMap >( ); constructor( opts?: Partial< ContextOptions > ) { @@ -159,6 +173,58 @@ export class Context } public async fetch( input: string | Request, init?: Partial< FetchInit > ) + { + return this.retryFetch( input, init, 0 ); + } + + public async disconnect( url: string ) + { + const { origin } = this.parseInput( url ); + const sessions = this._originCache.getAny( origin ); + sessions.forEach( ( { session } ) => + { + 
this._originCache.delete( session ); + } ); + + await Promise.all( [ + this.h1Context.disconnect( url ), + this.h2Context.disconnect( url ), + ] ); + } + + public async disconnectAll( ) + { + this._originCache.clear( ); + + await Promise.all( [ + this.h1Context.disconnectAll( ), + this.h2Context.disconnectAll( ), + ] ); + } + + private async retryFetch( + input: string | Request, + init: Partial< FetchInit > | undefined, + count: number + ) + : Promise< Response > + { + ++count; + + return this.retryableFetch( input, init ) + .catch( specific( RetryError, err => + { + // TODO: Implement a more robust retry logic + if ( count > 10 ) + throw err; + return this.retryFetch( input, init, count ); + } ) ); + } + + private async retryableFetch( + input: string | Request, + init?: Partial< FetchInit > + ) : Promise< Response > { const { hostname, origin, port, protocol, url } = @@ -200,19 +266,30 @@ export class Context return fetchHttp1( sessionGetterHttp1, request, init ); }; - const doFetchHttp2 = ( ) => + const doFetchHttp2 = async ( cacheableSession: CacheableH2Session ) => { - const sessionGetterHttp2: SimpleSessionHttp2 = { - get: ( url: string ) => this.getHttp2( url ), - ...makeSimpleSession( "http2" ), - }; - return fetchHttp2( sessionGetterHttp2, request, init ); + const { session, unref } = cacheableSession; + const cleanup = once( unref ); + + try + { + const sessionGetterHttp2: SimpleSessionHttp2 = { + get: ( ) => ( { session, cleanup } ), + ...makeSimpleSession( "http2" ), + }; + return await fetchHttp2( sessionGetterHttp2, request, init ); + } + catch ( err ) + { + cleanup( ); + throw err; + } }; - const tryWaitForHttp1 = async ( ) => + const tryWaitForHttp1 = async ( session: OriginPool ) => { const { socket: freeHttp1Socket, cleanup, shouldCreateNew } = - this.h1Context.getFreeSocketForOrigin( origin ); + this.h1Context.getFreeSocketForSession( session ); if ( freeHttp1Socket ) return doFetchHttp1( freeHttp1Socket, cleanup ); @@ -222,89 +299,175 @@ export class Context // We've maxed out HTTP/1 connections, wait for one to be // freed. const { socket, cleanup } = - await this.h1Context.waitForSocket( origin ); + await this.h1Context.waitForSocketBySession( session ); return doFetchHttp1( socket, cleanup ); } }; if ( protocol === "http1" ) { - // Plain text HTTP/1(.1) - const resp = await tryWaitForHttp1( ); - if ( resp ) - return resp; + return this._http1Funnel( async ( shouldRetry, retry, shortcut ) => + { + if ( shouldRetry( ) ) + return retry( ); - const socket = await this.h1Context.makeNewConnection( url ); - const cleanup = this.h1Context.addUsedSocket( origin, socket ); - return doFetchHttp1( socket, cleanup ); + // Plain text HTTP/1(.1) + const cacheItem = this._originCache.get( "http1", origin ); + + const session = + cacheItem?.session ?? 
+ this.h1Context.getSessionForOrigin( origin ); + + const resp = await tryWaitForHttp1( session ); + if ( resp ) + return resp; + + const socket = await this.h1Context.makeNewConnection( url ); + + this._originCache.set( origin, "http1", session ); + + shortcut( ); + + const cleanup = + this.h1Context.addUsedSocket( session, socket ); + return doFetchHttp1( socket, cleanup ); + } ); } else if ( protocol === "http2" ) { - // Plain text HTTP/2 - return doFetchHttp2( ); - } - else // protocol === "https" - { - // If we already have a session/socket open to this origin, - // re-use it + return this._http2Funnel( async ( _, __, shortcut ) => + { + // Plain text HTTP/2 + const cacheItem = this._originCache.get( "http2", origin ); - if ( this.h2Context.hasOrigin( origin ) ) - return doFetchHttp2( ); + if ( cacheItem ) + { + cacheItem.session.ref( ); + shortcut( ); + return doFetchHttp2( cacheItem.session ); + } - const resp = await tryWaitForHttp1( ); - if ( resp ) - return resp; + // Convert socket into http2 session, this will ref (*) + const cacheableSession = this.h2Context.createHttp2( + origin, + ( ) => { this._originCache.delete( cacheableSession ); } + ); - // TODO: Make queue for subsequent fetch requests to the same - // origin, so they can re-use the http2 session, or http1 - // pool once we know what protocol will be used. - // This must apply to plain-text http1 too. + this._originCache.set( origin, "http2", cacheableSession ); - // Use ALPN to figure out protocol lazily - const { protocol, socket } = await connectTLS( - hostname, - port, - getByOrigin( this._httpsProtocols, origin ), - getByOrigin( this._sessionOptions, origin ) + shortcut( ); + + // Session now lingering, it will be re-used by the next get() + return doFetchHttp2( cacheableSession ); + } ); + } + else // protocol === "https" + { + return this._httpsFunnel( ( shouldRetry, retry, shortcut ) => + shouldRetry( ) + ? retry( ) + : this.connectSequenciallyTLS( + shortcut, + hostname, + port, + origin, + tryWaitForHttp1, + doFetchHttp1, + doFetchHttp2 + ) ); + } + } - if ( protocol === "http2" ) + private async connectSequenciallyTLS( + shortcut: ( ) => void, + hostname: string, + port: string, + origin: string, + tryWaitForHttp1: + ( session: OriginPool ) => Promise< Response | undefined >, + doFetchHttp1: + ( socket: Socket, cleanup: ( ) => void ) => Promise< Response >, + doFetchHttp2: + ( cacheableSession: CacheableH2Session ) => Promise< Response > + ) + { + const cacheItem = + this._originCache.get( "https2", origin ) ?? 
+ this._originCache.get( "https1", origin ); + + if ( cacheItem ) + { + if ( cacheItem.protocol === "https1" ) { - // Convert socket into http2 session, this will ref (*) - const { cleanup } = await this.h2Context.getOrCreateHttp2( + const resp = await tryWaitForHttp1( cacheItem.session ); + if ( resp ) + return resp; + } + else if ( cacheItem.protocol === "https2" ) + { + cacheItem.session.ref( ); + return doFetchHttp2( cacheItem.session ); + } + } + + // Use ALPN to figure out protocol lazily + const { protocol, socket, altNameMatch } = await connectTLS( + hostname, + port, + getByOrigin( this._httpsProtocols, origin ), + getByOrigin( this._sessionOptions, origin ) + ); + + if ( protocol === "http2" ) + { + // Convert socket into http2 session, this will ref (*) + // const { cleanup, session, didCreate } = + const cacheableSession = this.h2Context.createHttp2( origin, + ( ) => { this._originCache.delete( cacheableSession ); }, { createConnection: ( ) => socket, } ); - // Session now lingering, it will be re-used by the next get() - const ret = doFetchHttp2( ); - // Unref lingering ref - cleanup( ); - return ret; - } - else // protocol === "http1" - { - const cleanup = this.h1Context.addUsedSocket( origin, socket ); - return doFetchHttp1( socket, cleanup ); - } + + this._originCache.set( + origin, + "https2", + cacheableSession, + altNameMatch + ); + + shortcut( ); + + // Session now lingering, it will be re-used by the next get() + return doFetchHttp2( cacheableSession ); } - } + else // protocol === "http1" + { + const session = + cacheItem?.session ?? + this.h1Context.getSessionForOrigin( origin ); + + // TODO: Update the alt-name list in the origin cache (if the new + // TLS socket contains more/other alt-names). + if ( !cacheItem ) + this._originCache.set( + origin, + "https1", + session, + altNameMatch + ); - public async disconnect( url: string ) - { - await Promise.all( [ - this.h1Context.disconnect( url ), - this.h2Context.disconnect( url ), - ] ); - } + const cleanup = this.h1Context.addUsedSocket( + session, + socket + ); - public async disconnectAll( ) - { - await Promise.all([ - this.h1Context.disconnectAll( ), - this.h2Context.disconnectAll( ), - ]); + shortcut( ); + + return doFetchHttp1( socket, cleanup ); + } } private getHttp1( @@ -325,33 +488,6 @@ export class Context ); } - private getOrCreateHttp2( origin: string, created = false ) - : Promise< SimpleSessionHttp2Session > - { - const { didCreate, session, cleanup } = - this.h2Context.getOrCreateHttp2( origin ); - - return session - .catch( err => - { - if ( didCreate || created ) - // Created in this request, forward error - throw err; - // Not created in this request, try again - return this.getOrCreateHttp2( origin, true ) - .then( ( { session } ) => session ); - } ) - .then( session => ( { session, cleanup } ) ); - } - - private getHttp2( url: string ) - : Promise< SimpleSessionHttp2Session > - { - const { origin } = typeof url === "string" ? 
new URL( url ) : url; - - return this.getOrCreateHttp2( origin ); - } - private parseInput( input: string | Request ) { const { hostname, origin, port, protocol, url } = diff --git a/lib/core.ts b/lib/core.ts index 8268037..e579346 100644 --- a/lib/core.ts +++ b/lib/core.ts @@ -179,6 +179,15 @@ export class TimeoutError extends Error } } +export class RetryError extends Error +{ + constructor( message: string ) + { + super( message ); + Object.setPrototypeOf( this, RetryError.prototype ); + } +} + export type DecodeFunction = ( stream: NodeJS.ReadableStream ) => NodeJS.ReadableStream; @@ -253,7 +262,7 @@ export interface SimpleSessionHttp1Request export interface SimpleSessionHttp2Session { - session: ClientHttp2Session; + session: Promise< ClientHttp2Session >; cleanup: ( ) => void; } @@ -264,5 +273,5 @@ export interface SimpleSessionHttp1 extends SimpleSession export interface SimpleSessionHttp2 extends SimpleSession { - get( url: string ): Promise< SimpleSessionHttp2Session >; + get( ): SimpleSessionHttp2Session; } diff --git a/lib/fetch-common.ts b/lib/fetch-common.ts index dabc2a1..1c5a333 100644 --- a/lib/fetch-common.ts +++ b/lib/fetch-common.ts @@ -263,8 +263,8 @@ export async function setupFetch( function cleanup( ) { - if ( timeoutInfo && timeoutInfo.clear ) - timeoutInfo.clear( ); + timeoutInfo?.clear?.( ); + timeoutInfo?.promise?.catch( _err => { } ); if ( signal && abortHandler ) signal.removeListener( "abort", abortHandler ); @@ -303,7 +303,6 @@ export function handleSignalAndTimeout( < Promise< any > >signalPromise, < Promise< any > >( timeoutInfo && timeoutInfo.promise ), fetcher( ).catch( rethrow( onError ) ), - ] .filter( promise => promise ) ) diff --git a/lib/fetch-http1.ts b/lib/fetch-http1.ts index afa9801..0b1bea0 100644 --- a/lib/fetch-http1.ts +++ b/lib/fetch-http1.ts @@ -23,7 +23,7 @@ import { import { GuardedHeaders } from "./headers"; import { Request } from "./request"; import { Response, StreamResponse } from "./response"; -import { arrayify, isRedirectStatus, parseLocation } from "./utils"; +import { arrayify, isRedirectStatus, parseLocation, pipeline } from "./utils"; const { // Responses, these are the same in HTTP/1.1 and HTTP/2 @@ -252,7 +252,11 @@ export async function fetchImpl( await request.readable( ) .then( readable => { - readable.pipe( req ); + pipeline( readable, req ) + .catch ( _err => + { + // TODO: Implement error handling + } ); } ); return response; diff --git a/lib/fetch-http2.ts b/lib/fetch-http2.ts index a790892..1ea06db 100644 --- a/lib/fetch-http2.ts +++ b/lib/fetch-http2.ts @@ -1,6 +1,7 @@ import { constants as h2constants, IncomingHttpHeaders as IncomingHttp2Headers, + ClientHttp2Stream, } from "http2"; import { syncGuard } from "callguard"; @@ -8,6 +9,7 @@ import { syncGuard } from "callguard"; import { AbortController } from "./abort"; import { AbortError, + RetryError, FetchInit, SimpleSessionHttp2, } from "./core"; @@ -25,7 +27,7 @@ import { import { GuardedHeaders } from "./headers"; import { Request } from "./request"; import { Response, StreamResponse } from "./response"; -import { arrayify, isRedirectStatus, parseLocation } from "./utils"; +import { arrayify, isRedirectStatus, parseLocation, pipeline } from "./utils"; import { hasGotGoaway } from "./utils-http2"; const { @@ -75,53 +77,67 @@ async function fetchImpl( const { raceConditionedGoaway } = extra; - const streamPromise = session.get( url ); + const streamPromise = session.get( ); async function doFetch( ): Promise< Response > { - const { session: h2session, cleanup: 
socketCleanup } = - await streamPromise; + const { session: ph2session, cleanup: socketCleanup } = streamPromise; + const h2session = await ph2session; - const stream = h2session.request( headersToSend, { endStream } ); - - const response = new Promise< Response >( ( resolve, reject ) => + const tryRetryOnGoaway = + ( resolve: ( value: Promise< Response > ) => void ) => { - const guard = syncGuard( reject, { catchAsync: true } ); + // This could be due to a race-condition in GOAWAY. + // As of current Node.js, the 'goaway' event is emitted on the + // session before this event (at least frameError, probably + // 'error' too) is emitted, so we will know if we got it. + if ( + !raceConditionedGoaway.has( origin ) && + hasGotGoaway( h2session ) + ) + { + // Don't retry again due to potential GOAWAY + raceConditionedGoaway.add( origin ); + + // Since we've got the 'goaway' event, the + // context has already released the session, + // so a retry will create a new session. + resolve( + fetchImpl( + session, + request, + { signal, onTrailers }, + { + raceConditionedGoaway, + redirected, + timeoutAt, + } + ) + ); + + return true; + } + return false; + }; - const tryRetryOnGoaway = ( ) => + let stream: ClientHttp2Stream; + try + { + stream = h2session.request( headersToSend, { endStream } ); + } + catch ( err ) + { + if ( err.code === "ERR_HTTP2_GOAWAY_SESSION" ) { - // This could be due to a race-condition in GOAWAY. - // As of current Node.js, the 'goaway' event is emitted on the - // session before this event (at least frameError, probably - // 'error' too) is emitted, so we will know if we got it. - if ( - !raceConditionedGoaway.has( origin ) && - hasGotGoaway( h2session ) - ) - { - // Don't retry again due to potential GOAWAY - raceConditionedGoaway.add( origin ); - - // Since we've got the 'goaway' event, the - // context has already released the session, - // so a retry will create a new session. 
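// A minimal sketch of how the RetryError thrown above is consumed one level
// up, mirroring the capped retry in the context's retryFetch; the function
// name doRetryableFetch and the attempt counter are illustrative only:
async function doRetryableFetch( attempt = 0 ): Promise< Response >
{
	try
	{
		return await doFetch( );
	}
	catch ( err )
	{
		// ERR_HTTP2_GOAWAY_SESSION means the session went away before the
		// stream could be opened. The context has already released that
		// session, so a retry will transparently create a new one.
		if ( err instanceof RetryError && attempt < 10 )
			return doRetryableFetch( attempt + 1 );
		throw err;
	}
}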
- resolve( - fetchImpl( - session, - request, - { signal, onTrailers }, - { - raceConditionedGoaway, - redirected, - timeoutAt, - } - ) - ); + // Retry with new session + throw new RetryError( err.code ); + } + throw err; + } - return true; - } - return false; - }; + const response = new Promise< Response >( ( resolve, reject ) => + { + const guard = syncGuard( reject, { catchAsync: true } ); stream.on( "aborted", guard( ( ..._whatever ) => { @@ -137,7 +153,7 @@ async function fetchImpl( err.message.includes( "NGHTTP2_REFUSED_STREAM" ) ) { - if ( tryRetryOnGoaway( ) ) + if ( tryRetryOnGoaway( resolve ) ) return; } reject( err ); @@ -151,7 +167,7 @@ async function fetchImpl( endStream ) { - if ( tryRetryOnGoaway( ) ) + if ( tryRetryOnGoaway( resolve ) ) return; } @@ -333,7 +349,11 @@ async function fetchImpl( await request.readable( ) .then( readable => { - readable.pipe( stream ); + pipeline( readable, stream ) + .catch ( _err => + { + // TODO: Implement error handling + } ); } ); return response; @@ -344,12 +364,7 @@ async function fetchImpl( timeoutInfo, cleanup, doFetch, - ( ) => - { - streamPromise - .then( ( { cleanup } ) => cleanup( ) ) - .catch( _err => { } ); - } + streamPromise.cleanup ); } diff --git a/lib/origin-cache.ts b/lib/origin-cache.ts new file mode 100644 index 0000000..c0ce5ff --- /dev/null +++ b/lib/origin-cache.ts @@ -0,0 +1,132 @@ +import { AltNameMatch } from "./san" + + +export type Protocol = 'https1' | 'https2' | 'http1' | 'http2'; + +interface State< Session > +{ + protocol: Protocol; + firstOrigin: string; + session: Session; + match?: AltNameMatch; + resolved: Array< string >; +} + +function makeKey( protocol: Protocol, origin: string ) +{ + return protocol + ":" + origin; +} + +type AnySessionMap = { [ key in Protocol ]: unknown; }; + +export interface OriginCacheEntry< P, Session > +{ + protocol: P; + session: Session; + firstOrigin: string; +} + +export default class OriginCache< SessionMap extends AnySessionMap > +{ + private sessionMap: Map< unknown, State< unknown > > = new Map( ); + private staticMap: Map< string, State< unknown > > = new Map( ); + + public getAny( origin: string ) + { + return [ + this.get( 'https1', origin ), + this.get( 'https2', origin ), + this.get( 'http1', origin ), + this.get( 'http2', origin ), + ] + .filter( < T >( t: T ): t is NonNullable< T > => !!t ); + } + + public get< P extends Protocol >( protocol: P, origin: string ) + : OriginCacheEntry< typeof protocol, SessionMap[ P ] > | undefined + { + const key = makeKey( protocol, origin ); + + const stateByStatic = this.staticMap.get( key ); + if ( stateByStatic ) + return { + protocol: stateByStatic.protocol as P, + session: stateByStatic.session, + firstOrigin: stateByStatic.firstOrigin, + }; + + const stateByDynamic = [ ...this.sessionMap.values( ) ].find( state => + state.protocol === protocol && + state.match && + state.match.dynamic && + state.match.dynamic( origin ) + ); + + if ( stateByDynamic ) + { + // An origin matching a dynamic (wildcard) alt-name was found. + // Cache this to find it statically in the future. 
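			// For example (hostnames illustrative only): after
			//   set( "api.example.com", "https2", session, match )
			// where `match` covers "*.example.com", a later
			//   get( "https2", "www.example.com" )
			// lands here exactly once; the origin is then recorded in
			// `resolved` and in `staticMap`, so subsequent lookups for it
			// are plain static map hits.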
+ stateByDynamic.resolved.push( origin ); + this.staticMap.set( key, stateByDynamic ); + return { + protocol: stateByDynamic.protocol as P, + session: stateByDynamic.session, + firstOrigin: stateByDynamic.firstOrigin, + }; + } + } + + public set( + origin: string, + protocol: Protocol, + session: SessionMap[ typeof protocol ], + altNameMatch?: AltNameMatch + ) + { + const state: State< typeof session > = { + protocol, + firstOrigin: origin, + session, + match: altNameMatch, + resolved: [ ], + }; + + this.sessionMap.set( session, state ); + + if ( altNameMatch ) + altNameMatch.names.forEach( origin => + { + this.staticMap.set( makeKey( protocol, origin ), state ); + } ); + + this.staticMap.set( makeKey( protocol, origin ), state ); + } + + // Returns true if a session was deleted, false otherwise + public delete( session: SessionMap[ keyof SessionMap ] ) + { + const state = this.sessionMap.get( session ); + + if ( !state ) + return false; + + [ + state.firstOrigin, + ...state.resolved, + ...( state.match?.names ?? [ ] ), + ] + .forEach( origin => + { + this.staticMap.delete( makeKey( state.protocol, origin ) ); + } ); + this.sessionMap.delete( session ); + + return true; + } + + public clear( ) + { + this.sessionMap.clear( ); + this.staticMap.clear( ); + } +} diff --git a/lib/response.ts b/lib/response.ts index ebbdef1..ca8e671 100644 --- a/lib/response.ts +++ b/lib/response.ts @@ -3,6 +3,8 @@ import { constants as h2constants, } from "http2"; +import { pipeline } from "stream"; + import { createBrotliDecompress, createGunzip, @@ -269,17 +271,22 @@ function handleEncoding( if ( !contentEncoding ) return stream; + const handleStreamResult = ( _err: NodeJS.ErrnoException | null ) => + { + // TODO: Add error handling + }; + const decoders: { [ name: string ]: DecodeFunction; } = { deflate: ( stream: NodeJS.ReadableStream ) => - stream.pipe( createInflate( ) ), + pipeline( stream, createInflate( ), handleStreamResult ), gzip: ( stream: NodeJS.ReadableStream ) => - stream.pipe( createGunzip( ) ), + pipeline( stream, createGunzip( ), handleStreamResult ), }; if ( hasBuiltinBrotli( ) ) { decoders.br = ( stream: NodeJS.ReadableStream ) => - stream.pipe( createBrotliDecompress( ) ); + pipeline( stream, createBrotliDecompress( ), handleStreamResult ); } contentDecoders.forEach( decoder => diff --git a/lib/san.ts b/lib/san.ts new file mode 100644 index 0000000..eeacde2 --- /dev/null +++ b/lib/san.ts @@ -0,0 +1,66 @@ +import { PeerCertificate } from "tls" + + +export type AltNameMatcher = ( name: string ) => boolean; + +export interface AltNameMatch +{ + names: Array< string >; + dynamic?: AltNameMatcher; +} + + +function getAltNames( cert: PeerCertificate ) +{ + const CN = cert.subject?.CN; + const sans = ( cert.subjectaltname ?? 
'' ) + .split( ',' ) + .map( name => name.trim( ) ) + .filter( name => name.startsWith( 'DNS:' ) ) + .map( name => name.substr( 4 ) ); + + if ( CN ) + sans.push( CN ); + + return [ ...new Set( sans ) ]; +} + +export function makeRegex( name: string ) +{ + return "^" + name + .split( '*' ) + .map( part => part.replace( /[^a-zA-Z0-9]/g, val => `\\${val}` ) ) + .join( '[^.]+' ) + "$"; +} + +function makeMatcher( regexes: ReadonlyArray< RegExp > ): AltNameMatcher +{ + return ( name: string ) => regexes.some( regex => name.match( regex ) ); +} + +export function parseOrigin( cert?: PeerCertificate ): AltNameMatch +{ + const names: Array< string > = [ ]; + const regexes: Array< RegExp > = [ ]; + + if ( cert ) + { + getAltNames( cert ).forEach( name => + { + if ( name.match( /.*\*.*\*.*/ ) ) + throw new Error( `Invalid CN/subjectAltNames: ${name}` ); + + if ( name.includes( "*" ) ) + regexes.push( new RegExp( makeRegex( name ) ) ); + else + names.push( name ); + } ); + } + + const ret: AltNameMatch = { + names, + ...( !regexes.length ? { } : { dynamic: makeMatcher( regexes ) } ), + }; + + return ret; +} diff --git a/lib/utils.ts b/lib/utils.ts index 7cbb7b4..21e3171 100644 --- a/lib/utils.ts +++ b/lib/utils.ts @@ -1,5 +1,10 @@ import { URL } from "url"; import { createBrotliCompress } from "zlib"; +import { promisify } from "util"; +import * as stream from "stream"; + + +export const pipeline = promisify( stream.pipeline ); export function arrayify< T >( value: diff --git a/package.json b/package.json index 5285b62..1a58a54 100644 --- a/package.json +++ b/package.json @@ -22,7 +22,7 @@ "build:cert": "scripts/make-certs.sh", "build": "concurrently 'yarn build:ts' 'yarn build:cert'", "lint": "node_modules/.bin/tslint --project .", - "jest:core": "node_modules/.bin/jest --forceExit --detectOpenHandles --coverage", + "jest:core": "node_modules/.bin/jest --detectOpenHandles --coverage", "jest:fast": "yarn jest:core --config jest.config.unit.js $@", "jest:integration": "node_modules/.bin/compd -f test/docker-compose.yaml yarn jest:core", "jest:debug": "node --inspect-brk node_modules/.bin/jest", diff --git a/test-client/index.ts b/test-client/index.ts index 0599fd8..c09ea14 100755 --- a/test-client/index.ts +++ b/test-client/index.ts @@ -1,5 +1,8 @@ // tslint:disable-next-line import { fetch, setup, HttpProtocols } from ".."; +import { pipeline } from "stream"; + +// tslint:disable no-console async function work( ) { @@ -29,11 +32,15 @@ async function work( ) } ); - const readable = await response.readable( ); + pipeline( await response.readable( ), process.stdout, err => + { + if ( !err ) + return; - readable.pipe( process.stdout ); + console.error( "Failed to fetch", err.stack ); + process.exit( 1 ); + } ) } work( ) -// tslint:disable-next-line -.catch( err => { console.error( err.stack ); } ); +.catch( err => { console.error( err, err.stack ); } ); diff --git a/test/fetch-h2/event-loop-reference.ts b/test/fetch-h2/event-loop-reference.ts index 5177e8d..ac29f7e 100644 --- a/test/fetch-h2/event-loop-reference.ts +++ b/test/fetch-h2/event-loop-reference.ts @@ -34,7 +34,7 @@ describe( "event-loop", ( ) => const { stdout } = await execa( script, [ "GET", url, version, "insecure" ], - { input: JSON.stringify( body ) } + { input: JSON.stringify( body ), stderr: 'inherit' } ); const responseBody = JSON.parse( stdout ); diff --git a/test/fetch-h2/index.ts b/test/fetch-h2/index.ts index 79abaf5..f1b57ae 100644 --- a/test/fetch-h2/index.ts +++ b/test/fetch-h2/index.ts @@ -719,7 +719,7 @@ describe( `goaway 
(${protoVersion})`, ( ) => const response1 = ensureStatusSuccess( await fetch( url1 ) ); expect( response1.url ).toBe( cleanUrl( url1 ) ); - await delay(20); + await delay( 20 ); const response2 = ensureStatusSuccess( await fetch( url2 ) ); expect( response2.url ).toBe( cleanUrl( url2 ) ); diff --git a/test/fetch-h2/origin-cache.ts b/test/fetch-h2/origin-cache.ts new file mode 100644 index 0000000..29ac197 --- /dev/null +++ b/test/fetch-h2/origin-cache.ts @@ -0,0 +1,111 @@ +import OriginCache from "../../lib/origin-cache" +import { makeRegex } from "../../lib/san" + + +describe( "Origin cache", ( ) => +{ + it( "should handle not-found origins", async ( ) => + { + const oc = new OriginCache( ); + + expect( oc.get( "http1", "foo.com" ) ).toBeUndefined( ); + } ); + + it( "should handle static and dynamic (wildcard) alt-names", async ( ) => + { + const oc = new OriginCache( ); + + const firstOrigin = "example.com"; + const protocol = "http1"; + const session = { }; + + oc.set( + firstOrigin, + protocol, + session, + { + names: [ firstOrigin, "example.org" ], + dynamic: ( origin: string ) => + !!origin.match( makeRegex( "*.example.com" ) ), + } + ); + + const result = { + protocol, + session, + firstOrigin, + }; + + expect( oc.get( protocol, "foo.com" ) ).toBeUndefined( ); + expect( oc.get( protocol, "example.com" ) ).toEqual( result ); + expect( oc.get( protocol, "example.org" ) ).toEqual( result ); + expect( oc.get( protocol, "foo.example.com" ) ).toEqual( result ); + expect( oc.get( "http2", "example.com" ) ).toBeUndefined( ); + expect( oc.get( "http2", "example.org" ) ).toBeUndefined( ); + expect( oc.get( "http2", "foo.example.com" ) ).toBeUndefined( ); + expect( oc.get( protocol, "sub.foo.example.com" ) ).toBeUndefined( ); + } ); + + it( "should handle origin without alt-names (non-TLS)", async ( ) => + { + const oc = new OriginCache( ); + + const firstOrigin = "example.com"; + const protocol = "http1"; + const session = { }; + + oc.set( + firstOrigin, + protocol, + session + ); + + const result = { + protocol, + session, + firstOrigin, + }; + + expect( oc.get( protocol, "foo.com" ) ).toBeUndefined( ); + expect( oc.get( protocol, "example.com" ) ).toEqual( result ); + expect( oc.get( protocol, "foo.example.com" ) ).toBeUndefined( ); + expect( oc.get( "http2", "example.com" ) ).toBeUndefined( ); + expect( oc.get( "http2", "example.org" ) ).toBeUndefined( ); + expect( oc.get( "http2", "foo.example.com" ) ).toBeUndefined( ); + expect( oc.get( protocol, "sub.foo.example.com" ) ).toBeUndefined( ); + } ); + + it( "should cleanup properly", async ( ) => + { + const oc = new OriginCache( ); + + const firstOrigin = "example.com"; + const protocol = "http1"; + const session = { }; + + oc.set( + firstOrigin, + protocol, + session, + { + names: [ firstOrigin, "example.org" ], + dynamic: ( origin: string ) => + !!origin.match( makeRegex( "*.example.com" ) ), + } + ); + + oc.get( protocol, "foo.com" ); + oc.get( protocol, "example.com" ); + oc.get( protocol, "example.org" ); + oc.get( protocol, "foo.example.com" ); + oc.get( protocol, "sub.foo.example.com" ); + + expect( oc.delete( session ) ).toBe( true ); + + expect( ( oc as any ).sessionMap.size ).toBe( 0 ); + expect( ( oc as any ).staticMap.size ).toBe( 0 ); + + expect( oc.delete( session ) ).toBe( false ); + expect( oc.delete( "foo" ) ).toBe( false ); + } ); +} ); diff --git a/test/fetch-h2/san.ts b/test/fetch-h2/san.ts new file mode 100644 index 0000000..64102c6 --- /dev/null +++ b/test/fetch-h2/san.ts @@ -0,0 +1,31 @@ +import { makeRegex } from 
"../../lib/san" + + +describe( "SAN", ( ) => +{ + describe( "makeRegex", ( ) => + { + it( "should handle non alpha-numeric characters right", async ( ) => + { + const regex = makeRegex( "*.example-domain.com" ); + + expect( regex ).toBe( "^[^.]+\\.example\\-domain\\.com$" ); + + const re = new RegExp( regex ); + const testOrigin = "foo.example-domain.com"; + const m = testOrigin.match( re ) as RegExpMatchArray; + + expect( m[ 0 ] ).toBe( testOrigin ); + } ); + + it( "should not allow sub-domains", async ( ) => + { + const regex = makeRegex( "*.example-domain.com" ); + + const re = new RegExp( regex ); + const testOrigin = "sub.foo.example-domain.com"; + + expect( testOrigin.match( re ) ).toBeNull( ); + } ); + } ); +} ); diff --git a/test/integration/httpbin.ts b/test/integration/httpbin.ts index a5f3e24..24cc591 100644 --- a/test/integration/httpbin.ts +++ b/test/integration/httpbin.ts @@ -1,7 +1,7 @@ import { URL } from "url"; import * as fs from "fs"; -import { delay, Finally } from "already"; +import { delay } from "already"; import * as through2 from "through2"; import { @@ -49,14 +49,16 @@ describe( name, ( ) => { return async ( ) => { - const { fetch, disconnectAll } = context( { + const { fetch } = context( { httpsProtocols: protos, session: certs ? { ca, cert, rejectUnauthorized: false } : { rejectUnauthorized: false }, } ); - await fn( fetch ).then( ...Finally( disconnectAll ) ); + // Disconnection shouldn't be necessary, fetch-h2 should unref + // the sockets correctly. + await fn( fetch ); }; } @@ -67,8 +69,7 @@ describe( name, ( ) => expect( data[ "user-agent" ] ).toContain( "fetch-h2/" ); } ) ); - it( "should be possible to POST JSON", wrapContext( - async ( fetch ) => + it( "should be possible to POST JSON", wrapContext( async ( fetch ) => { const testData = { foo: "bar" }; @@ -161,6 +162,7 @@ describe( name, ( ) => expect( responseSet.headers.has( "location" ) ).toBe( true ); const redirectedTo = responseSet.headers.get( "location" ); + await responseSet.text( ); const response = await fetch( baseHost + redirectedTo ); diff --git a/test/lib/server-http1.ts b/test/lib/server-http1.ts index 8259cdd..f172a6d 100644 --- a/test/lib/server-http1.ts +++ b/test/lib/server-http1.ts @@ -12,6 +12,7 @@ import { Server as HttpsServer, } from "https"; import { Socket } from "net"; +import { pipeline } from "../../lib/utils"; import { createHash } from "crypto"; import { createBrotliCompress, createDeflate, createGzip } from "zlib"; @@ -126,7 +127,7 @@ export class ServerHttp1 extends TypedServer< HttpServer | HttpsServer > } ); sendHeaders( responseHeaders ); - request.pipe( response ); + pipeline( request, response ); } else if ( path === "/set-cookie" ) { @@ -166,7 +167,7 @@ export class ServerHttp1 extends TypedServer< HttpServer | HttpsServer > try { sendHeaders( responseHeaders ); - request.pipe( response ); + pipeline( request, response ); } catch ( err ) // We ignore errors since this route is used to intentionally @@ -209,7 +210,7 @@ export class ServerHttp1 extends TypedServer< HttpServer | HttpsServer > } } ); - request.pipe( hash ); + pipeline( request, hash ); } else if ( path.startsWith( "/compressed/" ) ) { @@ -239,9 +240,9 @@ export class ServerHttp1 extends TypedServer< HttpServer | HttpsServer > sendHeaders( responseHeaders ); if ( encoder ) - request.pipe( encoder ).pipe( response ); + pipeline( request, encoder, response ); else - request.pipe( response ); + pipeline( request, response ); } else if ( path.startsWith( "/delay/" ) ) { diff --git a/test/lib/server-http2.ts 
b/test/lib/server-http2.ts index 24da286..4c7d098 100644 --- a/test/lib/server-http2.ts +++ b/test/lib/server-http2.ts @@ -8,6 +8,7 @@ import { OutgoingHttpHeaders, ServerHttp2Stream, } from "http2"; +import { pipeline } from "../../lib/utils"; import { createHash } from "crypto"; import { createBrotliCompress, createDeflate, createGzip } from "zlib"; @@ -111,7 +112,7 @@ export class ServerHttp2 extends TypedServer< Http2Server > } ); stream.respond( responseHeaders ); - stream.pipe( stream ); + pipeline( stream, stream ); } else if ( path === "/set-cookie" ) { @@ -149,7 +150,7 @@ export class ServerHttp2 extends TypedServer< Http2Server > try { stream.respond( responseHeaders ); - stream.pipe( stream ); + pipeline( stream, stream ); } catch ( err ) // We ignore errors since this route is used to intentionally @@ -202,7 +203,7 @@ export class ServerHttp2 extends TypedServer< Http2Server > } } ); - stream.pipe( hash ); + pipeline( stream, hash ); } else if ( path === "/push" ) { @@ -258,9 +259,9 @@ export class ServerHttp2 extends TypedServer< Http2Server > stream.respond( responseHeaders ); if ( encoder ) - stream.pipe( encoder ).pipe( stream ); + pipeline( stream, encoder, stream ); else - stream.pipe( stream ); + pipeline( stream, stream ); } else if ( path.startsWith( "/goaway" ) ) { From 553ab45fe107f03ff8abc50f296cafa687303e6b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Sun, 15 Mar 2020 15:33:20 +0100 Subject: [PATCH 51/77] ci(github-actions): start using GitHub Actions --- .github/workflows/branches.yml | 32 +++++++++++++++++++ .github/workflows/master.yml | 57 ++++++++++++++++++++++++++++++++++ 2 files changed, 89 insertions(+) create mode 100644 .github/workflows/branches.yml create mode 100644 .github/workflows/master.yml diff --git a/.github/workflows/branches.yml b/.github/workflows/branches.yml new file mode 100644 index 0000000..29f729f --- /dev/null +++ b/.github/workflows/branches.yml @@ -0,0 +1,32 @@ +# This workflow will do a clean install of node dependencies, build the source code and run tests across different versions of node +# For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions + +name: Branches + +on: + push: + branches: + - '*' + +jobs: + build: + name: Build + runs-on: ubuntu-latest + + strategy: + matrix: + node-version: + - 10.x + - 12.x + - 13.x + steps: + - uses: actions/checkout@v2 + - name: Use Node.js ${{ matrix.node-version }} + uses: actions/setup-node@v1 + with: + node-version: ${{ matrix.node-version }} + - run: npm i + - run: npm run build + - run: npm run test + env: + CI: true diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml new file mode 100644 index 0000000..ead7d9c --- /dev/null +++ b/.github/workflows/master.yml @@ -0,0 +1,57 @@ +# This workflow will do a clean install of node dependencies, build the source code and run tests across different versions of node +# For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions + +name: Master + +on: + push: + branches: + - master + +jobs: + build: + name: Build + runs-on: ubuntu-latest + strategy: + matrix: + node-version: + - 10.x + - 12.x + - 13.x + steps: + - uses: actions/checkout@v2 + - name: Use Node.js ${{ matrix.node-version }} + uses: actions/setup-node@v1 + with: + node-version: ${{ matrix.node-version }} + - run: npm i + - run: npm run build + - run: npm run test + env: + CI: true + + release: + name: Release 
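    # The release job below is gated on the build matrix via `needs: build`;
    # it re-runs the tests with coverage, uploads the result to Coveralls,
    # and publishes through semantic-release using the GITHUB_TOKEN and
    # NPM_TOKEN secrets.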
+ runs-on: ubuntu-latest + needs: build + steps: + - name: Checkout + uses: actions/checkout@v1 + - name: Setup Node.js + uses: actions/setup-node@v1 + with: + node-version: 12 + - run: npm i + - run: npm run build + - run: npm run test --coverage + env: + CI: true + - name: Coveralls + uses: coverallsapp/github-action@master + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + - name: Release + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + NPM_TOKEN: ${{ secrets.NPM_TOKEN }} + run: npx semantic-release From 5309ce44a6103bbb389bf31d0beb4cfd36aced7d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Sun, 15 Mar 2020 16:40:32 +0100 Subject: [PATCH 52/77] fix(deps): bumped deps (primarily fix in 'already') --- package.json | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/package.json b/package.json index 1a58a54..373971a 100644 --- a/package.json +++ b/package.json @@ -52,8 +52,8 @@ "devDependencies": { "@types/execa": "^2.0.0", "@types/from2": "^2.3.0", - "@types/jest": "^25.1.1", - "@types/node": "^13.7.0", + "@types/jest": "^25.1.4", + "@types/node": "^13.9.1", "@types/through2": "^2.0.34", "commitizen": "^4.0.3", "compd": "^1.3.7", @@ -63,15 +63,15 @@ "from2": "^2.3.0", "jest": "^25.1.0", "mkcert": "^1.2.0", - "rimraf": "^3.0.1", - "ts-jest": "^25.1.0", + "rimraf": "^3.0.2", + "ts-jest": "^25.2.1", "ts-node": "^8.6.2", - "tslint": "^6.0.0", - "typescript": "^3.7.5" + "tslint": "^6.1.0", + "typescript": "^3.8.3" }, "dependencies": { "@types/tough-cookie": "^2.3.6", - "already": "^1.10.1", + "already": "^1.11.1", "callguard": "^1.2.1", "get-stream": "^5.1.0", "through2": "^3.0.1", From 13818a7d1cb129348a8bc211451a6d12aa6b7cae Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Sun, 15 Mar 2020 16:42:54 +0100 Subject: [PATCH 53/77] docs(readme): changed build badge from Travis to GitHub Actions --- README.md | 6 +++--- test/fetch-h2/index.ts | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index ac82ecb..21df5c2 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,5 @@ [![npm version][npm-image]][npm-url] -[![build status][travis-image]][travis-url] +[![build status][build-image]][build-url] [![coverage status][coverage-image]][coverage-url] [![Greenkeeper badge](https://badges.greenkeeper.io/grantila/fetch-h2.svg)](https://greenkeeper.io/) [![Language grade: JavaScript][lgtm-image]][lgtm-url] @@ -388,8 +388,8 @@ const response = await fetch( url, { method, body } ); [npm-image]: https://img.shields.io/npm/v/fetch-h2.svg [npm-url]: https://npmjs.org/package/fetch-h2 -[travis-image]: https://img.shields.io/travis/grantila/fetch-h2/master.svg -[travis-url]: https://travis-ci.org/grantila/fetch-h2 +[build-image]: https://img.shields.io/github/workflow/status/grantila/fetch-h2/Master.svg +[build-url]: https://github.com/grantila/fetch-h2/actions?query=workflow%3AMaster [coverage-image]: https://coveralls.io/repos/github/grantila/fetch-h2/badge.svg?branch=master [coverage-url]: https://coveralls.io/github/grantila/fetch-h2?branch=master [lgtm-image]: https://img.shields.io/lgtm/grade/javascript/g/grantila/fetch-h2.svg?logo=lgtm&logoWidth=18 diff --git a/test/fetch-h2/index.ts b/test/fetch-h2/index.ts index f1b57ae..aac2c72 100644 --- a/test/fetch-h2/index.ts +++ b/test/fetch-h2/index.ts @@ -368,7 +368,7 @@ describe( `generic (${protoVersion})`, ( ) => it( "should timeout on a slow request", async ( ) => { - jest.setTimeout( 500 ); + jest.setTimeout( 1000 ); const { server, 
port } = await makeServer( ); From 91748a5b910178dcee1e210e75594038e2548247 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Sun, 15 Mar 2020 17:18:58 +0100 Subject: [PATCH 54/77] fix(core): fixed session cleanup/disconnect --- lib/context.ts | 23 ++++++++++++++-------- lib/origin-cache.ts | 39 ++++++++++++++++++++++++------------- test/integration/httpbin.ts | 22 +++++++++++++++------ 3 files changed, 57 insertions(+), 27 deletions(-) diff --git a/lib/context.ts b/lib/context.ts index 8808d58..0cbf475 100644 --- a/lib/context.ts +++ b/lib/context.ts @@ -180,11 +180,7 @@ export class Context public async disconnect( url: string ) { const { origin } = this.parseInput( url ); - const sessions = this._originCache.getAny( origin ); - sessions.forEach( ( { session } ) => - { - this._originCache.delete( session ); - } ); + this._originCache.disconnect( origin ); await Promise.all( [ this.h1Context.disconnect( url ), @@ -194,7 +190,7 @@ export class Context public async disconnectAll( ) { - this._originCache.clear( ); + this._originCache.disconnectAll( ); await Promise.all( [ this.h1Context.disconnectAll( ), @@ -419,6 +415,15 @@ export class Context getByOrigin( this._sessionOptions, origin ) ); + const disconnect = once( ( ) => + { + if ( !socket.destroyed ) + { + socket.destroy( ); + socket.unref( ); + } + } ); + if ( protocol === "http2" ) { // Convert socket into http2 session, this will ref (*) @@ -435,7 +440,8 @@ export class Context origin, "https2", cacheableSession, - altNameMatch + altNameMatch, + disconnect ); shortcut( ); @@ -456,7 +462,8 @@ export class Context origin, "https1", session, - altNameMatch + altNameMatch, + disconnect ); const cleanup = this.h1Context.addUsedSocket( diff --git a/lib/origin-cache.ts b/lib/origin-cache.ts index c0ce5ff..2eaab38 100644 --- a/lib/origin-cache.ts +++ b/lib/origin-cache.ts @@ -10,6 +10,7 @@ interface State< Session > session: Session; match?: AltNameMatch; resolved: Array< string >; + cleanup?: ( ) => void; } function makeKey( protocol: Protocol, origin: string ) @@ -31,17 +32,6 @@ export default class OriginCache< SessionMap extends AnySessionMap > private sessionMap: Map< unknown, State< unknown > > = new Map( ); private staticMap: Map< string, State< unknown > > = new Map( ); - public getAny( origin: string ) - { - return [ - this.get( 'https1', origin ), - this.get( 'https2', origin ), - this.get( 'http1', origin ), - this.get( 'http2', origin ), - ] - .filter( < T >( t: T ): t is NonNullable< T > => !!t ); - } - public get< P extends Protocol >( protocol: P, origin: string ) : OriginCacheEntry< typeof protocol, SessionMap[ P ] > | undefined { @@ -80,7 +70,8 @@ export default class OriginCache< SessionMap extends AnySessionMap > origin: string, protocol: Protocol, session: SessionMap[ typeof protocol ], - altNameMatch?: AltNameMatch + altNameMatch?: AltNameMatch, + cleanup?: ( ) => void ) { const state: State< typeof session > = { @@ -89,6 +80,7 @@ export default class OriginCache< SessionMap extends AnySessionMap > session, match: altNameMatch, resolved: [ ], + cleanup, }; this.sessionMap.set( session, state ); @@ -124,9 +116,30 @@ export default class OriginCache< SessionMap extends AnySessionMap > return true; } - public clear( ) + public disconnectAll( ) { + [ ...this.sessionMap ].forEach( ( [ _, session ] ) => + { + session.cleanup?.( ); + } ); + this.sessionMap.clear( ); this.staticMap.clear( ); } + + public disconnect( origin: string ) + { + [ + this.get( 'https1', origin ), + this.get( 'https2', origin ), 
+ this.get( 'http1', origin ), + this.get( 'http2', origin ), + ] + .filter( < T >( t: T ): t is NonNullable< T > => !!t ) + .forEach( ( { session } ) => + { + this.sessionMap.get( session )?.cleanup?.( ); + this.delete( session ); + } ); + } } diff --git a/test/integration/httpbin.ts b/test/integration/httpbin.ts index 24cc591..8a64d08 100644 --- a/test/integration/httpbin.ts +++ b/test/integration/httpbin.ts @@ -8,6 +8,7 @@ import { context, DataBody, fetch as fetchType, + disconnectAll as disconnectAllType, HttpProtocols, JsonBody, StreamBody, @@ -16,12 +17,16 @@ import { interface TestData { - scheme: string; + scheme: "http:" | "https:"; site: string; protos: Array< HttpProtocols >; certs?: boolean; } +type TestFunction = + ( fetch: typeof fetchType, disconnectAll: typeof disconnectAllType ) => + Promise< void >; + const ca = fs.readFileSync( "/tmp/fetch-h2-certs/ca.pem" ); const cert = fs.readFileSync( "/tmp/fetch-h2-certs/cert.pem" ); @@ -45,11 +50,11 @@ const name = `${site} (${protos[ 0 ]} over ${scheme.replace( ":", "" )})` + describe( name, ( ) => { - function wrapContext( fn: ( fetch: typeof fetchType ) => Promise< void > ) + function wrapContext( fn: TestFunction ) { return async ( ) => { - const { fetch } = context( { + const { fetch, disconnectAll } = context( { httpsProtocols: protos, session: certs ? { ca, cert, rejectUnauthorized: false } @@ -58,7 +63,7 @@ describe( name, ( ) => // Disconnection shouldn't be necessary, fetch-h2 should unref // the sockets correctly. - await fn( fetch ); + await fn( fetch, disconnectAll ); }; } @@ -154,7 +159,7 @@ describe( name, ( ) => } ) ); it( "should save and forward cookies", - wrapContext( async ( fetch ) => + wrapContext( async ( fetch, disconnectAll ) => { const responseSet = await fetch( `${host}/cookies/set?foo=bar`, @@ -162,12 +167,17 @@ describe( name, ( ) => expect( responseSet.headers.has( "location" ) ).toBe( true ); const redirectedTo = responseSet.headers.get( "location" ); - await responseSet.text( ); + if ( scheme === "https:" ) + // Over TLS, we need to read the payload, or the socket will not + // deref. + await responseSet.text( ); const response = await fetch( baseHost + redirectedTo ); const data = await response.json( ); expect( data.cookies ).toEqual( { foo: "bar" } ); + + await disconnectAll( ); } ) ); it( "should handle (and follow) relative paths", From 533ed4019641e8976262b3d4f158bb4a419ed77b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Tue, 17 Mar 2020 00:36:45 +0100 Subject: [PATCH 55/77] fix(session): fixed concurrency of streams within sessions Having multiple fetch requests to the same host caused them to be requested sequencially due to a concurrency bug in the underlying concurrency library 'already' which is now fixed. 
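As a rough illustration of the behaviour this fixes (the origin and paths
below are placeholders), several fetches issued against the same host now
open their streams concurrently on the shared session instead of being
serialized:

import { context } from "fetch-h2";

async function demo( )
{
	const { fetch, disconnectAll } = context( );

	// These requests target the same origin and therefore share one
	// session; with the fixed 'already' dependency their streams are
	// opened in parallel rather than one after the other.
	const responses = await Promise.all( [
		fetch( "https://api.example.com/a" ),
		fetch( "https://api.example.com/b" ),
		fetch( "https://api.example.com/c" ),
	] );

	await Promise.all( responses.map( response => response.text( ) ) );

	await disconnectAll( );
}

demo( ).catch( err => { console.error( err.stack ); } );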
fixes #39, fixes #77 and fixes #85 --- lib/context.ts | 2 ++ package.json | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/lib/context.ts b/lib/context.ts index 0cbf475..24bf723 100644 --- a/lib/context.ts +++ b/lib/context.ts @@ -396,6 +396,7 @@ export class Context { if ( cacheItem.protocol === "https1" ) { + shortcut( ); const resp = await tryWaitForHttp1( cacheItem.session ); if ( resp ) return resp; @@ -403,6 +404,7 @@ export class Context else if ( cacheItem.protocol === "https2" ) { cacheItem.session.ref( ); + shortcut( ); return doFetchHttp2( cacheItem.session ); } } diff --git a/package.json b/package.json index 373971a..a4cf496 100644 --- a/package.json +++ b/package.json @@ -71,7 +71,7 @@ }, "dependencies": { "@types/tough-cookie": "^2.3.6", - "already": "^1.11.1", + "already": "^1.12.0", "callguard": "^1.2.1", "get-stream": "^5.1.0", "through2": "^3.0.1", From d859e5bc4a781ff1913e7e575ed7b9abd04111f7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Tue, 17 Mar 2020 23:24:41 +0100 Subject: [PATCH 56/77] test(san): added multi wildcard SAN parsing tests --- test/fetch-h2/san.ts | 62 +++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 61 insertions(+), 1 deletion(-) diff --git a/test/fetch-h2/san.ts b/test/fetch-h2/san.ts index 64102c6..bba6fa4 100644 --- a/test/fetch-h2/san.ts +++ b/test/fetch-h2/san.ts @@ -1,4 +1,5 @@ -import { makeRegex } from "../../lib/san" +import { parseOrigin, makeRegex } from "../../lib/san" +import { PeerCertificate } from "tls" describe( "SAN", ( ) => @@ -28,4 +29,63 @@ describe( "SAN", ( ) => expect( testOrigin.match( re ) ).toBeNull( ); } ); } ); + + describe( "Multi wildcard domains", ( ) => + { + it( "Should throw on double-wildcards", ( ) => + { + const cert = { subject: { CN: "*.*.foo.com" } } as PeerCertificate; + const test = ( ) => parseOrigin( cert ); + expect( test ).toThrow( /invalid/i ); + } ); + + const certs = [ + { + name: "CN is wildcard", + cert: { + subject: { CN: "*.example1.com" }, + subjectaltname: + "DNS:foo.com, DNS:bar.com, DNS:*.example2.com", + } as PeerCertificate, + }, + { + name: "CN is plain", + cert: { + subject: { CN: "foo.com" }, + subjectaltname: + "DNS:bar.com, DNS:*.example1.com, DNS:*.example2.com", + } as PeerCertificate, + }, + ]; + + certs.forEach( ( { name, cert } ) => describe( name, ( ) => + { + it( `Should not match other domains`, ( ) => + { + const match = parseOrigin( cert ); + + expect( match.dynamic?.( "other.com" ) ).toBe( false ); + expect( match.dynamic?.( "sub.foo.com" ) ).toBe( false ); + expect( match.dynamic?.( "sub.bar.com" ) ).toBe( false ); + } ); + + it( `Should not plain origins`, ( ) => + { + const match = parseOrigin( cert ); + + expect( match.dynamic?.( "foo.com" ) ).toBe( false ); + expect( match.dynamic?.( "bar.com" ) ).toBe( false ); + expect( match.names.includes( "foo.com" ) ).toBe( true ); + expect( match.names.includes( "bar.com" ) ).toBe( true ); + } ); + + it( `Should not wildcard origins`, ( ) => + { + const match = parseOrigin( cert ); + + expect( match.dynamic?.( "sub.example1.com" ) ).toBe( true ); + expect( match.dynamic?.( "sub.example2.com" ) ).toBe( true ); + } ); + } ) ); + } ); } ); From 32106bfac34f4e1ac6f6df63cc1dcf7361b96d1f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Sat, 21 Mar 2020 19:56:56 +0100 Subject: [PATCH 57/77] fix(san): fixed SAN handling Will no longer care about subject CN in certificates, when a subjectaltname is present fix #92 --- lib/san.ts | 13 +++++----- 
test/fetch-h2/san.ts | 61 ++++++++++++++++++++++++++++++++++++-------- 2 files changed, 57 insertions(+), 17 deletions(-) diff --git a/lib/san.ts b/lib/san.ts index eeacde2..ab70b32 100644 --- a/lib/san.ts +++ b/lib/san.ts @@ -10,7 +10,7 @@ export interface AltNameMatch } -function getAltNames( cert: PeerCertificate ) +function getNames( cert: PeerCertificate ) { const CN = cert.subject?.CN; const sans = ( cert.subjectaltname ?? '' ) @@ -19,10 +19,11 @@ function getAltNames( cert: PeerCertificate ) .filter( name => name.startsWith( 'DNS:' ) ) .map( name => name.substr( 4 ) ); - if ( CN ) - sans.push( CN ); - - return [ ...new Set( sans ) ]; + if ( cert.subjectaltname ) + // Ignore CN if SAN:s are present; https://stackoverflow.com/a/29600674 + return [ ...new Set( sans ) ]; + else + return [ CN ]; } export function makeRegex( name: string ) @@ -45,7 +46,7 @@ export function parseOrigin( cert?: PeerCertificate ): AltNameMatch if ( cert ) { - getAltNames( cert ).forEach( name => + getNames( cert ).forEach( name => { if ( name.match( /.*\*.*\*.*/ ) ) throw new Error( `Invalid CN/subjectAltNames: ${name}` ); diff --git a/test/fetch-h2/san.ts b/test/fetch-h2/san.ts index bba6fa4..ff2ce3a 100644 --- a/test/fetch-h2/san.ts +++ b/test/fetch-h2/san.ts @@ -30,6 +30,22 @@ describe( "SAN", ( ) => } ); } ); + it( "Should match on CN when no SAN is provided (plain)", ( ) => + { + const cert = { subject: { CN: "foo.com" } } as PeerCertificate; + const { names, dynamic } = parseOrigin( cert ); + expect( names ).toStrictEqual( [ "foo.com" ] ); + expect( dynamic ).toBe( undefined ); + } ); + + it( "Should match on CN when no SAN is provided (dynamic)", ( ) => + { + const cert = { subject: { CN: "*.foo.com" } } as PeerCertificate; + const { names, dynamic } = parseOrigin( cert ); + expect( names.length ).toBe( 0 ); + expect( dynamic?.( "test.foo.com" ) ).toBe( true ); + } ); + describe( "Multi wildcard domains", ( ) => { it( "Should throw on double-wildcards", ( ) => @@ -39,21 +55,41 @@ describe( "SAN", ( ) => expect( test ).toThrow( /invalid/i ); } ); + const subjectaltname = [ + "DNS:foo.com", + "DNS:bar.com", + "DNS:example1.com", + "DNS:*.example1.com", + "DNS:*.example2.com", + ].join( ", " ); + const certs = [ { name: "CN is wildcard", cert: { subject: { CN: "*.example1.com" }, - subjectaltname: - "DNS:foo.com, DNS:bar.com, DNS:*.example2.com", + subjectaltname, } as PeerCertificate, }, { name: "CN is plain", cert: { - subject: { CN: "foo.com" }, - subjectaltname: - "DNS:bar.com, DNS:*.example1.com, DNS:*.example2.com", + subject: { CN: "example1.com" }, + subjectaltname, + } as PeerCertificate, + }, + { + name: "CN is wildcard but not in SAN", + cert: { + subject: { CN: "*.invalid.com" }, + subjectaltname, + } as PeerCertificate, + }, + { + name: "CN is plain but not in SAN", + cert: { + subject: { CN: "invalid.com" }, + subjectaltname, } as PeerCertificate, }, ]; @@ -62,14 +98,16 @@ describe( "SAN", ( ) => { it( `Should not match other domains`, ( ) => { - const match = parseOrigin( cert ); + const { names, dynamic } = parseOrigin( cert ); - expect( match.dynamic?.( "other.com" ) ).toBe( false ); - expect( match.dynamic?.( "sub.foo.com" ) ).toBe( false ); - expect( match.dynamic?.( "sub.bar.com" ) ).toBe( false ); + expect( names.includes( "invalid.com" ) ).toBe( false ); + expect( dynamic?.( "invalid.com" ) ).toBe( false ); + expect( dynamic?.( "test.invalid.com" ) ).toBe( false ); + expect( dynamic?.( "sub.foo.com" ) ).toBe( false ); + expect( dynamic?.( "sub.bar.com" ) ).toBe( false ); } ); - it( 
`Should not plain origins`, ( ) => + it( `Should handle plain names`, ( ) => { const match = parseOrigin( cert ); @@ -77,9 +115,10 @@ describe( "SAN", ( ) => expect( match.dynamic?.( "bar.com" ) ).toBe( false ); expect( match.names.includes( "foo.com" ) ).toBe( true ); expect( match.names.includes( "bar.com" ) ).toBe( true ); + expect( match.names.includes( "example1.com" ) ).toBe( true ); } ); - it( `Should not wildcard origins`, ( ) => + it( `Should not wildcard plain names`, ( ) => { const match = parseOrigin( cert ); From 91e111ada393b1ae86fd6d62618d3ed980516321 Mon Sep 17 00:00:00 2001 From: Colin Bendell Date: Thu, 12 Dec 2019 10:30:44 -0500 Subject: [PATCH 58/77] fix(zlib): Default flush options to Z_SYNC_FLUSH This avoids errors from partial or interrupted streams including an empty stream (eg: HEAD) fixes #72 --- lib/response.ts | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/lib/response.ts b/lib/response.ts index ca8e671..0780fed 100644 --- a/lib/response.ts +++ b/lib/response.ts @@ -6,9 +6,11 @@ import { import { pipeline } from "stream"; import { + constants as zlibConstants, createBrotliDecompress, createGunzip, createInflate, + ZlibOptions, } from "zlib"; const { @@ -276,11 +278,16 @@ function handleEncoding( // TODO: Add error handling }; + const zlibOpts: ZlibOptions = { + flush: zlibConstants.Z_SYNC_FLUSH, + finishFlush: zlibConstants.Z_SYNC_FLUSH, + }; + const decoders: { [ name: string ]: DecodeFunction; } = { deflate: ( stream: NodeJS.ReadableStream ) => pipeline( stream, createInflate( ), handleStreamResult ), gzip: ( stream: NodeJS.ReadableStream ) => - pipeline( stream, createGunzip( ), handleStreamResult ), + pipeline( stream, createGunzip( zlibOpts ), handleStreamResult ), }; if ( hasBuiltinBrotli( ) ) From c3309be2e8257440efabc6f80c70720ef69f0d67 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Fri, 5 Jun 2020 11:01:53 +0200 Subject: [PATCH 59/77] test(goaway): skip goaway test, since it's broken in Node.js nowadays (in multiple versions) --- test/fetch-h2/index.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/fetch-h2/index.ts b/test/fetch-h2/index.ts index aac2c72..e28113d 100644 --- a/test/fetch-h2/index.ts +++ b/test/fetch-h2/index.ts @@ -731,7 +731,7 @@ describe( `goaway (${protoVersion})`, ( ) => await server.shutdown( ); } ); - it( "user-disconnect closes all sessions", async ( ) => + it.skip( "user-disconnect closes all sessions", async ( ) => { const { server, port } = await makeServer( ); From fbc90bdea74c1fd8e58cc51c00f28fc592129ed7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Thu, 20 Aug 2020 19:48:43 +0200 Subject: [PATCH 60/77] fix(deps): bumped deps --- lib/body.ts | 6 +++--- package.json | 32 ++++++++++++++++---------------- test/fetch-h2/body.ts | 10 +++++----- test/fetch-h2/index.ts | 8 ++++---- test/lib/server-http1.ts | 6 +++--- test/lib/server-http2.ts | 8 ++++---- 6 files changed, 35 insertions(+), 35 deletions(-) diff --git a/lib/body.ts b/lib/body.ts index 2354484..7c8ba89 100644 --- a/lib/body.ts +++ b/lib/body.ts @@ -1,7 +1,7 @@ import { createHash } from "crypto"; import { tap } from "already"; -import getStream from "get-stream"; +import { buffer as getStreamBuffer } from "get-stream"; import * as through2 from "through2"; import * as toArrayBuffer from "to-arraybuffer"; @@ -221,7 +221,7 @@ export class Body implements IBody : Promise< Buffer > { if ( !this._signal ) - return getStream.buffer( readable ); + return 
getStreamBuffer( readable ); // Race the readable against the abort signal let callback: ( ) => void = ( ) => { }; @@ -236,7 +236,7 @@ export class Body implements IBody this._ensureNotAborted( ); return await Promise.race( [ - getStream.buffer( readable ), + getStreamBuffer( readable ), onAborted, ] ); } diff --git a/package.json b/package.json index a4cf496..7683387 100644 --- a/package.json +++ b/package.json @@ -52,31 +52,31 @@ "devDependencies": { "@types/execa": "^2.0.0", "@types/from2": "^2.3.0", - "@types/jest": "^25.1.4", - "@types/node": "^13.9.1", - "@types/through2": "^2.0.34", - "commitizen": "^4.0.3", + "@types/jest": "^25.2.3", + "@types/node": "^14.6.0", + "@types/through2": "^2.0.36", + "commitizen": "^4.1.2", "compd": "^1.3.7", - "concurrently": "^5.1.0", - "cz-conventional-changelog": "^3.1.0", - "execa": "^4.0.0", + "concurrently": "^5.3.0", + "cz-conventional-changelog": "^3.2.0", + "execa": "^4.0.3", "from2": "^2.3.0", "jest": "^25.1.0", - "mkcert": "^1.2.0", + "mkcert": "^1.3.0", "rimraf": "^3.0.2", "ts-jest": "^25.2.1", - "ts-node": "^8.6.2", - "tslint": "^6.1.0", - "typescript": "^3.8.3" + "ts-node": "^8.10.2", + "tslint": "^6.1.3", + "typescript": "^3.9.7" }, "dependencies": { - "@types/tough-cookie": "^2.3.6", - "already": "^1.12.0", + "@types/tough-cookie": "^4.0.0", + "already": "^1.13.1", "callguard": "^1.2.1", - "get-stream": "^5.1.0", - "through2": "^3.0.1", + "get-stream": "^6.0.0", + "through2": "^4.0.2", "to-arraybuffer": "^1.0.1", - "tough-cookie": "^3.0.1" + "tough-cookie": "^4.0.0" }, "config": { "commitizen": { diff --git a/test/fetch-h2/body.ts b/test/fetch-h2/body.ts index 8e9c644..cf21f2a 100644 --- a/test/fetch-h2/body.ts +++ b/test/fetch-h2/body.ts @@ -1,5 +1,5 @@ import { createHash } from "crypto"; -import getStream from "get-stream"; +import { buffer as getStreamBuffer } from "get-stream"; import * as through2 from "through2"; import { createIntegrity } from "../lib/utils"; @@ -498,21 +498,21 @@ describe( "body", ( ) => it( "handle null", async ( ) => { const body = new DataBody( null ); - const data = await getStream.buffer( await body.readable( ) ); + const data = await getStreamBuffer( await body.readable( ) ); expect( data.toString( ) ).toBe( "" ); } ); it( "handle string", async ( ) => { const body = new DataBody( "foo" ); - const data = await getStream.buffer( await body.readable( ) ); + const data = await getStreamBuffer( await body.readable( ) ); expect( data.toString( ) ).toBe( "foo" ); } ); it( "handle buffer", async ( ) => { const body = new DataBody( Buffer.from( "foo" ) ); - const data = await getStream.buffer( await body.readable( ) ); + const data = await getStreamBuffer( await body.readable( ) ); expect( data.toString( ) ).toBe( "foo" ); } ); @@ -521,7 +521,7 @@ describe( "body", ( ) => const stream = through2( ); stream.end( "foo" ); const body = new StreamBody( stream ); - const data = await getStream.buffer( await body.readable( ) ); + const data = await getStreamBuffer( await body.readable( ) ); expect( data.toString( ) ).toBe( "foo" ); } ); } ); diff --git a/test/fetch-h2/index.ts b/test/fetch-h2/index.ts index e28113d..fd6623a 100644 --- a/test/fetch-h2/index.ts +++ b/test/fetch-h2/index.ts @@ -1,7 +1,7 @@ import { defer, delay } from "already"; import { createHash } from "crypto"; import * as from2 from "from2"; -import getStream from "get-stream"; +import { buffer as getStreamBuffer } from "get-stream"; import * as through2 from "through2"; import { TestData } from "../lib/server-common"; @@ -603,7 +603,7 @@ describe( 
`generic (${protoVersion})`, ( ) => const stream = await response.readable( ); - const data = await getStream.buffer( stream ); + const data = await getStreamBuffer( stream ); expect( JSON.parse( data.toString( ) ) ).toEqual( testData ); @@ -630,7 +630,7 @@ describe( `generic (${protoVersion})`, ( ) => const stream = await response.readable( ); - const data = await getStream.buffer( stream ); + const data = await getStreamBuffer( stream ); expect( JSON.parse( data.toString( ) ) ).toEqual( testData ); @@ -660,7 +660,7 @@ describe( `generic (${protoVersion})`, ( ) => const stream = await response.readable( ); - const data = await getStream.buffer( stream ); + const data = await getStreamBuffer( stream ); expect( JSON.parse( data.toString( ) ) ).toEqual( testData ); diff --git a/test/lib/server-http1.ts b/test/lib/server-http1.ts index f172a6d..2720944 100644 --- a/test/lib/server-http1.ts +++ b/test/lib/server-http1.ts @@ -18,7 +18,7 @@ import { createHash } from "crypto"; import { createBrotliCompress, createDeflate, createGzip } from "zlib"; import { delay } from "already"; -import getStream from "get-stream"; +import { buffer as getStreamBuffer } from "get-stream"; import { ignoreError, @@ -136,7 +136,7 @@ export class ServerHttp1 extends TypedServer< HttpServer | HttpsServer > [ HTTP2_HEADER_SET_COOKIE ]: [ ], }; - const data = await getStream.buffer( request ); + const data = await getStreamBuffer( request ); const json = JSON.parse( data.toString( ) ); json.forEach( ( cookie: any ) => { @@ -180,7 +180,7 @@ export class ServerHttp1 extends TypedServer< HttpServer | HttpsServer > ":status": 200, }; - const data = await getStream.buffer( request ); + const data = await getStreamBuffer( request ); const json = JSON.parse( data.toString( ) ); sendHeaders( responseHeaders ); diff --git a/test/lib/server-http2.ts b/test/lib/server-http2.ts index 4c7d098..36fe2fe 100644 --- a/test/lib/server-http2.ts +++ b/test/lib/server-http2.ts @@ -14,7 +14,7 @@ import { createHash } from "crypto"; import { createBrotliCompress, createDeflate, createGzip } from "zlib"; import { delay } from "already"; -import getStream from "get-stream"; +import { buffer as getStreamBuffer } from "get-stream"; import { ignoreError, @@ -121,7 +121,7 @@ export class ServerHttp2 extends TypedServer< Http2Server > [ HTTP2_HEADER_SET_COOKIE ]: [ ], }; - const data = await getStream.buffer( stream ); + const data = await getStreamBuffer( stream ); const json = JSON.parse( data.toString( ) ); json.forEach( ( cookie: any ) => { @@ -163,7 +163,7 @@ export class ServerHttp2 extends TypedServer< Http2Server > ":status": 200, }; - const data = await getStream.buffer( stream ); + const data = await getStreamBuffer( stream ); const json = JSON.parse( data.toString( ) ); stream.once( "wantTrailers", ( ) => @@ -211,7 +211,7 @@ export class ServerHttp2 extends TypedServer< Http2Server > ":status": 200, }; - const data = await getStream.buffer( stream ); + const data = await getStreamBuffer( stream ); const json = JSON.parse( data.toString( ) ); json.forEach( ( pushable: any ) => From c22c63c5b7df3e16feb86892e6cdcfa424d88b3b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Thu, 20 Aug 2020 19:50:48 +0200 Subject: [PATCH 61/77] feat(core): added support for absolute redirections Also fixed async ref issue with http2 relative redirections fix #107 --- lib/context.ts | 26 +++++++---- lib/core.ts | 38 ---------------- lib/fetch-common.ts | 9 +--- lib/fetch-http1.ts | 65 ++++++++++++++++++--------- lib/fetch-http2.ts | 
62 +++++++++++++++++-------- lib/simple-session.ts | 50 +++++++++++++++++++++ lib/utils.ts | 14 +++++- test-client/index.ts | 1 + test/fetch-h2/event-loop-reference.ts | 42 ++++++++++++++++- test/lib/server-http1.ts | 16 +++++++ test/lib/server-http2.ts | 16 +++++++ 11 files changed, 243 insertions(+), 96 deletions(-) create mode 100644 lib/simple-session.ts diff --git a/lib/context.ts b/lib/context.ts index 24bf723..1412b4a 100644 --- a/lib/context.ts +++ b/lib/context.ts @@ -19,11 +19,13 @@ import { HttpProtocols, parsePerOrigin, PerOrigin, + RetryError, +} from "./core"; +import { SimpleSession, SimpleSessionHttp1, SimpleSessionHttp2, - RetryError, -} from "./core"; +} from "./simple-session"; import { fetch as fetchHttp1 } from "./fetch-http1"; import { fetch as fetchHttp2 } from "./fetch-http2"; import { version } from "./generated/version"; @@ -31,6 +33,7 @@ import { Request } from "./request"; import { Response } from "./response"; import { parseInput } from "./utils"; import OriginCache from "./origin-cache"; +import { FetchExtra } from "./fetch-common"; function makeDefaultUserAgent( ): string @@ -174,7 +177,7 @@ export class Context public async fetch( input: string | Request, init?: Partial< FetchInit > ) { - return this.retryFetch( input, init, 0 ); + return this.retryFetch( input, init ); } public async disconnect( url: string ) @@ -201,25 +204,27 @@ export class Context private async retryFetch( input: string | Request, init: Partial< FetchInit > | undefined, - count: number + extra?: FetchExtra, + count: number = 0 ) : Promise< Response > { ++count; - return this.retryableFetch( input, init ) + return this.retryableFetch( input, init, extra ) .catch( specific( RetryError, err => { // TODO: Implement a more robust retry logic if ( count > 10 ) throw err; - return this.retryFetch( input, init, count ); + return this.retryFetch( input, init, extra, count ); } ) ); } private async retryableFetch( input: string | Request, - init?: Partial< FetchInit > + init?: Partial< FetchInit >, + extra?: FetchExtra ) : Promise< Response > { @@ -243,6 +248,7 @@ export class Context cookieJar: this._cookieJar, protocol, userAgent: ( ) => this.userAgent( origin ), + newFetch: this.retryFetch.bind( this ), } ); const doFetchHttp1 = ( socket: Socket, cleanup: ( ) => void ) => @@ -259,7 +265,7 @@ export class Context } ), ...makeSimpleSession( "http1" ), }; - return fetchHttp1( sessionGetterHttp1, request, init ); + return fetchHttp1( sessionGetterHttp1, request, init, extra ); }; const doFetchHttp2 = async ( cacheableSession: CacheableH2Session ) => @@ -273,7 +279,9 @@ export class Context get: ( ) => ( { session, cleanup } ), ...makeSimpleSession( "http2" ), }; - return await fetchHttp2( sessionGetterHttp2, request, init ); + return await fetchHttp2( + sessionGetterHttp2, request, init, extra + ); } catch ( err ) { diff --git a/lib/core.ts b/lib/core.ts index e579346..93834bf 100644 --- a/lib/core.ts +++ b/lib/core.ts @@ -1,8 +1,4 @@ -import { ClientRequest } from "http"; -import { ClientHttp2Session } from "http2"; - import { AbortSignal } from "./abort"; -import { CookieJar } from "./cookie-jar"; import { Headers, RawHeaders } from "./headers"; @@ -241,37 +237,3 @@ export interface Http1Options maxFreeSockets: number | PerOrigin< number >; timeout: void | number | PerOrigin< void | number >; } - -export interface SimpleSession -{ - protocol: HttpProtocols; - - cookieJar: CookieJar; - - userAgent( ): string; - accept( ): string; - - contentDecoders( ): ReadonlyArray< Decoder >; -} - -export 
interface SimpleSessionHttp1Request -{ - req: ClientRequest; - cleanup: ( ) => void; -} - -export interface SimpleSessionHttp2Session -{ - session: Promise< ClientHttp2Session >; - cleanup: ( ) => void; -} - -export interface SimpleSessionHttp1 extends SimpleSession -{ - get( url: string ): SimpleSessionHttp1Request; -} - -export interface SimpleSessionHttp2 extends SimpleSession -{ - get( ): SimpleSessionHttp2Session; -} diff --git a/lib/fetch-common.ts b/lib/fetch-common.ts index 1c5a333..03662f8 100644 --- a/lib/fetch-common.ts +++ b/lib/fetch-common.ts @@ -4,13 +4,8 @@ import { URL } from "url"; import { Finally, rethrow } from "already"; import { BodyInspector } from "./body"; -import { - AbortError, - Decoder, - FetchInit, - SimpleSession, - TimeoutError, -} from "./core"; +import { AbortError, Decoder, FetchInit, TimeoutError } from "./core"; +import { SimpleSession } from "./simple-session"; import { Headers, RawHeaders } from "./headers"; import { Request } from "./request"; import { Response } from "./response"; diff --git a/lib/fetch-http1.ts b/lib/fetch-http1.ts index 0b1bea0..728c7ab 100644 --- a/lib/fetch-http1.ts +++ b/lib/fetch-http1.ts @@ -5,10 +5,8 @@ import { Socket } from "net"; import { syncGuard } from "callguard"; import { AbortController } from "./abort"; -import { - FetchInit, - SimpleSessionHttp1, -} from "./core"; +import { FetchInit } from "./core"; +import { SimpleSessionHttp1 } from "./simple-session"; import { FetchExtra, handleSignalAndTimeout, @@ -23,7 +21,13 @@ import { import { GuardedHeaders } from "./headers"; import { Request } from "./request"; import { Response, StreamResponse } from "./response"; -import { arrayify, isRedirectStatus, parseLocation, pipeline } from "./utils"; +import { + arrayify, + isRedirectStatus, + parseLocation, + pipeline, + ParsedLocation, +} from "./utils"; const { // Responses, these are the same in HTTP/1.1 and HTTP/2 @@ -215,8 +219,11 @@ export async function fetchImpl( ) ); + const { url: locationUrl, isRelative } = + location as ParsedLocation; + if ( redirect === "error" ) - return reject( makeRedirectionError( location ) ); + return reject( makeRedirectionError( locationUrl ) ); // redirect is 'follow' @@ -225,24 +232,36 @@ export async function fetchImpl( // body). The concept is fundementally broken anyway... if ( !endStream ) return reject( - makeRedirectionMethodError( location, method ) + makeRedirectionMethodError( locationUrl, method ) ); - if ( !location ) - return reject( makeIllegalRedirectError( ) ); - res.destroy( ); - resolve( - fetchImpl( - session, - request.clone( location ), - { signal, onTrailers }, + + if ( isRelative ) + { + resolve( + fetchImpl( + session, + request.clone( locationUrl ), + { signal, onTrailers }, + { + redirected: redirected.concat( url ), + timeoutAt, + } + ) + ); + } + else + { + resolve( session.newFetch( + request.clone( locationUrl ), + init, { - redirected: redirected.concat( url ), timeoutAt, + redirected: redirected.concat( url ), } - ) - ); + ) ); + } } ) ); } ); @@ -274,13 +293,15 @@ export async function fetchImpl( export function fetch( session: SimpleSessionHttp1, input: Request, - init?: Partial< FetchInit > + init?: Partial< FetchInit >, + extra?: FetchExtra ) : Promise< Response > { - const timeoutAt = void 0; - - const extra = { timeoutAt, redirected: [ ] }; + extra = { + timeoutAt: extra?.timeoutAt, + redirected: extra?.redirected ?? 
[ ], + }; return fetchImpl( session, input, init, extra ); } diff --git a/lib/fetch-http2.ts b/lib/fetch-http2.ts index 1ea06db..fee409a 100644 --- a/lib/fetch-http2.ts +++ b/lib/fetch-http2.ts @@ -11,8 +11,8 @@ import { AbortError, RetryError, FetchInit, - SimpleSessionHttp2, } from "./core"; +import { SimpleSessionHttp2 } from "./simple-session"; import { FetchExtra, handleSignalAndTimeout, @@ -27,7 +27,13 @@ import { import { GuardedHeaders } from "./headers"; import { Request } from "./request"; import { Response, StreamResponse } from "./response"; -import { arrayify, isRedirectStatus, parseLocation, pipeline } from "./utils"; +import { + arrayify, + isRedirectStatus, + parseLocation, + pipeline, + ParsedLocation, +} from "./utils"; import { hasGotGoaway } from "./utils-http2"; const { @@ -121,6 +127,7 @@ async function fetchImpl( }; let stream: ClientHttp2Stream; + let shouldCleanupSocket = true; try { stream = h2session.request( headersToSend, { endStream } ); @@ -177,7 +184,8 @@ async function fetchImpl( stream.on( "close", guard( ( ) => { - socketCleanup( ); + if ( shouldCleanupSocket ) + socketCleanup( ); // We'll get an 'error' event if there actually is an // error, but not if we got NGHTTP2_NO_ERROR. @@ -313,8 +321,11 @@ async function fetchImpl( ) ); + const { url: locationUrl, isRelative } = + location as ParsedLocation; + if ( redirect === "error" ) - return reject( makeRedirectionError( location ) ); + return reject( makeRedirectionError( locationUrl ) ); // redirect is 'follow' @@ -323,25 +334,38 @@ async function fetchImpl( // body). The concept is fundementally broken anyway... if ( !endStream ) return reject( - makeRedirectionMethodError( location, method ) + makeRedirectionMethodError( locationUrl, method ) ); if ( !location ) return reject( makeIllegalRedirectError( ) ); - stream.destroy( ); - resolve( - fetchImpl( + if ( isRelative ) + { + shouldCleanupSocket = false; + stream.destroy( ); + resolve( fetchImpl( session, - request.clone( location ), - { signal, onTrailers }, + request.clone( locationUrl ), + init, { raceConditionedGoaway, redirected: redirected.concat( url ), timeoutAt, } - ) - ); + ) ); + } + else + { + resolve( session.newFetch( + request.clone( locationUrl ), + init, + { + timeoutAt, + redirected: redirected.concat( url ), + } + ) ); + } } ) ); } ); @@ -371,14 +395,16 @@ async function fetchImpl( export function fetch( session: SimpleSessionHttp2, input: Request, - init?: Partial< FetchInit > + init?: Partial< FetchInit >, + extra?: FetchExtra ) : Promise< Response > { - const timeoutAt = void 0; - - const raceConditionedGoaway = new Set< string>( ); - const extra = { timeoutAt, redirected: [ ], raceConditionedGoaway }; + const http2Extra: FetchExtraHttp2 = { + timeoutAt: extra?.timeoutAt, + redirected: extra?.redirected ?? 
[ ], + raceConditionedGoaway: new Set< string>( ), + }; - return fetchImpl( session, input, init, extra ); + return fetchImpl( session, input, init, http2Extra ); } diff --git a/lib/simple-session.ts b/lib/simple-session.ts new file mode 100644 index 0000000..be2db18 --- /dev/null +++ b/lib/simple-session.ts @@ -0,0 +1,50 @@ +import { ClientRequest } from "http"; +import { ClientHttp2Session } from "http2"; + +import { CookieJar } from "./cookie-jar"; +import { HttpProtocols, Decoder, FetchInit } from "./core"; +import { FetchExtra } from "./fetch-common"; +import { Request } from "./request"; +import { Response } from "./response"; + + +export interface SimpleSession +{ + protocol: HttpProtocols; + + cookieJar: CookieJar; + + userAgent( ): string; + accept( ): string; + + contentDecoders( ): ReadonlyArray< Decoder >; + + newFetch( + input: string | Request, + init?: Partial< FetchInit >, + extra?: FetchExtra + ) + : Promise< Response >; +} + +export interface SimpleSessionHttp1Request +{ + req: ClientRequest; + cleanup: ( ) => void; +} + +export interface SimpleSessionHttp2Session +{ + session: Promise< ClientHttp2Session >; + cleanup: ( ) => void; +} + +export interface SimpleSessionHttp1 extends SimpleSession +{ + get( url: string ): SimpleSessionHttp1Request; +} + +export interface SimpleSessionHttp2 extends SimpleSession +{ + get( ): SimpleSessionHttp2Session; +} diff --git a/lib/utils.ts b/lib/utils.ts index 21e3171..4c7eaad 100644 --- a/lib/utils.ts +++ b/lib/utils.ts @@ -22,15 +22,27 @@ export function arrayify< T >( : [ value ]; } +export interface ParsedLocation +{ + url: string; + isRelative: boolean; +} + export function parseLocation( location: string | Array< string > | undefined, origin: string ) +: null | ParsedLocation { if ( "string" !== typeof location ) return null; + const originUrl = new URL( origin ); const url = new URL( location, origin ); - return url.href; + + return { + url: url.href, + isRelative: originUrl.origin === url.origin, + }; } export const isRedirectStatus: { [ status: string ]: boolean; } = { diff --git a/test-client/index.ts b/test-client/index.ts index c09ea14..0f3eff4 100755 --- a/test-client/index.ts +++ b/test-client/index.ts @@ -29,6 +29,7 @@ async function work( ) url, { method: < any >method, + redirect: 'follow', } ); diff --git a/test/fetch-h2/event-loop-reference.ts b/test/fetch-h2/event-loop-reference.ts index ac29f7e..5e4b9e1 100644 --- a/test/fetch-h2/event-loop-reference.ts +++ b/test/fetch-h2/event-loop-reference.ts @@ -6,7 +6,8 @@ import { TestData } from "../lib/server-common"; import { makeMakeServer } from "../lib/server-helpers"; -const script = path.resolve( path.join( process.cwd( ), "scripts", "test-client" ) ); +const script = + path.resolve( path.join( process.cwd( ), "scripts", "test-client" ) ); describe( "event-loop", ( ) => { @@ -42,5 +43,44 @@ describe( "event-loop", ( ) => await server.shutdown( ); } ); + + it( `should handle redirect ${proto} ${version}`, async ( ) => + { + const { port, server } = await makeServer( ); + + const url = `${proto}//localhost:${port}/redirect/delay/50`; + + const body = { foo: "bar" }; + + const { stdout } = await execa( + script, + [ "GET", url, version, "insecure" ], + { input: JSON.stringify( body ), stderr: 'inherit' } + ); + + expect( stdout ).toBe( "abcdefghij" ); + + await server.shutdown( ); + } ); + + it( `should handle absolute redirect ${proto} ${version}`, async ( ) => + { + const { port, server } = await makeServer( ); + + const redirectTo = 
`${proto}//localhost:${port}/delay/50`; + const url = `${proto}//localhost:${port}/redirect/${redirectTo}`; + + const body = { foo: "bar" }; + + const { stdout } = await execa( + script, + [ "GET", url, version, "insecure" ], + { input: JSON.stringify( body ), stderr: 'inherit' } + ); + + expect( stdout ).toBe( "abcdefghij" ); + + await server.shutdown( ); + } ); } ); } ); diff --git a/test/lib/server-http1.ts b/test/lib/server-http1.ts index 2720944..4a3ed95 100644 --- a/test/lib/server-http1.ts +++ b/test/lib/server-http1.ts @@ -33,6 +33,7 @@ const { HTTP2_HEADER_CONTENT_LENGTH, HTTP2_HEADER_CONTENT_TYPE, HTTP2_HEADER_SET_COOKIE, + HTTP2_HEADER_LOCATION, } = h2constants; interface RawHeaders @@ -286,6 +287,21 @@ export class ServerHttp1 extends TypedServer< HttpServer | HttpsServer > { request.socket.destroy( ); } + else if ( path.startsWith( "/redirect/" ) ) + { + const redirectTo = + path.slice( 10 ).startsWith( "http" ) + ? path.slice( 10 ) + : path.slice( 9 ); + + const responseHeaders = { + ":status": 302, + [ HTTP2_HEADER_LOCATION ]: redirectTo, + }; + + sendHeaders( responseHeaders ); + response.end( ); + } else { response.end( ); diff --git a/test/lib/server-http2.ts b/test/lib/server-http2.ts index 36fe2fe..aaa580a 100644 --- a/test/lib/server-http2.ts +++ b/test/lib/server-http2.ts @@ -29,6 +29,7 @@ const { HTTP2_HEADER_CONTENT_LENGTH, HTTP2_HEADER_ACCEPT_ENCODING, HTTP2_HEADER_SET_COOKIE, + HTTP2_HEADER_LOCATION, } = constants; export class ServerHttp2 extends TypedServer< Http2Server > @@ -326,6 +327,21 @@ export class ServerHttp2 extends TypedServer< Http2Server > { stream.close( ); } + else if ( path.startsWith( "/redirect/" ) ) + { + const redirectTo = + path.slice( 10 ).startsWith( "http" ) + ? path.slice( 10 ) + : path.slice( 9 ); + + const responseHeaders = { + ":status": 302, + [ HTTP2_HEADER_LOCATION ]: redirectTo, + }; + + stream.respond( responseHeaders ); + stream.end( ); + } else { const matched = ( this._opts.matchers || [ ] ) From ee5c42ca4d13f835caaa3742019d8164bfa0d1b3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Thu, 20 Aug 2020 19:54:08 +0200 Subject: [PATCH 62/77] ci(node): test on Node.js 14 --- .github/workflows/branches.yml | 1 + .github/workflows/master.yml | 1 + .travis.yml | 22 ---------------------- 3 files changed, 2 insertions(+), 22 deletions(-) delete mode 100644 .travis.yml diff --git a/.github/workflows/branches.yml b/.github/workflows/branches.yml index 29f729f..6dba6b5 100644 --- a/.github/workflows/branches.yml +++ b/.github/workflows/branches.yml @@ -19,6 +19,7 @@ jobs: - 10.x - 12.x - 13.x + - 14.x steps: - uses: actions/checkout@v2 - name: Use Node.js ${{ matrix.node-version }} diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml index ead7d9c..2397e11 100644 --- a/.github/workflows/master.yml +++ b/.github/workflows/master.yml @@ -18,6 +18,7 @@ jobs: - 10.x - 12.x - 13.x + - 14.x steps: - uses: actions/checkout@v2 - name: Use Node.js ${{ matrix.node-version }} diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 846a235..0000000 --- a/.travis.yml +++ /dev/null @@ -1,22 +0,0 @@ -language: node_js -notifications: - email: false -node_js: - - "10" - - "12" - - "13" -before_script: - - yarn build -script: - - yarn test -branches: - except: - - /^v\d+\.\d+\.\d+$/ -jobs: - include: - - stage: release - node_js: lts/* - script: - - yarn test - - cat coverage/lcov.info | npx coveralls - - npx semantic-release From a93a54a2d3db09e20527f9a609c37f96ed2f8ab7 Mon Sep 17 00:00:00 2001 
From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Thu, 20 Aug 2020 22:32:05 +0200 Subject: [PATCH 63/77] feat(headers): added (non-standard) toJSON to the Headers class --- README.md | 1 + lib/headers.ts | 10 ++++++++++ 2 files changed, 11 insertions(+) diff --git a/README.md b/README.md index 21df5c2..69e63e2 100644 --- a/README.md +++ b/README.md @@ -167,6 +167,7 @@ These are features in `fetch-h2`, that don't exist in the Fetch API. Some things * The `Request` class (options to `fetch`) has an extra property `allowForbiddenHeaders`, which defaults to `false`. * The `Response` class also has an extra property `allowForbiddenHeaders`, which defaults to `false` (or to the value of the `Request` if it was constructed through a `fetch` call, which is the common case). * The response object has an extra property `httpVersion` which is either `1` or `2` (numbers), depending on what was negotiated with the server. + * The `Headers` class (e.g. retried by `{response}.headers`) has a `toJSON` function which converts the headers to a simple JavaScript object. ## Contexts diff --git a/lib/headers.ts b/lib/headers.ts index 7863a7b..959ae45 100644 --- a/lib/headers.ts +++ b/lib/headers.ts @@ -237,6 +237,16 @@ export class Headers for ( const value of this._data.values( ) ) yield value.join( "," ); } + + // This is non-standard, but useful + public toJSON( ) + { + return [ ...this.entries( ) ] + .reduce( ( prev, [ key, val ] ) => + Object.assign( prev, { [ key ]: val } ), + { } + ); + } } export class GuardedHeaders extends Headers From bec7c32ec855ac0091e3dff461281dd4bd8dfbaa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Thu, 20 Aug 2020 22:38:32 +0200 Subject: [PATCH 64/77] fix(core): always sends the 'host' (http/1) or ':authority' (http/2) header fix #110 --- lib/fetch-common.ts | 7 +++- test/fetch-h2/context.ts | 80 +++++++++++++++++++++++++++++++++++++++- 2 files changed, 85 insertions(+), 2 deletions(-) diff --git a/lib/fetch-common.ts b/lib/fetch-common.ts index 03662f8..7b862ec 100644 --- a/lib/fetch-common.ts +++ b/lib/fetch-common.ts @@ -16,6 +16,7 @@ const { HTTP2_HEADER_METHOD, HTTP2_HEADER_SCHEME, HTTP2_HEADER_PATH, + HTTP2_HEADER_AUTHORITY, // Methods HTTP2_METHOD_GET, @@ -124,6 +125,7 @@ export async function setupFetch( const { origin, protocol, + host, pathname, search, hash, } = new URL( url ); const path = pathname + search + hash; @@ -143,6 +145,9 @@ export async function setupFetch( if ( headers.has( HTTP2_HEADER_COOKIE ) ) cookies.push( ...arrayify( headers.get( HTTP2_HEADER_COOKIE ) ) ); + if ( !headers.has( "host" ) ) + headers.set( "host", host ); + const headersToSend: RawHeaders = { // Set required headers ...( session.protocol === "http1" ? 
{ } : { @@ -165,7 +170,7 @@ export async function setupFetch( if ( key === "host" && session.protocol === "http2" ) // Convert to :authority like curl does: // https://github.com/grantila/fetch-h2/issues/9 - headersToSend[ ":authority" ] = val; + headersToSend[ HTTP2_HEADER_AUTHORITY ] = val; else if ( key !== HTTP2_HEADER_COOKIE ) headersToSend[ key ] = val; } diff --git a/test/fetch-h2/context.ts b/test/fetch-h2/context.ts index 1412800..b2ec749 100644 --- a/test/fetch-h2/context.ts +++ b/test/fetch-h2/context.ts @@ -1,3 +1,4 @@ +import { map } from "already"; import { TestData } from "../lib/server-common"; import { makeMakeServer } from "../lib/server-helpers"; @@ -270,7 +271,7 @@ describe( `context (${version} over ${proto.replace( ":", "" )})`, ( ) => { const { server } = await makeServer( ); - const { disconnectAll, fetch } = context( ); + const { disconnectAll, fetch } = context( { ...cycleOpts } ); const awaitFetch = fetch( "${proto}//localhost:0" ); @@ -305,5 +306,82 @@ describe( `context (${version} over ${proto.replace( ":", "" )})`, ( ) => await server.shutdown( ); } ); } ); + + describe( "session sharing", ( ) => + { + jest.setTimeout( 2500 ); + + it( "should re-use session for same host", async ( ) => + { + const { disconnectAll, fetch } = context( { ...cycleOpts } ); + + const urls = [ + [ "https://en.wikipedia.org/wiki/33", "33" ], + [ "https://en.wikipedia.org/wiki/44", "44" ], + [ "https://en.wikipedia.org/wiki/42", "42" ], + ]; + + const resps = await map( + urls, + { concurrency: Infinity }, + async ( [ url, title ] ) => + { + const resp = await fetch( url ); + const text = await resp.text( ); + const m = text.match( /]*>(.*)<\/h1>/ ); + return { expected: title, got: m?.[ 1 ] }; + } + ); + + resps.forEach( ( { expected, got } ) => + { + expect( expected ).toBe( got ); + } ); + + await disconnectAll( ); + } ); + + it( "should re-use session for same SAN but different host", + async ( ) => + { + const { disconnectAll, fetch } = context( { ...cycleOpts } ); + + const urls = [ + { lang: "en", title: "33" }, + { lang: "en", title: "44" }, + { lang: "sv", title: "33" }, + { lang: "sv", title: "44" }, + ] as const; + + const resps = await map( + urls, + { concurrency: Infinity }, + async ( { lang, title } ) => + { + const url = `https://${lang}.wikipedia.org/wiki/${title}`; + const resp = await fetch( url ); + const text = await resp.text( ); + const mLang = text.match( /]* lang="([^"]+)"/ ); + const mTitle = text.match( /]*>([^<]+)<\/h1>/ ); + return { + expectedLang: lang, + gotLang: mLang?.[ 1 ], + expectedTitle: title, + gotTitle: mTitle?.[ 1 ], + }; + } + ); + + resps.forEach( + ( { expectedLang, gotLang, expectedTitle, gotTitle } ) => + { + expect( expectedLang ).toBe( gotLang ); + expect( expectedTitle ).toBe( gotTitle ); + } + ); + + await disconnectAll( ); + } ); + } ); } ); } ); From f313c55a6294d48e057f70f633933fed79e4de45 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Fri, 21 Aug 2020 01:01:21 +0200 Subject: [PATCH 65/77] fix(node): fixed https connection issue on Node.js 12 and 13 by applying the workaround as in https://github.com/nodejs/node/issues/33343 fix #104 --- lib/context-https.ts | 10 ++++++++++ lib/fetch-http2.ts | 3 +-- test/fetch-h2/context.ts | 4 ++-- test/fetch-h2/index.ts | 4 ++-- 4 files changed, 15 insertions(+), 6 deletions(-) diff --git a/lib/context-https.ts b/lib/context-https.ts index 7cf5243..406f1e7 100644 --- a/lib/context-https.ts +++ b/lib/context-https.ts @@ -4,6 +4,10 @@ import { connect, 
ConnectionOptions, TLSSocket } from "tls"; import { HttpProtocols } from "./core"; import { AltNameMatch, parseOrigin } from "./san"; + +const needsSocketHack = [ "12", "13" ] + .includes( process.versions.node.split( '.' )[ 0 ] ); + const alpnProtocols = { http1: Buffer.from( "\x08http/1.1" ), @@ -84,6 +88,12 @@ export function connectTLS( resolve( { socket, protocol, altNameMatch } ); } ); + if ( needsSocketHack ) + socket.once( 'secureConnect', ( ) => + { + ( socket as any ).secureConnecting = false; + } ); + socket.once( "error", reject ); } ); } diff --git a/lib/fetch-http2.ts b/lib/fetch-http2.ts index fee409a..3736a84 100644 --- a/lib/fetch-http2.ts +++ b/lib/fetch-http2.ts @@ -248,8 +248,7 @@ async function fetchImpl( } ) ); - stream.on( "response", guard( - ( headers: IncomingHttp2Headers ) => + stream.on( "response", guard( ( headers: IncomingHttp2Headers ) => { const { signal: bodySignal = void 0, diff --git a/test/fetch-h2/context.ts b/test/fetch-h2/context.ts index b2ec749..393eee3 100644 --- a/test/fetch-h2/context.ts +++ b/test/fetch-h2/context.ts @@ -131,7 +131,7 @@ describe( `context (${version} over ${proto.replace( ":", "" )})`, ( ) => ).toBeTruthy( ); } - disconnectAll( ); + await disconnectAll( ); await server.shutdown( ); } ); @@ -154,7 +154,7 @@ describe( `context (${version} over ${proto.replace( ":", "" )})`, ( ) => const res = await response.json( ); expect( res[ "user-agent" ] ).toBe( "foobar" ); - disconnectAll( ); + await disconnectAll( ); await server.shutdown( ); } ); diff --git a/test/fetch-h2/index.ts b/test/fetch-h2/index.ts index fd6623a..6b02f18 100644 --- a/test/fetch-h2/index.ts +++ b/test/fetch-h2/index.ts @@ -824,8 +824,8 @@ describe( `premature stream close (${protoVersion})`, ( ) => const expected = version === "http1" ? 
"socket hang up" - : "Stream prematurely closed"; - expect( err.message ).toContain( expected ); + : [ "Stream prematurely closed", "Connection closed" ]; + expect( expected ).toContain( err.message ); } await disconnectAll( ); From 90d6cf9209b7ec9678e053d91a829fd47518328f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Fri, 4 Sep 2020 19:32:00 +0000 Subject: [PATCH 66/77] docs(readme): added download badge --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index 69e63e2..d6de05a 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,5 @@ [![npm version][npm-image]][npm-url] +[![downloads][downloads-image]][npm-url] [![build status][build-image]][build-url] [![coverage status][coverage-image]][coverage-url] [![Greenkeeper badge](https://badges.greenkeeper.io/grantila/fetch-h2.svg)](https://greenkeeper.io/) @@ -389,6 +390,7 @@ const response = await fetch( url, { method, body } ); [npm-image]: https://img.shields.io/npm/v/fetch-h2.svg [npm-url]: https://npmjs.org/package/fetch-h2 +[downloads-image]: https://img.shields.io/npm/dm/fetch-h2.svg [build-image]: https://img.shields.io/github/workflow/status/grantila/fetch-h2/Master.svg [build-url]: https://github.com/grantila/fetch-h2/actions?query=workflow%3AMaster [coverage-image]: https://coveralls.io/repos/github/grantila/fetch-h2/badge.svg?branch=master From a04b6346f0e60f01ea1420261724e63900f8c8e2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Fri, 4 Sep 2020 19:36:58 +0000 Subject: [PATCH 67/77] chore(deps): bumped all deps --- package.json | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/package.json b/package.json index 7683387..dccf1f9 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "fetch-h2", - "version": "1.0.1", + "version": "0.0.0-development", "description": "HTTP/1+2 Fetch API client for Node.js", "author": "Gustaf Räntilä", "license": "MIT", @@ -52,27 +52,28 @@ "devDependencies": { "@types/execa": "^2.0.0", "@types/from2": "^2.3.0", - "@types/jest": "^25.2.3", - "@types/node": "^14.6.0", + "@types/jest": "^26.0.13", + "@types/node": "^14.6.4", "@types/through2": "^2.0.36", - "commitizen": "^4.1.2", + "commitizen": "^4.2.1", "compd": "^1.3.7", "concurrently": "^5.3.0", - "cz-conventional-changelog": "^3.2.0", + "cz-conventional-changelog": "^3.3.0", "execa": "^4.0.3", "from2": "^2.3.0", - "jest": "^25.1.0", + "jest": "^26.4.2", + "list-open-files": "^1.1.0", "mkcert": "^1.3.0", "rimraf": "^3.0.2", - "ts-jest": "^25.2.1", - "ts-node": "^8.10.2", + "ts-jest": "^26.3.0", + "ts-node": "^9.0.0", "tslint": "^6.1.3", - "typescript": "^3.9.7" + "typescript": "^4.0.2" }, "dependencies": { "@types/tough-cookie": "^4.0.0", "already": "^1.13.1", - "callguard": "^1.2.1", + "callguard": "^2.0.0", "get-stream": "^6.0.0", "through2": "^4.0.2", "to-arraybuffer": "^1.0.1", From 2f6f6ae4ab6d1f3b98357f3e355a23f795178ae3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Fri, 4 Sep 2020 21:48:40 +0200 Subject: [PATCH 68/77] Don't build branches action in master --- .github/workflows/branches.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/branches.yml b/.github/workflows/branches.yml index 6dba6b5..f77e057 100644 --- a/.github/workflows/branches.yml +++ b/.github/workflows/branches.yml @@ -5,8 +5,8 @@ name: Branches on: push: - branches: - - '*' + branches-ignore: + - master jobs: build: From 5e2422eddc568a7a73cd2d5f54555a9298e54224 
Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Fri, 4 Sep 2020 21:49:44 +0200 Subject: [PATCH 69/77] test(test): test the export, not just the raw TypeScript --- .gitignore | 1 + jest.config.exported.js | 6 +++++ package.json | 8 +++++- scripts/create-exported-tests.ts | 46 ++++++++++++++++++++++++++++++++ 4 files changed, 60 insertions(+), 1 deletion(-) create mode 100644 jest.config.exported.js create mode 100644 scripts/create-exported-tests.ts diff --git a/.gitignore b/.gitignore index b77e388..7a2c25b 100644 --- a/.gitignore +++ b/.gitignore @@ -4,3 +4,4 @@ node_modules/ .node-version .nyc_output/ yarn.lock +test-exported/ diff --git a/jest.config.exported.js b/jest.config.exported.js new file mode 100644 index 0000000..6673697 --- /dev/null +++ b/jest.config.exported.js @@ -0,0 +1,6 @@ +const config = require( './jest.config.js' ); +module.exports = { + ...config, + testMatch: ['/test-exported/**/*.ts'], + modulePathIgnorePatterns: ['/lib/', '/test-client/', '/integration/'], +}; diff --git a/package.json b/package.json index dccf1f9..3dff6cb 100644 --- a/package.json +++ b/package.json @@ -24,14 +24,17 @@ "lint": "node_modules/.bin/tslint --project .", "jest:core": "node_modules/.bin/jest --detectOpenHandles --coverage", "jest:fast": "yarn jest:core --config jest.config.unit.js $@", + "jest:exported": "node_modules/.bin/jest --config jest.config.exported.js $@", "jest:integration": "node_modules/.bin/compd -f test/docker-compose.yaml yarn jest:core", "jest:debug": "node --inspect-brk node_modules/.bin/jest", "test": "yarn lint && yarn jest:integration", + "test:exported": "./node_modules/.bin/ts-node scripts/create-exported-tests.ts && yarn jest:exported", "buildtest": "npm run build && npm run jest", "buildtestcov": "npm run build && npm run test", "coveralls": "cat coverage/lcov.info | node_modules/.bin/coveralls", "version": "./node_modules/.bin/ts-node scripts/version-update.ts && npm run build && npm run test && scripts/version-git-add.sh", - "prepack": "npm run build && npm run test", + "clean:pack": "node_modules/.bin/rimraf dist/test* && find dist/ -name '*.map' -delete", + "prepack": "npm run build && npm run test && npm run clean:pack && npm run test:exported", "makecerts": "openssl req -x509 -nodes -days 7300 -newkey rsa:2048 -keyout certs/key.pem -out certs/cert.pem", "cz": "git-cz" }, @@ -54,6 +57,8 @@ "@types/from2": "^2.3.0", "@types/jest": "^26.0.13", "@types/node": "^14.6.4", + "@types/recursive-readdir": "^2.2.0", + "@types/rimraf": "^3.0.0", "@types/through2": "^2.0.36", "commitizen": "^4.2.1", "compd": "^1.3.7", @@ -64,6 +69,7 @@ "jest": "^26.4.2", "list-open-files": "^1.1.0", "mkcert": "^1.3.0", + "recursive-readdir": "^2.2.2", "rimraf": "^3.0.2", "ts-jest": "^26.3.0", "ts-node": "^9.0.0", diff --git a/scripts/create-exported-tests.ts b/scripts/create-exported-tests.ts new file mode 100644 index 0000000..f319c13 --- /dev/null +++ b/scripts/create-exported-tests.ts @@ -0,0 +1,46 @@ +import * as path from "path" +import { + readFile as fsReadFile, + writeFile as fsWriteFile, +} from "fs" +import { promisify } from "util" + +import * as readdir from "recursive-readdir" +import * as execa from "execa" +import * as libRimraf from "rimraf" + + +const readFile = promisify( fsReadFile ); +const writeFile = promisify( fsWriteFile ); +const rimraf = promisify( libRimraf ); + +async function createExportedTests( ) +{ + const root = path.join( __dirname, ".." 
); + const source = path.join( root, "test" ); + const target = path.join( root, "test-exported" ); + + await rimraf( target ); + + await execa( "cp", [ "-r", source, target ] ); + + const files = await readdir( target ); + + for ( const filename of files ) + { + const data = await readFile( filename, 'utf8' ); + await writeFile( + filename, + data + .replace( "../../index", "../../dist" ) + .replace( "../../lib", "../../dist/lib" ) + ); + } +} + +createExportedTests( ) +.catch( err => +{ + console.error( err.stack ); + process.exit( 1 ); +} ); From 121c378e955409e162c23313bdfc3148804cdac6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Fri, 4 Sep 2020 21:51:36 +0200 Subject: [PATCH 70/77] test(test): added socket/session re-use test --- test/fetch-h2/context.ts | 32 +++++++ test/fetch-h2/http1.ts | 95 ++++++++++++++----- .../event-loop-reference.ts | 0 3 files changed, 101 insertions(+), 26 deletions(-) rename test/{fetch-h2 => integration}/event-loop-reference.ts (100%) diff --git a/test/fetch-h2/context.ts b/test/fetch-h2/context.ts index 393eee3..7b1a725 100644 --- a/test/fetch-h2/context.ts +++ b/test/fetch-h2/context.ts @@ -1,4 +1,6 @@ import { map } from "already"; +import { lsof } from "list-open-files"; + import { TestData } from "../lib/server-common"; import { makeMakeServer } from "../lib/server-helpers"; @@ -321,6 +323,8 @@ describe( `context (${version} over ${proto.replace( ":", "" )})`, ( ) => [ "https://en.wikipedia.org/wiki/42", "42" ], ]; + const [ { files: openFilesBefore } ] = await lsof( ); + const resps = await map( urls, { concurrency: Infinity }, @@ -333,6 +337,19 @@ describe( `context (${version} over ${proto.replace( ":", "" )})`, ( ) => } ); + const [ { files: openFilesAfter } ] = await lsof( { } ); + + const numAfter = + openFilesAfter.filter( fd => fd.type === 'IP' ).length; + const numBefore = + openFilesBefore.filter( fd => fd.type === 'IP' ).length; + + // HTTP/1.1 will most likely spawn new sockets, but timing *may* + // affect this. For HTTP/2, it should always just use 1 socket per + // origin / SAN cluster. + if ( version === 'http2' ) + expect( numBefore ).toEqual( numAfter - 1 ); + resps.forEach( ( { expected, got } ) => { expect( expected ).toBe( got ); @@ -353,6 +370,8 @@ describe( `context (${version} over ${proto.replace( ":", "" )})`, ( ) => { lang: "sv", title: "44" }, ] as const; + const [ { files: openFilesBefore } ] = await lsof( ); + const resps = await map( urls, { concurrency: Infinity }, @@ -372,6 +391,19 @@ describe( `context (${version} over ${proto.replace( ":", "" )})`, ( ) => } ); + const [ { files: openFilesAfter } ] = await lsof( { } ); + + const numAfter = + openFilesAfter.filter( fd => fd.type === 'IP' ).length; + const numBefore = + openFilesBefore.filter( fd => fd.type === 'IP' ).length; + + // HTTP/1.1 will most likely spawn new sockets, but timing *may* + // affect this. For HTTP/2, it should always just use 1 socket per + // origin / SAN cluster. 
+ if ( version === 'http2' ) + expect( numBefore ).toEqual( numAfter - 1 ); + resps.forEach( ( { expectedLang, gotLang, expectedTitle, gotTitle } ) => { diff --git a/test/fetch-h2/http1.ts b/test/fetch-h2/http1.ts index 8f3ace9..31a8585 100644 --- a/test/fetch-h2/http1.ts +++ b/test/fetch-h2/http1.ts @@ -1,3 +1,5 @@ +import { lsof } from "list-open-files"; + import { makeMakeServer } from "../lib/server-helpers"; import { @@ -13,46 +15,87 @@ describe( `http1`, ( ) => describe( "keep-alive", ( ) => { - it( "should not send 'connection: close' by default", async ( ) => + describe( "http1.keelAlive === true (default)", ( ) => { - const { server, port } = await makeServer( ); - const { disconnectAll, fetch } = context( { ...cycleOpts } ); + it( "should not send 'connection: close'", async ( ) => + { + const { server, port } = await makeServer( ); + const { disconnectAll, fetch } = context( { ...cycleOpts } ); + + const response1 = ensureStatusSuccess( + await fetch( `http://localhost:${port}/headers` ) + ); + + const headers = await response1.json( ); + + expect( headers.connection ).not.toBe( "close" ); + + disconnectAll( ); + + await server.shutdown( ); + } ); + + it( "should re-use socket", async ( ) => + { + const { server, port } = await makeServer( ); + const { disconnectAll, fetch } = context( { ...cycleOpts } ); - const response1 = ensureStatusSuccess( - await fetch( `http://localhost:${port}/headers` ) - ); + const [ { files: openFilesA } ] = await lsof( { } ); - const headers = await response1.json( ); + const response1 = ensureStatusSuccess( + await fetch( `http://localhost:${port}/headers` ) + ); + await response1.json( ); - expect( headers.connection ).not.toBe( "close" ); + const [ { files: openFilesB } ] = await lsof( { } ); - disconnectAll( ); + const response2 = ensureStatusSuccess( + await fetch( `http://localhost:${port}/headers` ) + ); + await response2.json( ); - await server.shutdown( ); + const [ { files: openFilesC } ] = await lsof( { } ); + + const ipA = openFilesA.filter( fd => fd.type === 'IP' ); + const ipB = openFilesB.filter( fd => fd.type === 'IP' ); + const ipC = openFilesC.filter( fd => fd.type === 'IP' ); + + // 2 less because client+server + expect( ipA.length ).toEqual( ipB.length - 2 ); + expect( ipB.length ).toEqual( ipC.length ); + expect( ipB ).toEqual( ipC ); + + disconnectAll( ); + + await server.shutdown( ); + } ); } ); - it( "should send 'connection: close' if http1.keelAlive === false", - async ( ) => + describe( "http1.keelAlive === false", ( ) => { - const { server, port } = await makeServer( ); - const { disconnectAll, fetch } = context( { - ...cycleOpts, - http1: { - keepAlive: false, - }, - } ); + it( "should send 'connection: close'", + async ( ) => + { + const { server, port } = await makeServer( ); + const { disconnectAll, fetch } = context( { + ...cycleOpts, + http1: { + keepAlive: false, + }, + } ); - const response1 = ensureStatusSuccess( - await fetch( `http://localhost:${port}/headers` ) - ); + const response1 = ensureStatusSuccess( + await fetch( `http://localhost:${port}/headers` ) + ); - const headers = await response1.json( ); + const headers = await response1.json( ); - expect( headers.connection ).toBe( "close" ); + expect( headers.connection ).toBe( "close" ); - disconnectAll( ); + disconnectAll( ); - await server.shutdown( ); + await server.shutdown( ); + } ); } ); } ); } ); diff --git a/test/fetch-h2/event-loop-reference.ts b/test/integration/event-loop-reference.ts similarity index 100% rename from 
test/fetch-h2/event-loop-reference.ts rename to test/integration/event-loop-reference.ts From cc9cb2ba96fabb0f2676dd1db3fa96f285eba23a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Fri, 4 Sep 2020 23:39:04 +0200 Subject: [PATCH 71/77] fix(test): increased timeout of a slow test --- test/fetch-h2/index.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/fetch-h2/index.ts b/test/fetch-h2/index.ts index 6b02f18..cbb33ad 100644 --- a/test/fetch-h2/index.ts +++ b/test/fetch-h2/index.ts @@ -407,7 +407,7 @@ describe( `generic (${protoVersion})`, ( ) => it( "should be able to POST large (16MiB) stream with known length", async ( ) => { - jest.setTimeout( 2000 ); + jest.setTimeout( 4000 ); const { server, port } = await makeServer( ); @@ -455,7 +455,7 @@ describe( `generic (${protoVersion})`, ( ) => it( "should be able to POST large (16MiB) stream with unknown length", async ( ) => { - jest.setTimeout( 2000 ); + jest.setTimeout( 4000 ); const { server, port } = await makeServer( ); From d0c863c9a9d786ce518bbaae9e53adb4599964d1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Fri, 4 Sep 2020 23:45:55 +0200 Subject: [PATCH 72/77] test(ci): skip a flaky (fd-counting) test in CI --- test/fetch-h2/http1.ts | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/test/fetch-h2/http1.ts b/test/fetch-h2/http1.ts index 31a8585..86ead0a 100644 --- a/test/fetch-h2/http1.ts +++ b/test/fetch-h2/http1.ts @@ -8,6 +8,8 @@ import { import { ensureStatusSuccess } from "../lib/utils"; +const itSkipCi = process.env.CI ? it.skip : it; + describe( `http1`, ( ) => { const { cycleOpts, makeServer } = @@ -35,7 +37,7 @@ describe( `http1`, ( ) => await server.shutdown( ); } ); - it( "should re-use socket", async ( ) => + itSkipCi( "should re-use socket", async ( ) => { const { server, port } = await makeServer( ); const { disconnectAll, fetch } = context( { ...cycleOpts } ); From 0da9349016fde5af0a96f6f1136b46edbda6b50f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Sat, 15 May 2021 14:24:44 +0200 Subject: [PATCH 73/77] fix(types): export HttpVersion --- index.ts | 2 ++ 1 file changed, 2 insertions(+) diff --git a/index.ts b/index.ts index 9560d4c..12b9bd9 100644 --- a/index.ts +++ b/index.ts @@ -12,6 +12,7 @@ import { Method, OnTrailers, TimeoutError, + HttpVersion, } from "./lib/core"; import { Headers } from "./lib/headers"; import { Request } from "./lib/request"; @@ -69,6 +70,7 @@ export { Response, AbortError, TimeoutError, + HttpVersion, OnTrailers, ContextOptions, DecodeFunction, From a1a9204db9478adfb8867cfae4132ca81fb19f7c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Sat, 15 May 2021 14:29:59 +0200 Subject: [PATCH 74/77] chore(deps): bumped all dev deps --- package.json | 22 +++++++++++----------- test/lib/server-common.ts | 2 +- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/package.json b/package.json index 3dff6cb..c33f0de 100644 --- a/package.json +++ b/package.json @@ -55,26 +55,26 @@ "devDependencies": { "@types/execa": "^2.0.0", "@types/from2": "^2.3.0", - "@types/jest": "^26.0.13", - "@types/node": "^14.6.4", + "@types/jest": "^26.0.23", + "@types/node": "^15.3.0", "@types/recursive-readdir": "^2.2.0", "@types/rimraf": "^3.0.0", "@types/through2": "^2.0.36", - "commitizen": "^4.2.1", - "compd": "^1.3.7", - "concurrently": "^5.3.0", + "commitizen": "^4.2.4", + "compd": "^2.0.2", + "concurrently": "^6.1.0", "cz-conventional-changelog": 
"^3.3.0", - "execa": "^4.0.3", + "execa": "^5.0.0", "from2": "^2.3.0", - "jest": "^26.4.2", + "jest": "^26.6.3", "list-open-files": "^1.1.0", - "mkcert": "^1.3.0", + "mkcert": "^1.4.0", "recursive-readdir": "^2.2.2", "rimraf": "^3.0.2", - "ts-jest": "^26.3.0", - "ts-node": "^9.0.0", + "ts-jest": "^26.5.6", + "ts-node": "^9.1.1", "tslint": "^6.1.3", - "typescript": "^4.0.2" + "typescript": "^4.2.4" }, "dependencies": { "@types/tough-cookie": "^4.0.0", diff --git a/test/lib/server-common.ts b/test/lib/server-common.ts index 3680cdd..4808a8e 100644 --- a/test/lib/server-common.ts +++ b/test/lib/server-common.ts @@ -47,7 +47,7 @@ export abstract class Server public async listen( port: number | undefined = void 0 ): Promise< number > { - return new Promise( ( resolve, _reject ) => + return new Promise< void >( ( resolve, _reject ) => { this._server.listen( port, "0.0.0.0", resolve ); } ) From 41c37064682a5ea43e8728eee71e52e8cfd67e34 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Sat, 15 May 2021 14:30:22 +0200 Subject: [PATCH 75/77] ci(nodejs): test on nodejs 15 and 16 --- .github/workflows/branches.yml | 2 ++ .github/workflows/master.yml | 2 ++ 2 files changed, 4 insertions(+) diff --git a/.github/workflows/branches.yml b/.github/workflows/branches.yml index f77e057..0813448 100644 --- a/.github/workflows/branches.yml +++ b/.github/workflows/branches.yml @@ -20,6 +20,8 @@ jobs: - 12.x - 13.x - 14.x + - 15.x + - 16.x steps: - uses: actions/checkout@v2 - name: Use Node.js ${{ matrix.node-version }} diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml index 2397e11..8d0327f 100644 --- a/.github/workflows/master.yml +++ b/.github/workflows/master.yml @@ -19,6 +19,8 @@ jobs: - 12.x - 13.x - 14.x + - 15.x + - 16.x steps: - uses: actions/checkout@v2 - name: Use Node.js ${{ matrix.node-version }} From 1e03178816220647eba44f78aa7d8867eef318c6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Sat, 15 May 2021 14:33:34 +0200 Subject: [PATCH 76/77] feat(engine): exporting ES2019 BREAKING CHANGE: Requires NodeJS 12 --- .github/workflows/branches.yml | 1 - .github/workflows/master.yml | 3 +-- README.md | 2 ++ package.json | 2 +- tsconfig.json | 4 ++-- 5 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/branches.yml b/.github/workflows/branches.yml index 0813448..6c2e7c4 100644 --- a/.github/workflows/branches.yml +++ b/.github/workflows/branches.yml @@ -16,7 +16,6 @@ jobs: strategy: matrix: node-version: - - 10.x - 12.x - 13.x - 14.x diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml index 8d0327f..6334169 100644 --- a/.github/workflows/master.yml +++ b/.github/workflows/master.yml @@ -15,7 +15,6 @@ jobs: strategy: matrix: node-version: - - 10.x - 12.x - 13.x - 14.x @@ -43,7 +42,7 @@ jobs: - name: Setup Node.js uses: actions/setup-node@v1 with: - node-version: 12 + node-version: 14 - run: npm i - run: npm run build - run: npm run test --coverage diff --git a/README.md b/README.md index d6de05a..b022483 100644 --- a/README.md +++ b/README.md @@ -31,6 +31,8 @@ Since 2.0.0, `fetch-h2` requires Node.js 10.4. Since 2.4.0, `fetch-h2` has full TLS SAN (Subject Alternative Name) support. +Since 3.0.0, `fetch-h2` requires Node.js 12. 
+ # API diff --git a/package.json b/package.json index c33f0de..11101c3 100644 --- a/package.json +++ b/package.json @@ -12,7 +12,7 @@ "types": "./dist/index.d.ts", "directories": {}, "engines": { - "node": ">=10.4" + "node": ">=12" }, "files": [ "dist" diff --git a/tsconfig.json b/tsconfig.json index 9fe7e19..b003661 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -1,11 +1,11 @@ { "compilerOptions": { "declaration": true, - "lib": [ "es2015", "es2017" ], + "lib": [ "ES2019" ], "outDir": "dist", "sourceMap": true, "module": "CommonJS", - "target": "es2018", + "target": "ES2019", "noUnusedParameters": true, "noUnusedLocals": true, "strict": true From 19592edc8472f745c9d3d09192161f073a3a6524 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gustaf=20R=C3=A4ntil=C3=A4?= Date: Sat, 15 May 2021 14:35:09 +0200 Subject: [PATCH 77/77] feat(deps): bumped deps to latest version --- package.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index 11101c3..d2c85f1 100644 --- a/package.json +++ b/package.json @@ -78,9 +78,9 @@ }, "dependencies": { "@types/tough-cookie": "^4.0.0", - "already": "^1.13.1", + "already": "^1.13.2", "callguard": "^2.0.0", - "get-stream": "^6.0.0", + "get-stream": "^6.0.1", "through2": "^4.0.2", "to-arraybuffer": "^1.0.1", "tough-cookie": "^4.0.0"